hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cb2bd2b9078c9954b594240126f3b07cf2f8170d
| 26,296
|
py
|
Python
|
tests/routes/test_groups.py
|
suneettipirneni/hackathon-2021-backend
|
18df5ce348303900cefa21cc88cc56e1b07dc562
|
[
"MIT"
] | null | null | null |
tests/routes/test_groups.py
|
suneettipirneni/hackathon-2021-backend
|
18df5ce348303900cefa21cc88cc56e1b07dc562
|
[
"MIT"
] | null | null | null |
tests/routes/test_groups.py
|
suneettipirneni/hackathon-2021-backend
|
18df5ce348303900cefa21cc88cc56e1b07dc562
|
[
"MIT"
] | null | null | null |
# flake8: noqa
import json
from src.models.hacker import Hacker
from src.models.user import ROLES
from src.models.group import Group
from tests.base import BaseTestCase
class TestGroupsBlueprint(BaseTestCase):
    """Tests for the Groups endpoints.

    The three-hacker / three-category fixture set-up that was previously
    copy-pasted into every test is factored into private helpers, so each
    test reads as arrange-via-helper, act, assert.  Section markers that
    were bare string literals (statements, not comments) are now comments.
    """

    # (username, email, password) triples used by the API-level fixtures.
    API_HACKERS = [
        ("conroy", "conroy@gmail.com", "fdsagfwedgasd"),
        ("john", "john@gmail.com", "fgnjmdsftgjh"),
        ("doe", "doe@gmail.com", "sdfghjk"),
    ]
    # (first_name, username, email, password) used by the model-level fixtures.
    MODEL_HACKERS = [
        ("Conroy", "conroy", "conroy@gmail.com", "dsafadsgdasg"),
        ("John", "john", "john@gmail.com", "fgnjmdsftgjh"),
        ("Doe", "doe", "doe@gmail.com", "sdfghjk"),
    ]
    EMAILS = ["conroy@gmail.com", "john@gmail.com", "doe@gmail.com"]
    CATEGORIES = ["category 1", "category 2", "category 3"]

    # ------------------------------------------------------------------
    # helpers
    # ------------------------------------------------------------------

    def _post_hackers(self):
        """Create the three fixture hackers through the public API and
        assert each creation succeeded (201, three documents persisted)."""
        for username, email, password in self.API_HACKERS:
            res = self.client.post(
                "/api/hackers/",
                data={"hacker": json.dumps(
                    {
                        "username": username,
                        "email": email,
                        "password": password,
                    }
                )},
                content_type="multipart/form-data",
            )
            self.assertEqual(res.status_code, 201)
        self.assertEqual(Hacker.objects.count(), 3)

    def _create_hackers(self):
        """Create the three fixture hackers directly at the model layer.

        Returns the created Hacker documents in fixture order.
        """
        return [
            Hacker.createOne(
                first_name=first_name,
                username=username,
                email=email,
                password=password,
                roles=ROLES.HACKER,
            )
            for first_name, username, email, password in self.MODEL_HACKERS
        ]

    def _create_group(self, name="My Group", members=None):
        """Create a group at the model layer with the standard categories.

        ``members`` is optional so tests can build an initially empty group.
        """
        kwargs = {"name": name, "categories": list(self.CATEGORIES)}
        if members is not None:
            kwargs["members"] = members
        return Group.createOne(**kwargs)

    def _post_group(self, name, members):
        """POST a new group through the API; return the raw response."""
        return self.client.post(
            "/api/groups/",
            data=json.dumps(
                {
                    "name": name,
                    "members": members,
                    "categories": list(self.CATEGORIES),
                }
            ),
            content_type="application/json",
        )

    def _put_group(self, group_name, payload):
        """PUT ``payload`` to the named group's edit endpoint."""
        return self.client.put(
            "/api/groups/%s/" % group_name,
            data=json.dumps(payload),
            content_type="application/json",
        )

    # ------------------------------------------------------------------
    # create_group (worked on by Conroy)
    # ------------------------------------------------------------------

    def test_create_group(self):
        self._post_hackers()
        res = self._post_group("My Group", self.EMAILS)
        self.assertEqual(res.status_code, 201)
        self.assertEqual(Group.objects.count(), 1)

    def test_create_group_invalid_json(self):
        # An empty payload must be rejected without creating anything.
        res = self.client.post(
            "/api/groups/",
            data=json.dumps({}),
            content_type="application/json",
        )
        self.assertEqual(res.status_code, 400)
        self.assertEqual(Group.objects.count(), 0)

    def test_create_group_member_not_found(self):
        self._post_hackers()
        res = self._post_group(
            "My Group",
            ["obviouslynotmyemail@gmail.com", "john@gmail.com", "doe@gmail.com"],
        )
        self.assertEqual(res.status_code, 404)
        self.assertEqual(Group.objects.count(), 0)

    def test_create_group_duplicate_group(self):
        self._post_hackers()
        res = self._post_group("My Group", ["conroy@gmail.com", "john@gmail.com"])
        self.assertEqual(res.status_code, 201)
        self.assertEqual(Group.objects.count(), 1)
        # A second group with the same name must be rejected with 409.
        res = self._post_group("My Group", ["doe@gmail.com"])
        self.assertEqual(res.status_code, 409)
        self.assertEqual(Group.objects.count(), 1)

    def test_create_group_invalid_datatypes(self):
        self._post_hackers()
        # "name" must be a string; an int should fail validation.
        res = self._post_group(2142114, self.EMAILS)
        self.assertEqual(res.status_code, 400)
        self.assertEqual(Group.objects.count(), 0)

    # ------------------------------------------------------------------
    # edit_group (worked on by Conroy)
    # ------------------------------------------------------------------

    def test_edit_group(self):
        self._create_group(members=self._create_hackers())
        res = self._put_group(
            "My Group",
            {"name": "My Updated Group", "members": self.EMAILS},
        )
        self.assertEqual(res.status_code, 201)

    def test_edit_group_invalid_json(self):
        self._create_group(members=self._create_hackers())
        res = self._put_group("My Group", {})
        self.assertEqual(res.status_code, 400)

    def test_edit_group_not_found(self):
        self._create_group(members=self._create_hackers())
        res = self._put_group(
            "Not My Group",
            {"name": "My Updated Group", "members": self.EMAILS},
        )
        self.assertEqual(res.status_code, 404)

    def test_edit_group_member_not_found(self):
        self._create_group(members=self._create_hackers())
        res = self._put_group(
            "My Group",
            {
                "name": "My Updated Group",
                "members": [
                    "obviouslynotmyemail@gmail.com",
                    "john@gmail.com",
                    "doe@gmail.com",
                ],
            },
        )
        self.assertEqual(res.status_code, 404)

    def test_edit_group_duplicate_group(self):
        hackers = self._create_hackers()
        self._create_group(name="Group 1", members=hackers)
        self._create_group(name="Group 2", members=hackers)
        # Renaming "Group 1" to the already-taken "Group 2" must conflict.
        res = self._put_group(
            "Group 1",
            {"name": "Group 2", "members": self.EMAILS},
        )
        self.assertEqual(res.status_code, 409)

    def test_edit_group_invalid_datatypes(self):
        self._create_group(members=self._create_hackers())
        res = self._put_group("My Group", {"name": 1, "members": self.EMAILS})
        self.assertEqual(res.status_code, 400)

    # ------------------------------------------------------------------
    # add_member_to_group
    # ------------------------------------------------------------------

    def test_add_member_to_group(self):
        hacker1, hacker2, _ = self._create_hackers()
        self._create_group(members=[hacker1, hacker2])
        res = self.client.put("/api/groups/My Group/doe/")
        self.assertEqual(res.status_code, 200)
        self.assertEqual(Group.objects.first()["members"][2]["username"], "doe")
        # Test for the case when the group is initially empty.
        self._create_group(name="My Group2")
        res = self.client.put("/api/groups/My Group2/doe/")
        self.assertEqual(res.status_code, 200)
        self.assertEqual(Group.objects[1]["members"][0]["username"], "doe")

    def test_add_member_to_group_group_not_found(self):
        res = self.client.put("/api/groups/group/hacker/")
        data = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data["description"], "Group with the given name was not found.")

    def test_add_member_to_group_member_not_found(self):
        self._create_group()
        res = self.client.put("/api/groups/My Group/hacker/")
        data = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data["description"], "Hacker with the given username was not found.")

    # ------------------------------------------------------------------
    # get_group (worked on by Conroy)
    # ------------------------------------------------------------------

    def test_get_group(self):
        self._create_group(members=self._create_hackers())
        res = self.client.get("/api/groups/My Group/")
        self.assertEqual(res.status_code, 200)

    def test_get_group_not_found(self):
        self._create_group(members=self._create_hackers())
        res = self.client.get("/api/groups/Obviously Not My Group/")
        self.assertEqual(res.status_code, 404)

    # ------------------------------------------------------------------
    # get_all_groups
    # ------------------------------------------------------------------

    def test_get_all_groups(self):
        hackers = self._create_hackers()
        self._create_group(name="My Group", members=hackers)
        self._create_group(name="His Group", members=hackers[1:])
        res = self.client.get("api/groups/get_all_groups/")
        data = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data["groups"][0]["name"], "My Group")
        self.assertEqual(data["groups"][1]["name"], "His Group")

    def test_get_all_groups_not_found(self):
        res = self.client.get("api/groups/get_all_groups/")
        data = json.loads(res.data.decode())
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data["name"], "Not Found")
| 30.190586
| 94
| 0.427023
| 2,132
| 26,296
| 5.156191
| 0.053002
| 0.050214
| 0.06113
| 0.065496
| 0.930956
| 0.920222
| 0.911216
| 0.891749
| 0.889657
| 0.875466
| 0
| 0.019977
| 0.459385
| 26,296
| 870
| 95
| 30.225287
| 0.753306
| 0.017683
| 0
| 0.765537
| 0
| 0
| 0.166865
| 0.006198
| 0
| 0
| 0
| 0
| 0.069209
| 1
| 0.025424
| false
| 0.059322
| 0.007062
| 0
| 0.033898
| 0.001412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
cb78c1357d27b47d45ffc803db531ec47a701015
| 2,193
|
py
|
Python
|
service/verify/issue_4_33.py
|
NASA-PDS/registry-api
|
31956b47e366a20f280889bbad043fedade486a1
|
[
"Apache-2.0"
] | null | null | null |
service/verify/issue_4_33.py
|
NASA-PDS/registry-api
|
31956b47e366a20f280889bbad043fedade486a1
|
[
"Apache-2.0"
] | 61
|
2022-01-05T15:41:53.000Z
|
2022-03-31T21:36:57.000Z
|
service/verify/issue_4_33.py
|
NASA-PDS/registry-api
|
31956b47e366a20f280889bbad043fedade486a1
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/env python3
'''verify the acceptance criteria for issue 4

*requires* requests
'''
import requests

# (url, expected HTTP status) pairs covering well-formed and malformed
# lidvid/lid lookups against a locally running registry-api instance.
TEST_N_CRITERIA = [
    ('http://localhost:8080/products/urn:nasa:pds:izenberg_pdart14_meap:document::1.0', 200),
    ('http://localhost:8080/products/urn:nasa:pds:izenberg_pdart14_meap:document', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::1.0', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube', 200),
    ('http://localhost:8080/bundles/urn:nasa:pds:izenberg_pdart14_meap::1.0', 200),
    ('http://localhost:8080/bundles/urn:nasa:pds:izenberg_pdart14_meap', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::1.0/products', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube/products', 200),
    ('http://localhost:8080/bundles/urn:nasa:pds:izenberg_pdart14_meap::1.0/collections', 200),
    ('http://localhost:8080/bundles/urn:nasa:pds:izenberg_pdart14_meap/collections', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::1.', 404),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::', 404),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube:', 404),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecub', 404),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::1./products', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::1/products', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube::/products', 200),
    ('http://localhost:8080/collections/urn:nasa:pds:izenberg_pdart14_meap:data_imagecube:/products', 404),
]

# Fail fast instead of hanging forever when the local service is unresponsive.
REQUEST_TIMEOUT_SECONDS = 30

for url, expectation in TEST_N_CRITERIA:
    result = requests.get(
        url,
        headers={'Accept': 'application/json'},
        timeout=REQUEST_TIMEOUT_SECONDS,
    )
    if result.status_code == expectation:
        print('success', result.status_code, url)
    else:
        print('failed', expectation, '!=', result.status_code, url)
| 59.27027
| 110
| 0.75285
| 298
| 2,193
| 5.355705
| 0.177852
| 0.146617
| 0.191729
| 0.203008
| 0.815163
| 0.801378
| 0.798246
| 0.798246
| 0.798246
| 0.798246
| 0
| 0.087235
| 0.074783
| 2,193
| 36
| 111
| 60.916667
| 0.699359
| 0.039216
| 0
| 0
| 0
| 0.222222
| 0.740952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.037037
| 0.037037
| 0
| 0.037037
| 0.074074
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1dfbeaf3b1e97a8c09736a63c6096513db2464d7
| 8,579
|
py
|
Python
|
modelchimp/views/api/project_dashboard.py
|
samzer/modelchimp-server
|
48668d0f73025b2cc967006b3193b67aaf970ad7
|
[
"BSD-2-Clause"
] | 134
|
2018-11-07T08:35:47.000Z
|
2022-01-09T00:39:40.000Z
|
modelchimp/views/api/project_dashboard.py
|
samzer/modelchimp-server
|
48668d0f73025b2cc967006b3193b67aaf970ad7
|
[
"BSD-2-Clause"
] | 841
|
2018-11-06T19:45:04.000Z
|
2022-03-31T13:07:16.000Z
|
modelchimp/views/api/project_dashboard.py
|
samzer/modelchimp-server
|
48668d0f73025b2cc967006b3193b67aaf970ad7
|
[
"BSD-2-Clause"
] | 16
|
2019-02-08T12:48:17.000Z
|
2021-02-18T22:11:38.000Z
|
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from modelchimp.utils.data_utils import execute_query
from modelchimp.api_permissions import HasProjectMembership
from rest_framework.permissions import IsAuthenticated
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_metric_chart(request, project_id):
    """Return, per experiment, the max (or min) value of a chosen metric.

    Query params:
        metric       -- metric name to aggregate; validated against the
                        metric names actually stored for this project.
        max-min-flag -- 1 aggregates with SQL max(), 0 with min().

    Returns 400 with "Error: ..." text when validation fails, otherwise
    200 with the rows produced by ``execute_query``.
    """
    try:
        # Get the filter parameters from the request
        params = request.query_params
        metric = None
        max_min_flag = None
        # Check if the metric type exists
        if 'max-min-flag' in params:
            max_min_flag = int(params['max-min-flag'])
            if max_min_flag not in [1,0]:
                raise ValueError('Passed in incorrect max_min_flag')
        if 'metric' in params:
            metric = str(params['metric'])
            # Whitelist check: the requested metric must be one that is
            # actually recorded for this project.
            metric_query = '''
            select distinct value as metric
            from modelchimp_experiment ml,
            jsonb_array_elements(ml.metrics::jsonb -> 'metric_list')
            where project_id = %s
            '''
            metric_query = metric_query % (project_id,)
            metric_query_result = execute_query(metric_query)
            metric_query_result = [ mqr['metric'] for mqr in metric_query_result]
            if metric not in metric_query_result:
                raise ValueError('Metric does not exist')
    except Exception as e:
        # Any parse/validation failure surfaces to the client as a 400.
        return Response("Error: %s" % e, status=status.HTTP_400_BAD_REQUEST)
    # NOTE(review): SQL is assembled with %-interpolation rather than bound
    # parameters. `metric` is constrained by the whitelist check above, but
    # `project_id` is interpolated directly -- presumably the URL route
    # restricts it to digits; confirm, or switch to parameterized queries
    # if execute_query supports them.
    # Experiments whose name equals their experiment_id get a 7-character
    # short name via SUBSTRING(name,0,8).
    query = '''
    select
    id,
    name,
    short_name,
    %s(value::decimal) as value
    from (select
    id,
    name,
    CASE WHEN name = experiment_id THEN SUBSTRING(name,0,8)
    ELSE name
    END as short_name,
    value ->> 'epoch' as epoch,
    value ->> 'value' as value
    from modelchimp_experiment ml,
    jsonb_array_elements(ml.metrics::jsonb -> 'evaluation' -> '%s')
    where project_id = %s ) a
    group by id, name, short_name
    '''
    query = query % (
        'max' if max_min_flag == 1 else 'min',
        metric,
        project_id,
    )
    result_raw = execute_query(query)
    return Response(result_raw, status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_metric_filter(request, project_id):
    """Return the distinct metric names recorded for a project, optionally
    restricted to experiments where a given model parameter is set.

    Query params:
        param -- optional; validated against the parameter keys actually
                 stored for this project before being used in the filter.
    """
    requested_param = None
    try:
        request_args = request.query_params
        if 'param' in request_args:
            requested_param = str(request_args['param'])
            # Whitelist check: the parameter must exist for this project.
            known_params_sql = '''
            select distinct json_object_keys(parameters::json) as param
            from modelchimp_experiment ml
            where json_typeof(parameters::json) = 'object'
            and project_id = %s
            ''' % (project_id,)
            known_params = [row['param'] for row in execute_query(known_params_sql)]
            if requested_param not in known_params:
                raise ValueError('Model Parameter does not exist')
    except Exception as e:
        return Response("Error: %s" % e, status=status.HTTP_400_BAD_REQUEST)
    # Only filter on the parameter when one was supplied (and validated).
    param_filter = (
        "and parameters->>'%s' is not null" % (requested_param, )
        if requested_param else ''
    )
    metric_sql = '''
    select distinct value as metric
    from modelchimp_experiment ml,
    jsonb_array_elements(ml.metrics::jsonb -> 'metric_list')
    where project_id = %s %s
    order by metric
    ''' % (project_id, param_filter)
    return Response(execute_query(metric_sql), status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_duration_chart(request, project_id):
    """Return, per experiment, the summed durations stored under a tag.

    Query params:
        tag -- duration tag to aggregate; validated against the tags
               actually stored for this project.

    Returns 400 with "Error: ..." text when validation fails, otherwise
    200 with the rows produced by ``execute_query``.
    """
    try:
        # Get the filter parameters from the request
        params = request.query_params
        tag = None
        if 'tag' in params:
            tag = str(params['tag'])
            # Whitelist check against the tags stored for this project.
            tag_query = '''
            select distinct value as tag
            from modelchimp_experiment ml,
            jsonb_array_elements(ml.durations::jsonb -> 'tag_list')
            where project_id = %s
            '''
            tag_query = tag_query % (project_id,)
            tag_query_result = execute_query(tag_query)
            # The comprehension reuses the name `tag`, but comprehensions
            # have their own scope in Python 3, so the outer `tag` survives.
            tag_query_result = [ tag['tag'] for tag in tag_query_result]
            if tag not in tag_query_result:
                raise ValueError('Duration tag does not exist')
    except Exception as e:
        return Response("Error: %s" % e, status=status.HTTP_400_BAD_REQUEST)
    # NOTE(review): if no 'tag' param is given, `tag` stays None and the
    # interpolation below looks up the literal key 'None' -- presumably
    # yielding no rows; confirm that is intended. SQL is also built via
    # %-interpolation rather than bound parameters (tag is whitelisted,
    # project_id comes from the URL route).
    query = '''
    select
    id,
    name,
    short_name,
    sum(value::decimal) as value
    from (select
    id,
    name,
    CASE WHEN name = experiment_id THEN SUBSTRING(name,0,8)
    ELSE name
    END as short_name,
    value ->> 'epoch' as epoch,
    value ->> 'value' as value
    from modelchimp_experiment ml,
    json_array_elements(ml.durations::json -> 'duration' -> '%s')
    where project_id = %s ) a
    group by id, name, short_name
    '''
    query = query % (
        tag,
        project_id,
    )
    result_raw = execute_query(query)
    return Response(result_raw, status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_duration_filter(request, project_id):
    """List the distinct duration tags recorded for a project, sorted."""
    tag_sql = '''
    select distinct value as tag
    from modelchimp_experiment ml,
    jsonb_array_elements(ml.durations::jsonb -> 'tag_list')
    where project_id = %s
    order by tag
    ''' % (project_id,)
    return Response(execute_query(tag_sql), status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_parameter_metric_chart(request, project_id):
    """Return the best metric value per distinct value of a model parameter.

    Query params:
        metric       -- metric name; validated against the metrics stored
                        for this project.
        param        -- model parameter key; validated against the keys
                        stored for this project.
        max-min-flag -- 1 ranks descending (best = max), 0 ascending.

    Returns 400 with "Error: ..." text when validation fails, otherwise
    200 with the rows produced by ``execute_query``.
    """
    try:
        # Get the filter parameters from the request
        params = request.query_params
        metric = None
        max_min_flag = None
        param = None
        # Check if the metric type exists
        if 'max-min-flag' in params:
            max_min_flag = int(params['max-min-flag'])
            if max_min_flag not in [1,0]:
                raise ValueError('Passed in incorrect max_min_flag')
        if 'metric' in params:
            metric = str(params['metric'])
            # Whitelist check: the metric must exist for this project.
            metric_query = '''
            select distinct value as metric
            from modelchimp_experiment ml,
            jsonb_array_elements(ml.metrics::jsonb -> 'metric_list')
            where project_id = %s
            '''
            metric_query = metric_query % (project_id,)
            metric_query_result = execute_query(metric_query)
            metric_query_result = [ mqr['metric'] for mqr in metric_query_result]
            if metric not in metric_query_result:
                raise ValueError('Metric does not exist')
        if 'param' in params:
            param = str(params['param'])
            # Whitelist check: the parameter key must exist for this project.
            param_query = '''
            select distinct json_object_keys(parameters::json) as param
            from modelchimp_experiment ml
            where json_typeof(parameters::json) = 'object'
            and project_id = %s
            '''
            param_query = param_query % (project_id,)
            param_query_result = execute_query(param_query)
            param_query_result = [ pqr['param'] for pqr in param_query_result]
            if param not in param_query_result:
                raise ValueError('Model Parameter does not exist')
    except Exception as e:
        return Response("Error: %s" % e, status=status.HTTP_400_BAD_REQUEST)
    # Rank rows per parameter value; DESC picks the maximum as rank 1.
    max_sql = 'DESC' if max_min_flag == 1 else ''
    # NOTE(review): the SQL below is assembled with an f-string, not bound
    # parameters. `metric` and `param` are constrained by the whitelist
    # checks above; `project_id` is interpolated directly -- presumably the
    # URL route restricts it to digits. Confirm, or move to parameterized
    # queries if execute_query supports them.
    query = f'''
    select id,
    name,
    short_name,
    param,
    value,
    max_rank
    from (select
    id,
    name,
    CASE WHEN name = experiment_id THEN SUBSTRING(name,0,8)
    ELSE name
    END as short_name,
    parameters->>'{param}' as param,
    value ->> 'epoch' as epoch,
    value -> 'value' as value,
    row_number() OVER (PARTITION BY parameters->>'{param}' ORDER BY value -> 'value' {max_sql}) as max_rank
    from modelchimp_experiment ml,
    jsonb_array_elements(ml.metrics::jsonb -> 'evaluation' -> '{metric}')
    where project_id = {project_id}
    and parameters->>'{param}' is not null) a
    where max_rank = 1
    '''
    result_raw = execute_query(query)
    return Response(result_raw, status=status.HTTP_200_OK)
@api_view(['GET'])
@permission_classes((HasProjectMembership, IsAuthenticated))
def experiment_parameter_metric_filter(request, project_id):
    """Return the distinct model-parameter keys stored for a project.

    Only experiments whose parameters column holds a JSON object contribute
    keys; the result feeds the parameter dropdown of the metric chart.
    """
    query = '''
    select distinct json_object_keys(parameters::json) as param
    from modelchimp_experiment ml
    where json_typeof(parameters::json) = 'object'
    and project_id = %s
    '''
    # SECURITY: the int() cast prevents SQL injection through the URL
    # parameter, since execute_query accepts only a pre-formatted string.
    query = query % (int(project_id),)
    result_raw = execute_query(query)
    return Response(result_raw, status=status.HTTP_200_OK)
| 29.685121
| 109
| 0.658119
| 1,091
| 8,579
| 4.944088
| 0.107241
| 0.046719
| 0.025955
| 0.053022
| 0.84835
| 0.843344
| 0.837041
| 0.828884
| 0.822581
| 0.822581
| 0
| 0.006644
| 0.2456
| 8,579
| 288
| 110
| 29.788194
| 0.826792
| 0.027392
| 0
| 0.782051
| 0
| 0.004274
| 0.379273
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0.008547
| 0.025641
| 0
| 0.094017
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69942a69acfed2f6d38becceddf8283244f9fa48
| 2,616
|
py
|
Python
|
Sample_Code/Problem_5.py
|
clarkbains/1405_Practice_Problems
|
f8b20e2d0e469000f08f3fb698f26f43d7394b33
|
[
"Unlicense"
] | null | null | null |
Sample_Code/Problem_5.py
|
clarkbains/1405_Practice_Problems
|
f8b20e2d0e469000f08f3fb698f26f43d7394b33
|
[
"Unlicense"
] | null | null | null |
Sample_Code/Problem_5.py
|
clarkbains/1405_Practice_Problems
|
f8b20e2d0e469000f08f3fb698f26f43d7394b33
|
[
"Unlicense"
] | null | null | null |
# Course/grade management mock-up: prompts for a course action, then for a
# grade action when editing, and finally echoes/repeats the chosen action.
# First pass: gather the selection.
print ("Please Select an option")
print ("[1] - Add a course")
print ("[2] - Drop a course")
print ("[3] - Edit a grade")
optionL1 = input ("option: ")
optionAsNumL1 = int(optionL1)
if optionAsNumL1 == 1:
    courseName = input ("What is the name of this course to add? ")
    courseId = input ("What is the id of this course? ")
elif optionAsNumL1 == 2:
    courseName = input ("What is the name of this course to remove? ")
elif optionAsNumL1 == 3:
    optionL2 = input ("Please Select a course: ")
    courseName = int(optionL2)
    print ("Please Select an option")
    print ("[1] - Add a Grade")
    print ("[2] - Drop a Grade")
    print ("[3] - Edit a Grade")
    optionL3 = input ("option: ")
    optionAsNumL3 = int(optionL3)
    if optionAsNumL3 == 1:
        grade_weight = float(input ("What is the weight of the grade? [0.00-1.00] "))
        grade_val = float(input ("What is the grade? [0.00-1.00] "))
    elif optionAsNumL3 == 2:
        print ("Pick the grade you want to remove: ")
        print ("*List Grades here*")
        optionL4 = input ("Grade: ")
        optionAsNumL4 = int(optionL4)
    elif optionAsNumL3 == 3:
        print ("Pick the grade you want to edit: ")
        print ("*List Grades here*")
        optionL4 = input ("Grade: ")
        optionAsNumL4 = int(optionL4)
        new_mark = float(input ("New grade is: "))
        new_weight = float(input ("New weight is: "))
# Second pass: confirm option 1, or re-run the prompts for options 2/3.
if optionAsNumL1 == 1:
    print ("You have added " + courseName + ", course id " + courseId + ".")
elif optionAsNumL1 == 2:
    # BUG FIX: prompt previously read "this course ot remove" (typo).
    courseName = input ("What is the name of this course to remove? ")
elif optionAsNumL1 == 3:
    optionL2 = input ("Please Select a course: ")
    courseName = int(optionL2)
    print ("Please Select an option")
    print ("[1] - Add a Grade")
    print ("[2] - Drop a Grade")
    print ("[3] - Edit a Grade")
    optionL3 = input ("option: ")
    optionAsNumL3 = int(optionL3)
    if optionAsNumL3 == 1:
        grade_weight = float(input ("What is the weight of the grade? [0.00-1.00] "))
        grade_val = float(input ("What is the grade? [0.00-1.00] "))
        print ()
    elif optionAsNumL3 == 2:
        print ("Pick the grade you want to remove: ")
        print ("*List Grades here*")
        optionL4 = input ("Grade: ")
        optionAsNumL4 = int(optionL4)
    elif optionAsNumL3 == 3:
        print ("Pick the grade you want to edit: ")
        print ("*List Grades here*")
        optionL4 = input ("Grade: ")
        optionAsNumL4 = int(optionL4)
        new_mark = float(input ("New grade is: "))
        new_weight = float(input ("New weight is: "))
| 37.371429
| 85
| 0.594801
| 337
| 2,616
| 4.593472
| 0.154303
| 0.046512
| 0.056848
| 0.072351
| 0.879199
| 0.868863
| 0.868863
| 0.868863
| 0.868863
| 0.846253
| 0
| 0.042797
| 0.267584
| 2,616
| 69
| 86
| 37.913043
| 0.765136
| 0
| 0
| 0.828125
| 0
| 0
| 0.357061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.34375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
699ea624de5305713fce2b21f830ad1e283dc04e
| 20,786
|
py
|
Python
|
Battleship.py
|
W553Li/Battleship
|
c0d7d74657122ef3ac374eceeb7724bf94955b48
|
[
"MIT"
] | null | null | null |
Battleship.py
|
W553Li/Battleship
|
c0d7d74657122ef3ac374eceeb7724bf94955b48
|
[
"MIT"
] | null | null | null |
Battleship.py
|
W553Li/Battleship
|
c0d7d74657122ef3ac374eceeb7724bf94955b48
|
[
"MIT"
] | null | null | null |
import random
import os

# Clears the console; 'cls' is Windows-only, a no-op failure elsewhere.
clear = lambda: os.system('cls')

# Creating Board and Player Selection
board = []
board_size = 0
clear()
player_number = int(input("How many players are playing?(1-2): "))
while board_size < 1 or board_size > 10:
    board_size = int(input("Board Size(1-10): "))
    if board_size < 1 or board_size > 10:
        print ("Thats not between 1-10!")

def print_board(board):
    """Print the board, one row per line, cells separated by spaces."""
    for row in board:
        print (" ".join(row))

# BUG FIX: print_board used to be (re)defined inside each player_number
# branch, so an out-of-range player count left it undefined and the
# unconditional print_board(board) call below raised NameError. Define it
# once above and keep only the board construction in the branches.
if player_number == 2:
    # Two boards side by side, separated by a "|" column.
    for x in range(0, board_size):
        board.append(["~"] * board_size + ["|"] + ["~"] * board_size)
if player_number == 1:
    for x in range(0, board_size):
        board.append(["~"] * board_size)
print_board(board)
# Creating Battleships
if player_number == 1:
    # Choose game mode: "sink" = computer hides, player guesses;
    # "hide" = player hides, computer guesses.
    player_select = "player"
    while player_select != "sink" and player_select != "hide":
        player_select = input("Do you want to sink or hide ships? (Choose 'sink' or 'hide'): ")
        if player_select != str("sink") and player_select != str("hide"):
            print ("That is not a valid playmode!")
    Number_Battleships = 0
    while Number_Battleships < 1 or Number_Battleships > 3:
        Number_Battleships = int(input("Number of Battleships(1-3): "))
        if Number_Battleships < 1 or Number_Battleships > 3:
            print ("Thats not between 1-3!")
    ship_row = []
    ship_column = []
    if player_select == "sink":
        if board_size >= Number_Battleships:
            # Enough distinct rows/columns: sample without replacement.
            ship_row = random.sample(range(len(board)), Number_Battleships)
            ship_column = random.sample(range(len(board)), Number_Battleships)
        if board_size < Number_Battleships:
            # Board smaller than fleet: allow repeats.
            for ships in range(Number_Battleships):
                ship_row.append(random.randint(0, len(board) - 1))
                ship_column.append(random.randint(0, len(board) - 1))
    for ships in range(Number_Battleships):
        if player_select == "hide":
            sr = -1
            while sr < 0 or sr > ((board_size) - 1):
                sr = int(input("Hide your ship (row) between 0-" + str((board_size) - 1) + ": "))
                if sr < 0 or sr > ((board_size) - 1):
                    print ("Thats not between 0-" + str((board_size) - 1) + "!")
            # BUG FIX: coordinates on the board edge (0 or board_size-1) were
            # silently dropped by an exclusive-bounds check, leaving the
            # coordinate lists short and crashing later lookups. The while
            # loop above already guarantees validity, so always append.
            ship_row.append(sr)
            sc = -1
            while sc < 0 or sc > ((board_size) - 1):
                sc = int(input("Hide your ship (column) between 0-" + str((board_size) - 1) + ": "))
                if sc < 0 or sc > ((board_size) - 1):
                    print ("Thats not between 0-" + str((board_size) - 1) + "!")
            ship_column.append(sc)
if player_number == 2:
    Number_Battleships = 0
    while Number_Battleships < 1 or Number_Battleships > 3:
        Number_Battleships = int(input("Number of Battleships(1-3): "))
        if Number_Battleships < 1 or Number_Battleships > 3:
            print ("Thats not between 1-3!")
    # Player 1 hides ships; player 2 looks away (screen cleared afterwards).
    player_one_r = []
    player_one_c = []
    for ships in range(Number_Battleships):
        por = -1
        while por < 0 or por > board_size - 1:
            por = int(input("Player 1, hide your ship (row) between 0-" + str((board_size - 1)) + ": "))
            if por < 0 or por > board_size - 1:
                print ("Thats not between 0-" + str((board_size - 1)) + "!")
        # BUG FIX: edge coordinates (0 / board_size-1) used to be dropped by
        # an exclusive-bounds check even after the validation loop accepted
        # them, shortening the coordinate lists; always append.
        player_one_r.append(por)
        poc = -1
        while poc < 0 or poc > board_size - 1:
            poc = int(input("Player 1, hide your ship (column) between 0-" + str((board_size - 1)) + ": "))
            if poc < 0 or poc > board_size - 1:
                print ("Thats not between 0-" + str((board_size - 1)) + "!")
        player_one_c.append(poc)
    clear()
    player_two_r = []
    player_two_c = []
    for ships in range(Number_Battleships):
        ptr = -1
        while ptr < 0 or ptr > board_size - 1:
            # BUG FIX: these prompts wrongly addressed "Player 1" during
            # player 2's placement phase.
            ptr = int(input("Player 2, hide your ship (row) between 0-" + str((board_size) - 1) + ": "))
            if ptr < 0 or ptr > board_size - 1:
                print ("Thats not between 0-" + str((board_size - 1)) + "!")
        player_two_r.append(ptr)
        ptc = -1
        while ptc < 0 or ptc > board_size - 1:
            ptc = int(input("Player 2, hide your ship (column) between 0-" + str((board_size) - 1) + ": "))
            if ptc < 0 or ptc > board_size - 1:
                print ("Thats not between 0-" + str((board_size) - 1) + "!")
        player_two_c.append(ptc)
    clear()
# Turns/Guesses
if player_number == 1:
    if player_select == "sink":
        turn_number = int(input("How many turns do you want?: "))
        ships_hit = 0
        for turn in range(turn_number):
            print ("Turn #: " + str(turn + 1))
            turn += 1  # 1-based turn counter for the end-of-game check below
            guess_row = int(input("Guess row (0-" + str(len(board) - 1) + "): "))
            guess_column = int(input("Guess column (0-" + str(len(board) - 1) + "): "))
            # Consolidates the three copy-pasted Number_Battleships==1/2/3
            # branches: a guess is a hit when it matches any placed ship
            # (Number_Battleships is validated to 1-3 above, so this is
            # behavior-equivalent to the original branch-per-count code).
            if any(guess_row == r and guess_column == c
                   for r, c in zip(ship_row, ship_column)):
                print ("You hit a battleship")
                if board[guess_row][guess_column] == "H":
                    print ("You guessed that already")
                elif board[guess_row][guess_column] == "~":
                    ships_hit += 1
                    board[guess_row][guess_column] = "H"
                if ships_hit == Number_Battleships:
                    print ("You win")
                    print_board(board)
                    break
            else:
                if (guess_row >= board_size or guess_row < 0) or (guess_column >= board_size or guess_column < 0):
                    print ("Thats not on the map")
                elif (board[guess_row][guess_column] == "X"):
                    print ("You guessed that already")
                else:
                    board[guess_row][guess_column] = "X"
                    print ("You missed my battleship")
            if turn == turn_number:
                # Out of turns: reveal the hidden fleet.
                print ("Game over")
                print_board(board)
                for i in range(Number_Battleships):
                    print ("My battleship was hidden at " + str(ship_row[i]) + "," + str(ship_column[i]))
    if player_select == "hide":
        # The computer gets two guesses per hidden ship.
        turn_number = (2 * Number_Battleships)
        ships_hit = 0
        for turn in range(turn_number):
            print ("Turn #: " + str(turn + 1))
            turn += 1
            # BUG FIX: the AI used randint(1, len(board)) while ship
            # coordinates are 0-indexed, so row/column 0 could never be hit
            # and len(board) was wasted on an impossible guess; guess in the
            # same 0-indexed range the ships occupy and index the board
            # directly (the old code compensated with a -1 shift on marks).
            guess_row = random.randint(0, len(board) - 1)
            guess_column = random.randint(0, len(board) - 1)
            print (guess_row)
            print (guess_column)
            if any(guess_row == r and guess_column == c
                   for r, c in zip(ship_row, ship_column)):
                print ("I hit your battleship")
                ships_hit += 1
                board[guess_row][guess_column] = "H"
                if ships_hit == Number_Battleships:
                    print ("I win")
                    break
            else:
                board[guess_row][guess_column] = "X"
            if turn == turn_number:
                print ("I Lost")
                print_board(board)
if player_number == 2:
    turn_number = int(input("How many turns do you want?: ")) * 2
    p1_ships_hit = 0
    p2_ships_hit = 0
    for turn in range(turn_number):
        if turn == 0:
            print (" ")
        # NOTE(review): player 2 moves on even turns (including turn 0) and
        # player 1 on odd turns — preserved from the original flow.
        if (turn % 2) != 0:
            # Player 1 fires at player 2's half of the combined board; the
            # right half starts at column offset len(board) + 1 (past the
            # "|" separator column).
            print ("P1 Turn #: " + str(int((turn) / 2 + 1)))
            guess_row = int(input("Guess row (0-" + str(len(board) - 1) + "): "))
            guess_column = int(input("Guess column (0-" + str(len(board) - 1) + "): "))
            # Consolidates the three copy-pasted Number_Battleships branches
            # (count validated to 1-3 during placement).
            if any(guess_row == r and guess_column == c
                   for r, c in zip(player_two_r, player_two_c)):
                print ("You hit a battleship")
                if board[guess_row][guess_column + (len(board) + 1)] == "H":
                    print ("You guessed that already")
                elif board[guess_row][guess_column + (len(board) + 1)] == "~":
                    p1_ships_hit += 1
                    board[guess_row][guess_column + (len(board) + 1)] = "H"
                if p1_ships_hit == Number_Battleships:
                    print ("Player One Wins")
                    print_board(board)
                    break
            else:
                if (guess_row >= board_size or guess_row < 0) or (guess_column >= board_size or guess_column < 0):
                    print ("Thats not on the map")
                elif (board[guess_row][guess_column + (len(board) + 1)] == "X"):
                    print ("You guessed that already")
                else:
                    board[guess_row][guess_column + (len(board) + 1)] = "X"
                    print ("You missed my battleship")
            print_board(board)
        if (turn % 2) == 0:
            # Player 2 fires at player 1's half (left side, no offset).
            print ("P2 Turn #: " + str(int(turn / 2) + 1))
            guess_row = int(input("Guess row (0-" + str(len(board) - 1) + "): "))
            guess_column = int(input("Guess column (0-" + str(len(board) - 1) + "): "))
            if any(guess_row == r and guess_column == c
                   for r, c in zip(player_one_r, player_one_c)):
                print ("You hit a battleship")
                if board[guess_row][guess_column] == "H":
                    print ("You guessed that already")
                elif board[guess_row][guess_column] == "~":
                    p2_ships_hit += 1
                    board[guess_row][guess_column] = "H"
                if p2_ships_hit == Number_Battleships:
                    print ("Player Two Wins")
                    print_board(board)
                    break
            else:
                if (guess_row >= board_size or guess_row < 0) or (guess_column >= board_size or guess_column < 0):
                    print ("Thats not on the map")
                elif (board[guess_row][guess_column] == "X"):
                    print ("You guessed that already")
                else:
                    board[guess_row][guess_column] = "X"
                    print ("You missed my battleship")
            print_board(board)
| 48.793427
| 238
| 0.47787
| 2,378
| 20,786
| 3.964256
| 0.043314
| 0.085711
| 0.07033
| 0.085923
| 0.892861
| 0.865493
| 0.849369
| 0.813939
| 0.806725
| 0.798557
| 0
| 0.022898
| 0.409603
| 20,786
| 426
| 239
| 48.793427
| 0.745274
| 0.003368
| 0
| 0.769029
| 0
| 0
| 0.109593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010499
| false
| 0
| 0.005249
| 0.005249
| 0.020997
| 0.293963
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
387521918517eaa6925211d925d2f35d100de40f
| 5,150
|
py
|
Python
|
pysal/spreg/tests/test_probit.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/spreg/tests/test_probit.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
pysal/spreg/tests/test_probit.py
|
cubensys/pysal
|
8d50990f6e6603ba79ae1a887a20a1e3a0734e51
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-07-19T01:46:17.000Z
|
2021-07-19T01:46:17.000Z
|
import unittest
import pysal
import numpy as np
from pysal.spreg import probit as PB
from pysal.common import RTOL
class TestBaseProbit(unittest.TestCase):
    """Regression tests for spreg.probit.BaseProbit on the columbus data."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        crime = np.reshape(np.array(db.by_col("CRIME")), (49, 1))
        self.y = (crime > 40).astype(float)
        self.X = np.array([db.by_col("INC"), db.by_col("HOVAL")]).T
        self.X = np.hstack((np.ones(self.y.shape), self.X))
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        reg = PB.BaseProbit(self.y, self.X, w=self.w)
        # (actual, expected) pairs; all compared with assert_allclose at RTOL.
        checks = [
            (reg.betas, [[3.35381078], [-0.1996531], [-0.02951371]]),
            (reg.predy[0], [0.00174739]),
            (reg.n, 49),
            (reg.k, 3),
            (reg.y[0], [0.]),
            (reg.x[0], [1., 19.531, 80.467003]),
            (reg.vm, [[8.52813879e-01, -4.36272459e-02, -8.05171472e-03],
                      [-4.36272459e-02, 4.11381444e-03, -1.92834842e-04],
                      [-8.05171472e-03, -1.92834842e-04, 3.09660240e-04]]),
            (reg.xmean, [[1.], [14.37493876], [38.43622447]]),
            (reg.predpc, 85.714285714285708),
            (reg.logl, -20.06009093055782),
            (reg.scale, 0.23309310130643665),
            (reg.slopes, [[-0.04653776], [-0.00687944]]),
            (reg.slopes_vm, [[1.77101993e-04, -1.65021168e-05],
                             [-1.65021168e-05, 1.60575016e-05]]),
            (reg.LR[0], 25.317683245671716),
            (reg.Pinkse_error[0], 2.9632385352516728),
            (reg.KP_error[0], 1.6509224700582124),
            (reg.PS_error[0], 2.3732463777623511),
        ]
        for actual, expected in checks:
            np.testing.assert_allclose(actual, expected, RTOL)
class TestProbit(unittest.TestCase):
    """Regression tests for the user-facing spreg.probit.Probit wrapper."""

    def setUp(self):
        db = pysal.open(pysal.examples.get_path("columbus.dbf"), "r")
        crime = np.reshape(np.array(db.by_col("CRIME")), (49, 1))
        self.y = (crime > 40).astype(float)
        # Note: unlike TestBaseProbit, no constant column is added here —
        # Probit inserts the intercept itself.
        self.X = np.array([db.by_col("INC"), db.by_col("HOVAL")]).T
        self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
        self.w.transform = 'r'

    def test_model(self):
        reg = PB.Probit(self.y, self.X, w=self.w)
        # (actual, expected) pairs; all compared with assert_allclose at RTOL.
        checks = [
            (reg.betas, [[3.35381078], [-0.1996531], [-0.02951371]]),
            (reg.predy[0], [0.00174739]),
            (reg.n, 49),
            (reg.k, 3),
            (reg.y[0], [0.]),
            (reg.x[0], [1., 19.531, 80.467003]),
            (reg.vm, [[8.52813879e-01, -4.36272459e-02, -8.05171472e-03],
                      [-4.36272459e-02, 4.11381444e-03, -1.92834842e-04],
                      [-8.05171472e-03, -1.92834842e-04, 3.09660240e-04]]),
            (reg.xmean, [[1.], [14.37493876], [38.43622447]]),
            (reg.predpc, 85.714285714285708),
            (reg.logl, -20.06009093055782),
            (reg.scale, 0.23309310130643665),
            (reg.slopes, [[-0.04653776], [-0.00687944]]),
            (reg.slopes_vm, [[1.77101993e-04, -1.65021168e-05],
                             [-1.65021168e-05, 1.60575016e-05]]),
            (reg.LR[0], 25.317683245671716),
            (reg.Pinkse_error[0], 2.9632385352516728),
            (reg.KP_error[0], 1.6509224700582124),
            (reg.PS_error[0], 2.3732463777623511),
        ]
        for actual, expected in checks:
            np.testing.assert_allclose(actual, expected, RTOL)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 46.818182
| 191
| 0.620388
| 738
| 5,150
| 4.218157
| 0.161247
| 0.098297
| 0.163829
| 0.251205
| 0.936717
| 0.936717
| 0.936717
| 0.936717
| 0.936717
| 0.936717
| 0
| 0.18268
| 0.221942
| 5,150
| 109
| 192
| 47.247706
| 0.59421
| 0
| 0
| 0.884615
| 0
| 0
| 0.016699
| 0
| 0
| 0
| 0
| 0
| 0.326923
| 1
| 0.038462
| false
| 0
| 0.048077
| 0
| 0.105769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3897772a2bc14e5c71f3b526a9bd42daa63df676
| 5,195
|
py
|
Python
|
tests/certificatemanager_test.py
|
Kosinkadink/ceptic
|
06d03ffbad6c28e40c541053218dbea7383eea1c
|
[
"MIT"
] | 2
|
2017-07-18T03:12:12.000Z
|
2019-11-21T20:00:25.000Z
|
tests/certificatemanager_test.py
|
Kosinkadink/ceptic
|
06d03ffbad6c28e40c541053218dbea7383eea1c
|
[
"MIT"
] | null | null | null |
tests/certificatemanager_test.py
|
Kosinkadink/ceptic
|
06d03ffbad6c28e40c541053218dbea7383eea1c
|
[
"MIT"
] | null | null | null |
import os
import pytest
from ceptic.certificatemanager import CertificateManager, CertificateManagerException, create_ssl_config
# FIXTURES
@pytest.fixture(scope="module")
def locations():
    """Module-scoped object exposing the test dir and its cert folders."""
    class _RealObject(object):
        def __init__(self):
            here = os.path.dirname(__file__)
            self.test_dir = os.path.join(os.path.realpath(os.path.join(os.getcwd(), here)))
            self.server_certs = os.path.join(self.test_dir, "server_certs")
            self.client_certs = os.path.join(self.test_dir, "client_certs")
    return _RealObject()
# END FIXTURES
# TESTS:
# Client Tests
def test_client_generage_context_all_files(locations):
    """Client context builds when cert, key, and CA file are all supplied."""
    certs = locations.client_certs
    config = create_ssl_config(
        certfile=os.path.join(certs, "cert_client.pem"),
        keyfile=os.path.join(certs, "key_client.pem"),
        cafile=os.path.join(certs, "cert_server.pem"))
    manager = CertificateManager.client(config)
    assert manager.generate_context_tls == manager.generate_context_client
    manager.generate_context_tls()
def test_client_generate_context_certfile_keyfile_only(locations):
    """Client context builds with just a cert/key pair (no CA file)."""
    certs = locations.client_certs
    config = create_ssl_config(
        certfile=os.path.join(certs, "cert_client.pem"),
        keyfile=os.path.join(certs, "key_client.pem"))
    manager = CertificateManager.client(config)
    assert manager.generate_context_tls == manager.generate_context_client
    manager.generate_context_tls()
def test_client_generage_context_no_files(locations):
    """A client manager with no config at all still builds a context."""
    manager = CertificateManager.client()
    assert manager.generate_context_tls == manager.generate_context_client
    manager.generate_context_tls()
def test_client_generage_context_not_secure(locations):
    """secure=False lets the client context build without any cert files."""
    manager = CertificateManager.client(create_ssl_config(secure=False))
    assert manager.generate_context_tls == manager.generate_context_client
    manager.generate_context_tls()
def test_client_generate_context_certfile_only_raises_exception(locations):
    """A cert file without its key file must raise CertificateManagerException."""
    cert = os.path.join(locations.client_certs, "cert_client.pem")
    manager = CertificateManager.client(create_ssl_config(certfile=cert))
    assert manager.generate_context_tls == manager.generate_context_client
    with pytest.raises(CertificateManagerException):
        manager.generate_context_tls()
def test_client_generate_context_keyfile_only_raises_exception(locations):
    """A key file without its cert file must raise CertificateManagerException."""
    key = os.path.join(locations.client_certs, "key_client.pem")
    manager = CertificateManager.client(create_ssl_config(keyfile=key))
    assert manager.generate_context_tls == manager.generate_context_client
    with pytest.raises(CertificateManagerException):
        manager.generate_context_tls()
# Server
def test_server_generate_context_all_files(locations):
    """Server context builds when cert, key, and CA file are all supplied."""
    certs = locations.server_certs
    config = create_ssl_config(
        certfile=os.path.join(certs, "cert_server.pem"),
        keyfile=os.path.join(certs, "key_server.pem"),
        cafile=os.path.join(certs, "cert_client.pem"))
    manager = CertificateManager.server(config)
    assert manager.generate_context_tls == manager.generate_context_server
    manager.generate_context_tls()
def test_server_generate_context_certfile_keyfile_only(locations):
    """Server context builds with just a cert/key pair (no CA file)."""
    certs = locations.server_certs
    config = create_ssl_config(
        certfile=os.path.join(certs, "cert_server.pem"),
        keyfile=os.path.join(certs, "key_server.pem"))
    manager = CertificateManager.server(config)
    assert manager.generate_context_tls == manager.generate_context_server
    manager.generate_context_tls()
def test_server_generage_context_no_files_raises_exception(locations):
    """Unlike the client, a server with no config must refuse to build."""
    manager = CertificateManager.server()
    assert manager.generate_context_tls == manager.generate_context_server
    with pytest.raises(CertificateManagerException):
        manager.generate_context_tls()
def test_server_generage_context_not_secure(locations):
    """secure=False lets the server context build without any cert files."""
    manager = CertificateManager.server(create_ssl_config(secure=False))
    assert manager.generate_context_tls == manager.generate_context_server
    manager.generate_context_tls()
def test_server_generate_context_certfile_only_raises_exception(locations):
    """A cert file without its key file must raise CertificateManagerException."""
    cert = os.path.join(locations.server_certs, "cert_server.pem")
    manager = CertificateManager.server(create_ssl_config(certfile=cert))
    assert manager.generate_context_tls == manager.generate_context_server
    with pytest.raises(CertificateManagerException):
        manager.generate_context_tls()
def test_server_generate_context_keyfile_only_raises_exception(locations):
    """A key file without its cert file must raise CertificateManagerException."""
    key = os.path.join(locations.server_certs, "key_server.pem")
    manager = CertificateManager.server(create_ssl_config(keyfile=key))
    assert manager.generate_context_tls == manager.generate_context_server
    with pytest.raises(CertificateManagerException):
        manager.generate_context_tls()
# END TESTS
| 41.56
| 104
| 0.786718
| 623
| 5,195
| 6.221509
| 0.096308
| 0.166409
| 0.204334
| 0.154799
| 0.872807
| 0.861971
| 0.858875
| 0.832559
| 0.832559
| 0.819401
| 0
| 0
| 0.126083
| 5,195
| 124
| 105
| 41.895161
| 0.853744
| 0.017324
| 0
| 0.685393
| 0
| 0
| 0.045909
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 1
| 0.157303
| false
| 0
| 0.033708
| 0
| 0.213483
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38a3833ea539bd2f1fe83f63cf9ae0990d8f271a
| 9,793
|
py
|
Python
|
SBaaS_quantification/stage01_quantification_peakInformation_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_peakInformation_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_peakInformation_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
from SBaaS_base.postgresql_orm_base import *
class data_stage01_quantification_peakInformation(Base):
    """Summary statistics for one peak-information parameter of a quantified component."""
    __tablename__ = 'data_stage01_quantification_peakInformation'
    id = Column(Integer, Sequence('data_stage01_quantification_peakInformation_id_seq'), primary_key=True)
    analysis_id = Column(String(50))
    # experiment_id = Column(postgresql.ARRAY(String(50)))  # blocked: policies must be removed first
    experiment_id = Column(String(50))
    component_group_name = Column(String(100))
    component_name = Column(String(500))
    peakInfo_parameter = Column(String(50))
    peakInfo_n = Column(Integer)
    peakInfo_ave = Column(Float)
    peakInfo_cv = Column(Float)
    peakInfo_lb = Column(Float)
    peakInfo_ub = Column(Float)
    peakInfo_units = Column(String(50))
    sample_names = Column(postgresql.ARRAY(String(100)))
    sample_name_abbreviation = Column(String(100))
    sample_types = Column(postgresql.ARRAY(String(100)))
    # NOTE(review): 'acqusition' spelling is kept deliberately — it matches the existing DB column.
    acqusition_date_and_times = Column(postgresql.ARRAY(DateTime))
    peakInfo_data = Column(postgresql.ARRAY(Float))
    used_ = Column(Boolean)
    comment_ = Column(Text)
    __table_args__ = (UniqueConstraint(
        'analysis_id', 'experiment_id', 'sample_name_abbreviation', 'component_name', 'peakInfo_parameter'),
    )

    def __init__(self,
                 row_dict_I,
                 ):
        """Populate every mapped column except ``id`` from *row_dict_I*.

        Raises ``KeyError`` when an expected key is missing (same order as before).
        """
        for key in ('analysis_id', 'sample_name_abbreviation', 'experiment_id',
                    'component_group_name', 'component_name', 'peakInfo_parameter',
                    'peakInfo_n', 'peakInfo_ave', 'peakInfo_cv', 'peakInfo_lb',
                    'peakInfo_ub', 'peakInfo_units', 'sample_names', 'sample_types',
                    'acqusition_date_and_times', 'peakInfo_data', 'used_', 'comment_'):
            setattr(self, key, row_dict_I[key])

    def __set__row__(self,
                     analysis_id_I,
                     experiment_id_I,
                     component_group_name_I,
                     component_name_I,
                     peakInfo_parameter_I,
                     peakInfo_n_I,
                     peakInfo_ave_I,
                     peakInfo_cv_I,
                     peakInfo_lb_I,
                     peakInfo_ub_I,
                     peakInfo_units_I,
                     sample_names_I,
                     sample_name_abbreviation_I,
                     sample_types_I,
                     acqusition_date_and_times_I,
                     peakInfo_data_I,
                     used__I,
                     comment__I):
        """Assign every mapped column from the individually supplied values."""
        columns = ('analysis_id', 'experiment_id', 'component_group_name',
                   'component_name', 'peakInfo_parameter', 'peakInfo_n',
                   'peakInfo_ave', 'peakInfo_cv', 'peakInfo_lb', 'peakInfo_ub',
                   'peakInfo_units', 'sample_names', 'sample_name_abbreviation',
                   'sample_types', 'acqusition_date_and_times', 'peakInfo_data',
                   'used_', 'comment_')
        values = (analysis_id_I, experiment_id_I, component_group_name_I,
                  component_name_I, peakInfo_parameter_I, peakInfo_n_I,
                  peakInfo_ave_I, peakInfo_cv_I, peakInfo_lb_I, peakInfo_ub_I,
                  peakInfo_units_I, sample_names_I, sample_name_abbreviation_I,
                  sample_types_I, acqusition_date_and_times_I, peakInfo_data_I,
                  used__I, comment__I)
        for column, value in zip(columns, values):
            setattr(self, column, value)

    def __repr__dict__(self):
        """Return a plain dict snapshot of all mapped columns, including ``id``."""
        ordered = ('analysis_id', 'experiment_id', 'component_group_name',
                   'component_name', 'peakInfo_parameter', 'peakInfo_n',
                   'peakInfo_ave', 'peakInfo_cv', 'peakInfo_lb', 'peakInfo_ub',
                   'peakInfo_units', 'sample_names', 'sample_name_abbreviation',
                   'sample_types', 'acqusition_date_and_times', 'peakInfo_data',
                   'used_', 'comment_')
        snapshot = {'id': self.id}
        for name in ordered:
            snapshot[name] = getattr(self, name)
        return snapshot

    def __repr__json__(self):
        """JSON-serialized form of ``__repr__dict__()``."""
        return json.dumps(self.__repr__dict__())
class data_stage01_quantification_peakResolution(Base):
    """Pairwise peak-resolution statistics between two quantified components."""
    __tablename__ = 'data_stage01_quantification_peakResolution'
    id = Column(Integer, Sequence('data_stage01_quantification_peakResolution_id_seq'), primary_key=True)
    analysis_id = Column(String(50))
    # experiment_id = Column(postgresql.ARRAY(String(50)))  # blocked: policies must be removed first
    experiment_id = Column(String(50))
    component_group_name_pair = Column(postgresql.ARRAY(String(100)))
    component_name_pair = Column(postgresql.ARRAY(String(500)))
    peakInfo_parameter = Column(String(50))
    peakInfo_n = Column(Integer)
    peakInfo_ave = Column(Float)
    peakInfo_cv = Column(Float)
    peakInfo_lb = Column(Float)
    peakInfo_ub = Column(Float)
    peakInfo_units = Column(String(50))
    sample_names = Column(postgresql.ARRAY(String(100)))
    sample_name_abbreviation = Column(String(100))
    sample_types = Column(postgresql.ARRAY(String(100)))
    # NOTE(review): 'acqusition' spelling is kept deliberately — it matches the existing DB column.
    acqusition_date_and_times = Column(postgresql.ARRAY(DateTime))
    peakInfo_data = Column(postgresql.ARRAY(Float))
    used_ = Column(Boolean)
    comment_ = Column(Text)
    __table_args__ = (UniqueConstraint(
        'analysis_id',
        'experiment_id',
        'sample_name_abbreviation',
        'component_name_pair',
        'peakInfo_parameter'),
    )

    def __init__(self,
                 row_dict_I,
                 ):
        """Populate every mapped column except ``id`` from *row_dict_I*.

        Raises ``KeyError`` when an expected key is missing (same order as before).
        """
        for key in ('peakInfo_units', 'peakInfo_ub', 'peakInfo_lb', 'peakInfo_cv',
                    'peakInfo_ave', 'peakInfo_parameter', 'experiment_id',
                    'component_group_name_pair', 'comment_', 'used_', 'peakInfo_data',
                    'acqusition_date_and_times', 'component_name_pair', 'sample_types',
                    'sample_names', 'analysis_id', 'sample_name_abbreviation', 'peakInfo_n'):
            setattr(self, key, row_dict_I[key])

    def __set__row__(self,
                     analysis_id_I,
                     experiment_id_I,
                     component_group_name_pair_I,
                     component_name_pair_I,
                     peakInfo_parameter_I,
                     peakInfo_n_I,
                     peakInfo_ave_I,
                     peakInfo_cv_I,
                     peakInfo_lb_I,
                     peakInfo_ub_I,
                     peakInfo_units_I,
                     sample_names_I,
                     sample_name_abbreviation_I,
                     sample_types_I,
                     acqusition_date_and_times_I,
                     peakInfo_data_I,
                     used__I,
                     comment__I):
        """Assign every mapped column from the individually supplied values."""
        columns = ('analysis_id', 'experiment_id', 'component_group_name_pair',
                   'component_name_pair', 'peakInfo_parameter', 'peakInfo_n',
                   'peakInfo_ave', 'peakInfo_cv', 'peakInfo_lb', 'peakInfo_ub',
                   'peakInfo_units', 'sample_names', 'sample_name_abbreviation',
                   'sample_types', 'acqusition_date_and_times', 'peakInfo_data',
                   'used_', 'comment_')
        values = (analysis_id_I, experiment_id_I, component_group_name_pair_I,
                  component_name_pair_I, peakInfo_parameter_I, peakInfo_n_I,
                  peakInfo_ave_I, peakInfo_cv_I, peakInfo_lb_I, peakInfo_ub_I,
                  peakInfo_units_I, sample_names_I, sample_name_abbreviation_I,
                  sample_types_I, acqusition_date_and_times_I, peakInfo_data_I,
                  used__I, comment__I)
        for column, value in zip(columns, values):
            setattr(self, column, value)

    def __repr__dict__(self):
        """Return a plain dict snapshot of all mapped columns, including ``id``."""
        ordered = ('analysis_id', 'experiment_id', 'component_group_name_pair',
                   'component_name_pair', 'peakInfo_parameter', 'peakInfo_n',
                   'peakInfo_ave', 'peakInfo_cv', 'peakInfo_lb', 'peakInfo_ub',
                   'peakInfo_units', 'sample_names', 'sample_name_abbreviation',
                   'sample_types', 'acqusition_date_and_times', 'peakInfo_data',
                   'used_', 'comment_')
        snapshot = {'id': self.id}
        for name in ordered:
            snapshot[name] = getattr(self, name)
        return snapshot

    def __repr__json__(self):
        """JSON-serialized form of ``__repr__dict__()``."""
        return json.dumps(self.__repr__dict__())
| 43.71875
| 106
| 0.67344
| 1,186
| 9,793
| 5.027825
| 0.061551
| 0.096596
| 0.050981
| 0.059031
| 0.951367
| 0.901057
| 0.861311
| 0.838504
| 0.825088
| 0.715244
| 0
| 0.008308
| 0.237925
| 9,793
| 224
| 107
| 43.71875
| 0.790701
| 0.017359
| 0
| 0.835681
| 0
| 0
| 0.1402
| 0.049678
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037559
| false
| 0
| 0.004695
| 0.018779
| 0.267606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a3bbd1b6d4b16dcd3008d93fcfcd45dfbd69c10
| 147
|
py
|
Python
|
src/core_backend/libs/__init__.py
|
jhchen3121/wechat_shop
|
c9d9ad009df1e5bb0eb23ca8d830dd5c15df5328
|
[
"Apache-2.0"
] | null | null | null |
src/core_backend/libs/__init__.py
|
jhchen3121/wechat_shop
|
c9d9ad009df1e5bb0eb23ca8d830dd5c15df5328
|
[
"Apache-2.0"
] | 5
|
2021-01-28T21:18:27.000Z
|
2022-03-25T19:10:01.000Z
|
src/core_backend/libs/__init__.py
|
jhchen3121/wechat_shop
|
c9d9ad009df1e5bb0eb23ca8d830dd5c15df5328
|
[
"Apache-2.0"
] | null | null | null |
import sys, os
#sys.path.append(os.path.join(os.getcwd(),'.'))
#sys.path.append(os.path.join(os.getcwd(),'./database/'))
#print "loading libs..."
| 24.5
| 57
| 0.659864
| 23
| 147
| 4.217391
| 0.478261
| 0.14433
| 0.268041
| 0.309278
| 0.639175
| 0.639175
| 0.639175
| 0.639175
| 0
| 0
| 0
| 0
| 0.061224
| 147
| 5
| 58
| 29.4
| 0.702899
| 0.85034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2a58c192ad92ad404a648571e5b62463ffd8c3d6
| 157
|
py
|
Python
|
HackerRank/Python/Integers Come In All Sizes.py
|
AkashSCIENTIST/CompetitiveSolutions
|
236db303a21c7195ebf721394a54ce9df70782f5
|
[
"Apache-2.0"
] | null | null | null |
HackerRank/Python/Integers Come In All Sizes.py
|
AkashSCIENTIST/CompetitiveSolutions
|
236db303a21c7195ebf721394a54ce9df70782f5
|
[
"Apache-2.0"
] | null | null | null |
HackerRank/Python/Integers Come In All Sizes.py
|
AkashSCIENTIST/CompetitiveSolutions
|
236db303a21c7195ebf721394a54ce9df70782f5
|
[
"Apache-2.0"
] | null | null | null |
# Enter your code here. Read input from STDIN. Print output to STDOUT
def power_sum(a, b, c, d):
    """Return a**b + c**d.

    Python integers are arbitrary precision, so arbitrarily large
    exponent results are exact — no overflow handling is needed.
    """
    return a ** b + c ** d


def main():
    # Read four integers, one per line, from STDIN.
    a, b, c, d = (int(input()) for _ in range(4))
    print(power_sum(a, b, c, d))


# Guard the I/O so the module can be imported (and power_sum unit-tested)
# without consuming STDIN; running as a script behaves exactly as before.
if __name__ == "__main__":
    main()
| 39.25
| 70
| 0.630573
| 29
| 157
| 3.413793
| 0.586207
| 0.323232
| 0.333333
| 0.484848
| 0.323232
| 0.323232
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159236
| 157
| 3
| 71
| 52.333333
| 0.75
| 0.426752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
2a59bc49e7f0048c2872c34061d9327d6d57c550
| 98,954
|
py
|
Python
|
swagger_client/api/api_v2___to_move_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
swagger_client/api/api_v2___to_move_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
swagger_client/api/api_v2___to_move_api.py
|
Fates-List/fateslist.py-autogen
|
0643434d9d0e71f781f99b2703a2ef52f49d8875
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Fates List
Current API: v2 beta 3 Default API: v2 API Docs: https://apidocs.fateslist.xyz Enum Reference: https://apidocs.fateslist.xyz/structures/enums.autogen # noqa: E501
OpenAPI spec version: 0.3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class APIV2ToMoveApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API wrapper to *api_client*, creating a default ApiClient when none is given."""
    self.api_client = api_client if api_client is not None else ApiClient()
def add_command_api_v2_bots_bot_id_commands_post(self, body, bot_id, **kwargs):  # noqa: E501
    """Add Command  # noqa: E501

    Adds a command to your bot.  Synchronous by default; pass async_req=True
    to get back the worker thread instead of the decoded response.

    >>> thread = api.add_command_api_v2_bots_bot_id_commands_post(body, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotCommand body: (required)
    :param int bot_id: (required)
    :return: IDResponse
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.add_command_api_v2_bots_bot_id_commands_post_with_http_info(body, bot_id, **kwargs)  # noqa: E501
def add_command_api_v2_bots_bot_id_commands_post_with_http_info(self, body, bot_id, **kwargs):  # noqa: E501
    """Add Command  # noqa: E501

    Adds a command to your bot.  Synchronous by default; pass async_req=True
    to get back the worker thread instead of the decoded response.

    >>> thread = api.add_command_api_v2_bots_bot_id_commands_post_with_http_info(body, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotCommand body: (required)
    :param int bot_id: (required)
    :return: IDResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Every keyword the caller may legally supply.
    all_params = ['body', 'bot_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far (self, body, bot_id, kwargs, all_params);
    # extra kwargs are folded in below, so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_command_api_v2_bots_bot_id_commands_post" % key
            )
        params[key] = val
    del params['kwargs']

    # Both parameters are mandatory and must not be None.
    for name in ('body', 'bot_id'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` when calling `add_command_api_v2_bots_bot_id_commands_post`" % name)  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Negotiate JSON in both directions.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # This endpoint authenticates with the bot token.
    auth_settings = ['Bot']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/commands', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IDResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_guild_api_api_v2_servers_guild_id_post(self, body, user_id, guild_id, **kwargs):  # noqa: E501
    """Add Guild Api  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.add_guild_api_api_v2_servers_guild_id_post(body, user_id, guild_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ServersAdd body: (required)
    :param int user_id: (required)
    :param int guild_id: (required)
    :return: Object
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.add_guild_api_api_v2_servers_guild_id_post_with_http_info(body, user_id, guild_id, **kwargs)  # noqa: E501
def add_guild_api_api_v2_servers_guild_id_post_with_http_info(self, body, user_id, guild_id, **kwargs):  # noqa: E501
    """Add Guild Api  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.add_guild_api_api_v2_servers_guild_id_post_with_http_info(body, user_id, guild_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ServersAdd body: (required)
    :param int user_id: (required)
    :param int guild_id: (required)
    :return: Object
        If the method is called asynchronously, returns the request thread.
    """
    # Every keyword the caller may legally supply.
    all_params = ['body', 'user_id', 'guild_id', 'async_req',
                  '_return_http_data_only', '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far; extra kwargs are folded in below,
    # so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_guild_api_api_v2_servers_guild_id_post" % key
            )
        params[key] = val
    del params['kwargs']

    # All three parameters are mandatory and must not be None.
    for name in ('body', 'user_id', 'guild_id'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` when calling `add_guild_api_api_v2_servers_guild_id_post`" % name)  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'guild_id' in params:
        path_params['guild_id'] = params['guild_id']  # noqa: E501

    # user_id travels in the query string rather than the path.
    query_params = []
    if 'user_id' in params:
        query_params.append(('user_id', params['user_id']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Negotiate JSON in both directions.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # This endpoint authenticates with the user token.
    auth_settings = ['User']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/servers/{guild_id}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bots_index_page_api_v2_index_bots_get(self, **kwargs):  # noqa: E501
    """Bots Index Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc.  Synchronous by
    default; pass async_req=True to get back the worker thread instead
    of the decoded response.

    >>> thread = api.bots_index_page_api_v2_index_bots_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: BotIndex
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.bots_index_page_api_v2_index_bots_get_with_http_info(**kwargs)  # noqa: E501
def bots_index_page_api_v2_index_bots_get_with_http_info(self, **kwargs):  # noqa: E501
    """Bots Index Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc.  Synchronous by
    default; pass async_req=True to get back the worker thread instead
    of the decoded response.

    >>> thread = api.bots_index_page_api_v2_index_bots_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: BotIndex
        If the method is called asynchronously, returns the request thread.
    """
    # Only the framework control keywords are accepted; the endpoint itself
    # takes no parameters.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far; extra kwargs are folded in below,
    # so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bots_index_page_api_v2_index_bots_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # GET request: only the Accept header needs negotiating.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Public endpoint — no authentication required.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/index/bots', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BotIndex',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def bots_search_page_api_v2_search_bots_get(self, q, **kwargs):  # noqa: E501
    """Bots Search Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc.  Q is the query to
    search for.  Synchronous by default; pass async_req=True to get back
    the worker thread instead of the decoded response.

    >>> thread = api.bots_search_page_api_v2_search_bots_get(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: (required)
    :return: BotSearch
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.bots_search_page_api_v2_search_bots_get_with_http_info(q, **kwargs)  # noqa: E501
def bots_search_page_api_v2_search_bots_get_with_http_info(self, q, **kwargs):  # noqa: E501
    """Bots Search Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc.  Q is the query to
    search for.  Synchronous by default; pass async_req=True to get back
    the worker thread instead of the decoded response.

    >>> thread = api.bots_search_page_api_v2_search_bots_get_with_http_info(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: (required)
    :return: BotSearch
        If the method is called asynchronously, returns the request thread.
    """
    # Every keyword the caller may legally supply.
    all_params = ['q', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far; extra kwargs are folded in below,
    # so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bots_search_page_api_v2_search_bots_get" % key
            )
        params[key] = val
    del params['kwargs']

    # The search query is mandatory and must not be None.
    if params.get('q') is None:
        raise ValueError("Missing the required parameter `q` when calling `bots_search_page_api_v2_search_bots_get`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # The query text travels in the query string.
    query_params = []
    if 'q' in params:
        query_params.append(('q', params['q']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # GET request: only the Accept header needs negotiating.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Public endpoint — no authentication required.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/search/bots', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BotSearch',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_command_api_v2_bots_bot_id_commands_id_delete(self, bot_id, id, **kwargs):  # noqa: E501
    """Delete Command  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.delete_command_api_v2_bots_bot_id_commands_id_delete(bot_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str id: (required)
    :return: APIResponse
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.delete_command_api_v2_bots_bot_id_commands_id_delete_with_http_info(bot_id, id, **kwargs)  # noqa: E501
def delete_command_api_v2_bots_bot_id_commands_id_delete_with_http_info(self, bot_id, id, **kwargs):  # noqa: E501
    """Delete Command  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.delete_command_api_v2_bots_bot_id_commands_id_delete_with_http_info(bot_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str id: (required)
    :return: APIResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Every keyword the caller may legally supply.
    all_params = ['bot_id', 'id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far; extra kwargs are folded in below,
    # so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_command_api_v2_bots_bot_id_commands_id_delete" % key
            )
        params[key] = val
    del params['kwargs']

    # Both parameters are mandatory and must not be None.
    for name in ('bot_id', 'id'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_command_api_v2_bots_bot_id_commands_id_delete`" % name)  # noqa: E501

    collection_formats = {}

    # Both identifiers are substituted into the URL path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # DELETE request: only the Accept header needs negotiating.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # This endpoint authenticates with the bot token.
    auth_settings = ['Bot']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/commands/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='APIResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def edit_command_api_v2_bots_bot_id_commands_id_patch(self, body, bot_id, id, **kwargs):  # noqa: E501
    """Edit Command  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.edit_command_api_v2_bots_bot_id_commands_id_patch(body, bot_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotCommand body: (required)
    :param int bot_id: (required)
    :param str id: (required)
    :return: APIResponse
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips the HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the *_with_http_info worker; with
    # _return_http_data_only set, its return value needs no further unwrapping.
    return self.edit_command_api_v2_bots_bot_id_commands_id_patch_with_http_info(body, bot_id, id, **kwargs)  # noqa: E501
def edit_command_api_v2_bots_bot_id_commands_id_patch_with_http_info(self, body, bot_id, id, **kwargs):  # noqa: E501
    """Edit Command  # noqa: E501

    Synchronous by default; pass async_req=True to get back the worker
    thread instead of the decoded response.

    >>> thread = api.edit_command_api_v2_bots_bot_id_commands_id_patch_with_http_info(body, bot_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotCommand body: (required)
    :param int bot_id: (required)
    :param str id: (required)
    :return: APIResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Every keyword the caller may legally supply.
    all_params = ['body', 'bot_id', 'id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # Snapshot of locals defined so far; extra kwargs are folded in below,
    # so no new locals may precede this call.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method edit_command_api_v2_bots_bot_id_commands_id_patch" % key
            )
        params[key] = val
    del params['kwargs']

    # All three parameters are mandatory and must not be None.
    for name in ('body', 'bot_id', 'id'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` when calling `edit_command_api_v2_bots_bot_id_commands_id_patch`" % name)  # noqa: E501

    collection_formats = {}

    # Both identifiers are substituted into the URL path.
    path_params = {}
    if 'bot_id' in params:
        path_params['bot_id'] = params['bot_id']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Negotiate JSON in both directions.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # This endpoint authenticates with the bot token.
    auth_settings = ['Bot']  # noqa: E501

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/commands/{id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='APIResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_bot_events_api_api_v2_bots_bot_id_events_get(self, bot_id, **kwargs):  # noqa: E501
    """Get Bot Events Api  # noqa: E501

    Get bot events; all exclude and filter values must be comma
    separated.  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_bot_events_api_api_v2_bots_bot_id_events_get(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str exclude:
    :param str filter:
    :return: BotEvents
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method; in the
    # async case that call already returns the request thread.
    return self.get_bot_events_api_api_v2_bots_bot_id_events_get_with_http_info(bot_id, **kwargs)  # noqa: E501
def get_bot_events_api_api_v2_bots_bot_id_events_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Get Bot Events Api  # noqa: E501

    Get bot events; all exclude and filter values must be comma
    separated.  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_bot_events_api_api_v2_bots_bot_id_events_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str exclude:
    :param str filter:
    :return: BotEvents
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bot_id', 'exclude', 'filter']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments into an explicit params dict, rejecting
    # anything the endpoint does not recognise.
    params = {'bot_id': bot_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_bot_events_api_api_v2_bots_bot_id_events_get" % key
            )
        params[key] = val

    # 'bot_id' is required and must not be None.
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `get_bot_events_api_api_v2_bots_bot_id_events_get`")  # noqa: E501

    path_params = {'bot_id': params['bot_id']}
    # Optional query string parameters, in declaration order.
    query_params = [(name, params[name])
                    for name in ('exclude', 'filter') if name in params]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/events', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BotEvents',  # noqa: E501
        auth_settings=['Bot'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_bot_ws_events_api_v2_bots_bot_id_ws_events_get(self, bot_id, **kwargs):  # noqa: E501
    """Get Bot Ws Events  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_bot_ws_events_api_v2_bots_bot_id_ws_events_get(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_bot_ws_events_api_v2_bots_bot_id_ws_events_get_with_http_info(bot_id, **kwargs)  # noqa: E501
def get_bot_ws_events_api_v2_bots_bot_id_ws_events_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Get Bot Ws Events  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_bot_ws_events_api_v2_bots_bot_id_ws_events_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bot_id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'bot_id': bot_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_bot_ws_events_api_v2_bots_bot_id_ws_events_get" % key
            )
        params[key] = val

    # 'bot_id' is required and must not be None.
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `get_bot_ws_events_api_v2_bots_bot_id_ws_events_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/ws_events', 'GET',
        {'bot_id': params['bot_id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Object',  # noqa: E501
        auth_settings=['Bot'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_commands_api_v2_bots_bot_id_commands_get(self, bot_id, **kwargs):  # noqa: E501
    """Get Commands  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_commands_api_v2_bots_bot_id_commands_get(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str filter:
    :param str lang:
    :return: BotCommandsGet
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_commands_api_v2_bots_bot_id_commands_get_with_http_info(bot_id, **kwargs)  # noqa: E501
def get_commands_api_v2_bots_bot_id_commands_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Get Commands  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_commands_api_v2_bots_bot_id_commands_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :param str filter:
    :param str lang:
    :return: BotCommandsGet
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bot_id', 'filter', 'lang']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'bot_id': bot_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_commands_api_v2_bots_bot_id_commands_get" % key
            )
        params[key] = val

    # 'bot_id' is required and must not be None.
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `get_commands_api_v2_bots_bot_id_commands_get`")  # noqa: E501

    path_params = {'bot_id': params['bot_id']}
    # Optional query string parameters, in declaration order.
    query_params = [(name, params[name])
                    for name in ('filter', 'lang') if name in params]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/commands', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BotCommandsGet',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_feature_api_api_v2_features_name_get(self, name, **kwargs):  # noqa: E501
    """Get Feature Api  # noqa: E501

    Gets a feature given its internal name (custom_prefix, open_source
    etc).  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_feature_api_api_v2_features_name_get(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: (required)
    :return: FLFeature
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_feature_api_api_v2_features_name_get_with_http_info(name, **kwargs)  # noqa: E501
def get_feature_api_api_v2_features_name_get_with_http_info(self, name, **kwargs):  # noqa: E501
    """Get Feature Api  # noqa: E501

    Gets a feature given its internal name (custom_prefix, open_source
    etc).  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_feature_api_api_v2_features_name_get_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: (required)
    :return: FLFeature
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'name': name}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_feature_api_api_v2_features_name_get" % key
            )
        params[key] = val

    # 'name' is required and must not be None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_feature_api_api_v2_features_name_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/features/{name}', 'GET',
        {'name': params['name']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FLFeature',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_maintenance_mode_api_v2_bots_bot_id_maintenance_get(self, bot_id, **kwargs):  # noqa: E501
    """Get Maintenance Mode  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_maintenance_mode_api_v2_bots_bot_id_maintenance_get(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :return: BotMaintenance
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_maintenance_mode_api_v2_bots_bot_id_maintenance_get_with_http_info(bot_id, **kwargs)  # noqa: E501
def get_maintenance_mode_api_v2_bots_bot_id_maintenance_get_with_http_info(self, bot_id, **kwargs):  # noqa: E501
    """Get Maintenance Mode  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_maintenance_mode_api_v2_bots_bot_id_maintenance_get_with_http_info(bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int bot_id: (required)
    :return: BotMaintenance
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bot_id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'bot_id': bot_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_maintenance_mode_api_v2_bots_bot_id_maintenance_get" % key
            )
        params[key] = val

    # 'bot_id' is required and must not be None.
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `get_maintenance_mode_api_v2_bots_bot_id_maintenance_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/maintenance', 'GET',
        {'bot_id': params['bot_id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BotMaintenance',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_tags_api_api_v2_tags_name_get(self, name, **kwargs):  # noqa: E501
    """Get Tags Api  # noqa: E501

    Gets a tag given its internal name (custom_prefix, open_source
    etc).  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_tags_api_api_v2_tags_name_get(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: (required)
    :return: FLTag
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_tags_api_api_v2_tags_name_get_with_http_info(name, **kwargs)  # noqa: E501
def get_tags_api_api_v2_tags_name_get_with_http_info(self, name, **kwargs):  # noqa: E501
    """Get Tags Api  # noqa: E501

    Gets a tag given its internal name (custom_prefix, open_source
    etc).  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.get_tags_api_api_v2_tags_name_get_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: (required)
    :return: FLTag
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'name': name}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tags_api_api_v2_tags_name_get" % key
            )
        params[key] = val

    # 'name' is required and must not be None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_tags_api_api_v2_tags_name_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/tags/{name}', 'GET',
        {'name': params['name']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FLTag',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_user_api_api_v2_users_user_id_get(self, user_id, **kwargs):  # noqa: E501
    """Get User Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_user_api_api_v2_users_user_id_get(user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int user_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_user_api_api_v2_users_user_id_get_with_http_info(user_id, **kwargs)  # noqa: E501
def get_user_api_api_v2_users_user_id_get_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """Get User Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_user_api_api_v2_users_user_id_get_with_http_info(user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int user_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['user_id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'user_id': user_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_api_api_v2_users_user_id_get" % key
            )
        params[key] = val

    # 'user_id' is required and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `get_user_api_api_v2_users_user_id_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}', 'GET',
        {'user_id': params['user_id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Object',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_vanity_api_api_v2_code_vanity_get(self, vanity, **kwargs):  # noqa: E501
    """Get Vanity Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_vanity_api_api_v2_code_vanity_get(vanity, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vanity: (required)
    :return: BotVanity
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.get_vanity_api_api_v2_code_vanity_get_with_http_info(vanity, **kwargs)  # noqa: E501
def get_vanity_api_api_v2_code_vanity_get_with_http_info(self, vanity, **kwargs):  # noqa: E501
    """Get Vanity Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.get_vanity_api_api_v2_code_vanity_get_with_http_info(vanity, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str vanity: (required)
    :return: BotVanity
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['vanity']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'vanity': vanity}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_vanity_api_api_v2_code_vanity_get" % key
            )
        params[key] = val

    # 'vanity' is required and must not be None.
    if params.get('vanity') is None:
        raise ValueError("Missing the required parameter `vanity` when calling `get_vanity_api_api_v2_code_vanity_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/code/{vanity}', 'GET',
        {'vanity': params['vanity']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BotVanity',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def prepare_servers_api_api_v2_users_user_id_servers_prepare_post(self, body, user_id, **kwargs):  # noqa: E501
    """Prepare Servers Api  # noqa: E501

    Prepares a user to add servers and returns available servers for
    said user. Scopes must have guild permission. This request may
    change the access token; the new token should be set on the client
    and is also returned in the JSON response.  Synchronous by
    default; pass async_req=True to get a request thread instead.  # noqa: E501

    >>> thread = api.prepare_servers_api_api_v2_users_user_id_servers_prepare_post(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ServerCheck body: (required)
    :param int user_id: (required)
    :return: ServerListAuthed
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.prepare_servers_api_api_v2_users_user_id_servers_prepare_post_with_http_info(body, user_id, **kwargs)  # noqa: E501
def prepare_servers_api_api_v2_users_user_id_servers_prepare_post_with_http_info(self, body, user_id, **kwargs):  # noqa: E501
    """Prepare Servers Api  # noqa: E501

    Prepares a user to add servers and returns available servers for
    said user. Scopes must have guild permission. This request may
    change the access token; the new token should be set on the client
    and is also returned in the JSON response.  Synchronous by
    default; pass async_req=True to get a request thread instead.  # noqa: E501

    >>> thread = api.prepare_servers_api_api_v2_users_user_id_servers_prepare_post_with_http_info(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ServerCheck body: (required)
    :param int user_id: (required)
    :return: ServerListAuthed
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'user_id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'body': body, 'user_id': user_id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method prepare_servers_api_api_v2_users_user_id_servers_prepare_post" % key
            )
        params[key] = val

    # Both positional parameters are required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `prepare_servers_api_api_v2_users_user_id_servers_prepare_post`")  # noqa: E501
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `prepare_servers_api_api_v2_users_user_id_servers_prepare_post`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}/servers/prepare', 'POST',
        {'user_id': params['user_id']},
        [],
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='ServerListAuthed',  # noqa: E501
        auth_settings=['User'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def preview_api_api_v2_preview_post(self, body, **kwargs):  # noqa: E501
    """Preview Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.preview_api_api_v2_preview_post(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PrevRequest body: (required)
    :param str lang:
    :return: PrevResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.preview_api_api_v2_preview_post_with_http_info(body, **kwargs)  # noqa: E501
def preview_api_api_v2_preview_post_with_http_info(self, body, **kwargs):  # noqa: E501
    """Preview Api  # noqa: E501

    Synchronous by default; pass async_req=True to get a request
    thread instead.  # noqa: E501

    >>> thread = api.preview_api_api_v2_preview_post_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PrevRequest body: (required)
    :param str lang:
    :return: PrevResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'lang']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge keyword arguments, rejecting unknown names.
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method preview_api_api_v2_preview_post" % key
            )
        params[key] = val

    # 'body' is required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `preview_api_api_v2_preview_post`")  # noqa: E501

    # Optional query string parameter.
    query_params = [('lang', params['lang'])] if 'lang' in params else []
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/preview', 'POST',
        {},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='PrevResponse',  # noqa: E501
        auth_settings=[],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def profiles_search_page_api_v2_search_profiles_get(self, q, **kwargs):  # noqa: E501
    """Profiles Search Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc. Q is the query to
    search for.  Synchronous by default; pass async_req=True to get a
    request thread instead.  # noqa: E501

    >>> thread = api.profiles_search_page_api_v2_search_profiles_get(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: (required)
    :return: ProfileSearch
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrappers always want just the data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same method.
    return self.profiles_search_page_api_v2_search_profiles_get_with_http_info(q, **kwargs)  # noqa: E501
def profiles_search_page_api_v2_search_profiles_get_with_http_info(self, q, **kwargs):  # noqa: E501
    """Profiles Search Page  # noqa: E501

    For any potential Android/iOS app, crawlers etc. Q is the query to
    search for.  Synchronous by default; pass ``async_req=True`` to get
    the request thread instead of the response.

    >>> thread = api.profiles_search_page_api_v2_search_profiles_get_with_http_info(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: (required)
    :return: ProfileSearch
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['q', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'q': q}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method profiles_search_page_api_v2_search_profiles_get" % arg
            )
        params[arg] = value
    # `q` is required and may not be None.
    if params.get('q') is None:
        raise ValueError("Missing the required parameter `q` when calling `profiles_search_page_api_v2_search_profiles_get`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/search/profiles', 'GET',
        {},  # no path parameters
        [('q', params['q'])],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProfileSearch',  # noqa: E501
        auth_settings=[],  # endpoint is unauthenticated
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def regenerate_user_token_api_v2_users_user_id_token_patch(self, user_id, **kwargs):  # noqa: E501
    """Regenerate User Token  # noqa: E501

    Regenerate the User API token ** User API Token**: You can get this by
    clicking your profile and scrolling to the bottom and you will see your
    API Token.  Synchronous by default; pass ``async_req=True`` to get the
    request thread instead of the decoded response.

    >>> thread = api.regenerate_user_token_api_v2_users_user_id_token_patch(user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int user_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.regenerate_user_token_api_v2_users_user_id_token_patch_with_http_info(user_id, **kwargs)  # noqa: E501
def regenerate_user_token_api_v2_users_user_id_token_patch_with_http_info(self, user_id, **kwargs):  # noqa: E501
    """Regenerate User Token  # noqa: E501

    Regenerate the User API token ** User API Token**: You can get this by
    clicking your profile and scrolling to the bottom and you will see your
    API Token.  Synchronous by default; pass ``async_req=True`` to get the
    request thread instead of the response.

    >>> thread = api.regenerate_user_token_api_v2_users_user_id_token_patch_with_http_info(user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int user_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['user_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'user_id': user_id}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method regenerate_user_token_api_v2_users_user_id_token_patch" % arg
            )
        params[arg] = value
    # `user_id` is required and may not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `regenerate_user_token_api_v2_users_user_id_token_patch`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}/token', 'PATCH',
        {'user_id': params['user_id']},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='APIResponse',  # noqa: E501
        auth_settings=['User'],  # requires the User auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_js_mode_api_v2_users_user_id_js_allowed_patch(self, body, user_id, **kwargs):  # noqa: E501
    """Set Js Mode  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the decoded response.

    >>> thread = api.set_js_mode_api_v2_users_user_id_js_allowed_patch(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserJSPatch body: (required)
    :param int user_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.set_js_mode_api_v2_users_user_id_js_allowed_patch_with_http_info(body, user_id, **kwargs)  # noqa: E501
def set_js_mode_api_v2_users_user_id_js_allowed_patch_with_http_info(self, body, user_id, **kwargs):  # noqa: E501
    """Set Js Mode  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the response.

    >>> thread = api.set_js_mode_api_v2_users_user_id_js_allowed_patch_with_http_info(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserJSPatch body: (required)
    :param int user_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['body', 'user_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'body': body, 'user_id': user_id}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_js_mode_api_v2_users_user_id_js_allowed_patch" % arg
            )
        params[arg] = value
    # Both positional parameters are required and may not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `set_js_mode_api_v2_users_user_id_js_allowed_patch`")  # noqa: E501
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `set_js_mode_api_v2_users_user_id_js_allowed_patch`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}/js_allowed', 'PATCH',
        {'user_id': params['user_id']},
        [],  # no query parameters
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='Object',  # noqa: E501
        auth_settings=['User'],  # requires the User auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch(self, body, bot_id, **kwargs):  # noqa: E501
    """Set Maintenance Mode  # noqa: E501

    Enable or disable maintenance mode for a bot.  **API Token**: You can
    get this by clicking your bot and clicking edit and scrolling down to
    API Token.  **Mode**: 1 enables maintenance, 2 enables long-lasting
    maintenance mode and 0 disables maintenance mode.  Synchronous by
    default; pass ``async_req=True`` to get the request thread instead of
    the decoded response.

    >>> thread = api.set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch(body, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotMaintenancePartial body: (required)
    :param int bot_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch_with_http_info(body, bot_id, **kwargs)  # noqa: E501
def set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch_with_http_info(self, body, bot_id, **kwargs):  # noqa: E501
    """Set Maintenance Mode  # noqa: E501

    Enable or disable maintenance mode for a bot.  **API Token**: You can
    get this by clicking your bot and clicking edit and scrolling down to
    API Token.  **Mode**: 1 enables maintenance, 2 enables long-lasting
    maintenance mode and 0 disables maintenance mode.  Synchronous by
    default; pass ``async_req=True`` to get the request thread instead of
    the response.

    >>> thread = api.set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch_with_http_info(body, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotMaintenancePartial body: (required)
    :param int bot_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['body', 'bot_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'body': body, 'bot_id': bot_id}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch" % arg
            )
        params[arg] = value
    # Both positional parameters are required and may not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch`")  # noqa: E501
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `set_maintenance_mode_api_v2_bots_bot_id_maintenance_patch`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/maintenance', 'PATCH',
        {'bot_id': params['bot_id']},
        [],  # no query parameters
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='APIResponse',  # noqa: E501
        auth_settings=['Bot'],  # requires the Bot auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_user_description_api_api_v2_users_user_id_description_patch(self, body, user_id, **kwargs):  # noqa: E501
    """Set User Description Api  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the decoded response.

    >>> thread = api.set_user_description_api_api_v2_users_user_id_description_patch(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserDescEdit body: (required)
    :param int user_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.set_user_description_api_api_v2_users_user_id_description_patch_with_http_info(body, user_id, **kwargs)  # noqa: E501
def set_user_description_api_api_v2_users_user_id_description_patch_with_http_info(self, body, user_id, **kwargs):  # noqa: E501
    """Set User Description Api  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the response.

    >>> thread = api.set_user_description_api_api_v2_users_user_id_description_patch_with_http_info(body, user_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserDescEdit body: (required)
    :param int user_id: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['body', 'user_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'body': body, 'user_id': user_id}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_user_description_api_api_v2_users_user_id_description_patch" % arg
            )
        params[arg] = value
    # Both positional parameters are required and may not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `set_user_description_api_api_v2_users_user_id_description_patch`")  # noqa: E501
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `set_user_description_api_api_v2_users_user_id_description_patch`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}/description', 'PATCH',
        {'user_id': params['user_id']},
        [],  # no query parameters
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='APIResponse',  # noqa: E501
        auth_settings=['User'],  # requires the User auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch(self, body, user_id, bot_id, **kwargs):  # noqa: E501
    """Set Vote Reminder  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the decoded response.

    >>> thread = api.set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch(body, user_id, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param VoteReminderPatch body: (required)
    :param int user_id: (required)
    :param int bot_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch_with_http_info(body, user_id, bot_id, **kwargs)  # noqa: E501
def set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch_with_http_info(self, body, user_id, bot_id, **kwargs):  # noqa: E501
    """Set Vote Reminder  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the response.

    >>> thread = api.set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch_with_http_info(body, user_id, bot_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param VoteReminderPatch body: (required)
    :param int user_id: (required)
    :param int bot_id: (required)
    :return: Object
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['body', 'user_id', 'bot_id', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    params = {'body': body, 'user_id': user_id, 'bot_id': bot_id}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch" % arg
            )
        params[arg] = value
    # All positional parameters are required and may not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch`")  # noqa: E501
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch`")  # noqa: E501
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `set_vote_reminder_api_v2_users_user_id_bots_bot_id_reminders_patch`")  # noqa: E501

    path_params = {
        'user_id': params['user_id'],
        'bot_id': params['bot_id'],
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/users/{user_id}/bots/{bot_id}/reminders', 'PATCH',
        path_params,
        [],  # no query parameters
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='Object',  # noqa: E501
        auth_settings=['User'],  # requires the User auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch(self, body, user_id, bot_id, rid, **kwargs):  # noqa: E501
    """Vote Review Api  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the decoded response.

    >>> thread = api.vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch(body, user_id, bot_id, rid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotReviewVote body: (required)
    :param int user_id: (required)
    :param int bot_id: (required)
    :param str rid: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the response body for this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths are identical apart from what call_api returns.
    return self.vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch_with_http_info(body, user_id, bot_id, rid, **kwargs)  # noqa: E501
def vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch_with_http_info(self, body, user_id, bot_id, rid, **kwargs):  # noqa: E501
    """Vote Review Api  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead of the response.

    >>> thread = api.vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch_with_http_info(body, user_id, bot_id, rid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BotReviewVote body: (required)
    :param int user_id: (required)
    :param int bot_id: (required)
    :param str rid: (required)
    :return: APIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['body', 'user_id', 'bot_id', 'rid', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    params = {'body': body, 'user_id': user_id, 'bot_id': bot_id,
              'rid': rid}
    for arg, value in six.iteritems(kwargs):
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch" % arg
            )
        params[arg] = value
    # All positional parameters are required and may not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch`")  # noqa: E501
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch`")  # noqa: E501
    if params.get('bot_id') is None:
        raise ValueError("Missing the required parameter `bot_id` when calling `vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch`")  # noqa: E501
    if params.get('rid') is None:
        raise ValueError("Missing the required parameter `rid` when calling `vote_review_api_api_v2_bots_bot_id_reviews_rid_votes_patch`")  # noqa: E501

    # bot_id and rid travel in the URL path; user_id goes in the query.
    path_params = {
        'bot_id': params['bot_id'],
        'rid': params['rid'],
    }
    query_params = [('user_id', params['user_id'])]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/v2/bots/{bot_id}/reviews/{rid}/votes', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='APIResponse',  # noqa: E501
        auth_settings=['User'],  # requires the User auth scheme
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 41.841015
| 436
| 0.623199
| 12,303
| 98,954
| 4.67959
| 0.022677
| 0.047244
| 0.015285
| 0.028763
| 0.984854
| 0.98277
| 0.980077
| 0.977229
| 0.96884
| 0.958574
| 0
| 0.017914
| 0.289195
| 98,954
| 2,364
| 437
| 41.858714
| 0.800617
| 0.309275
| 0
| 0.813793
| 0
| 0
| 0.20472
| 0.081098
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036015
| false
| 0
| 0.003065
| 0
| 0.09272
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a71b1d59af3fe16f3774d7f9d598ada4d8ce46f
| 138
|
py
|
Python
|
main.py
|
Berdugo1994/Tweeter-Search-Engine
|
ff80707d64b792288b877814d79e39c5b5ceb7ad
|
[
"MIT"
] | null | null | null |
main.py
|
Berdugo1994/Tweeter-Search-Engine
|
ff80707d64b792288b877814d79e39c5b5ceb7ad
|
[
"MIT"
] | null | null | null |
main.py
|
Berdugo1994/Tweeter-Search-Engine
|
ff80707d64b792288b877814d79e39c5b5ceb7ad
|
[
"MIT"
] | null | null | null |
# Entry point for the Tweeter-Search-Engine project.  Several engine
# variants are imported side by side, but only `search_engine_best` is run.
import search_engine_1  # alternative engine variant (imported, not run)
import search_engine_3  # alternative engine variant (imported, not run)
import search_engine_best
# Launch the chosen engine only when executed as a script, not on import.
if __name__ == '__main__':
    search_engine_best.main()
| 17.25
| 30
| 0.768116
| 19
| 138
| 4.736842
| 0.473684
| 0.533333
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.173913
| 138
| 7
| 31
| 19.714286
| 0.77193
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
aa962289156c0ac26d85fa703757138e6544a9d8
| 150
|
py
|
Python
|
loldib/getratings/models/NA/na_reksai/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_reksai/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_reksai/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_reksai_top import *
from .na_reksai_jng import *
from .na_reksai_mid import *
from .na_reksai_bot import *
from .na_reksai_sup import *
| 25
| 29
| 0.766667
| 25
| 150
| 4.2
| 0.36
| 0.285714
| 0.571429
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 5
| 30
| 30
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
aa9adc4354f540546cad8951167b2cf338416de7
| 147,071
|
py
|
Python
|
examples/AutoTest/ConnectTest.py
|
n-kawauchi/pipeline-test
|
aa29d84f177c7d8cefc81adab29abc06fccd61cb
|
[
"RSA-MD"
] | 15
|
2019-01-08T15:34:04.000Z
|
2022-03-01T08:36:17.000Z
|
examples/AutoTest/ConnectTest.py
|
n-kawauchi/pipeline-test
|
aa29d84f177c7d8cefc81adab29abc06fccd61cb
|
[
"RSA-MD"
] | 448
|
2018-12-27T03:13:56.000Z
|
2022-03-24T09:57:03.000Z
|
examples/AutoTest/ConnectTest.py
|
n-kawauchi/pipeline-test
|
aa29d84f177c7d8cefc81adab29abc06fccd61cb
|
[
"RSA-MD"
] | 31
|
2018-12-26T04:34:22.000Z
|
2021-11-25T04:39:51.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
## ConnectTest.py
##
## コンポーネント接続テスト
##
from rtc_handle import *
from BasicDataType_idl import *
import time
import commands
import SDOPackage
import os
##--------------------------------------------------------------------
g_test_name = "<< component connection test >>"
## Name-server setup: resolve both test components on localhost:2809
env = RtmEnv(sys.argv, ["localhost:2809"])
list0 = env.name_space["localhost:2809"].list_obj()
env.name_space['localhost:2809'].rtc_handles.keys()
ns = env.name_space['localhost:2809']
g_compo_send = ns.rtc_handles["AutoTestOut0.rtc"]
g_compo_recv = ns.rtc_handles["AutoTestIn0.rtc"]
ec_send = g_compo_send.rtc_ref.get_owned_contexts()
ec_recv = g_compo_recv.rtc_ref.get_owned_contexts()
g_out_ports = g_compo_send.rtc_ref.get_ports()
g_in_ports = g_compo_recv.rtc_ref.get_ports()
# For the SeqOut component the port list would be:
# length=8 [0]:Short [1]:Long [2]:Float [3]:Double [4]:ShortSeq [5]:LongSeq [6]:FloatSeq [7]:DoubleSeq
time.sleep(2)
##--------------------------------------------------------------------
## Connector-profile defaults
g_interface_type1 = "corba_cdr"
g_dataflow_type = "push"
g_subscription_type = "flush"
g_push_policy = "NEW"
g_push_rate = "2000"
g_skip_count = "4"
## Port indices (into the lists returned by get_ports())
g_port1 = 0
g_port2 = 1
g_port3 = 2
## ConnectorProfile(name, connector_id, ports, properties)
##   string name
##   string connector_id
##   RTC.PortService ports[]
##   SDOPackage.NameValue properties[]

def _initial_dataport_properties(data_type):
    # Default NameValue property set for a data-port connector profile.
    # Only the data type differs between the two data ports; everything
    # else comes from the module-level defaults above.
    return [
        SDOPackage.NameValue("dataport.data_type", any.to_any(data_type)),
        SDOPackage.NameValue("dataport.interface_type", any.to_any(g_interface_type1)),
        SDOPackage.NameValue("dataport.dataflow_type", any.to_any(g_dataflow_type)),
        SDOPackage.NameValue("dataport.subscription_type", any.to_any(g_subscription_type)),
        SDOPackage.NameValue("dataport.publisher.push_policy", any.to_any(g_push_policy)),
        SDOPackage.NameValue("dataport.publisher.push_rate", any.to_any(g_push_rate)),
        SDOPackage.NameValue("dataport.publisher.skip_count", any.to_any(g_skip_count)),
    ]

## Data port TimedFloat
g_name1 = "out"
g_connector_id1 = "001"
g_data_type1 = "TimedFloat"
g_conprof1 = RTC.ConnectorProfile(g_name1, g_connector_id1,
                                  [g_out_ports[g_port1], g_in_ports[g_port1]],
                                  _initial_dataport_properties(g_data_type1))
## Data port TimedFloatSeq
g_name2 = "seqout"
g_connector_id2 = "002"
g_data_type2 = "TimedFloatSeq"
g_conprof2 = RTC.ConnectorProfile(g_name2, g_connector_id2,
                                  [g_out_ports[g_port2], g_in_ports[g_port2]],
                                  _initial_dataport_properties(g_data_type2))
## Service port
g_name3 = "MyService"
g_connector_id3 = "003"
g_interface_type3 = "MyService"
g_conprof3 = RTC.ConnectorProfile(g_name3, g_connector_id3,
                                  [g_out_ports[g_port3], g_in_ports[g_port3]],
                                  [SDOPackage.NameValue("dataport.interface_type", any.to_any(g_interface_type3))])
##--------------------------------------------------------------------
## Send/receive result checking
g_diff_send_file = "./original-data"
g_diff_recv_file = "./received-data"
g_check_message = g_diff_recv_file + " file not found."
g_test_result_file = "./ResultTest.log"
g_test_case = "case"
g_test_cnt = "count"
g_test_ok = "OK."
g_test_ng = "NG detected."
g_test_ng_message = " < received-data >"
g_mess_header = "< "
g_mess_footer = " > "
# Result-line format:
#   e.g. case 1, run 1 -> "< case1 count1 > OK."
#   e.g. case 1, run 2 -> "< case1 count2 > NG detected."
##--------------------------------------------------------------------
## Internal function: set up the connector profiles (data ports)
##
## (arguments)
##   subscription_type : "flush", "new", "periodic"
##   push_policy       : "ALL", "FIFO", "SKIP", "NEW", ""
##   connect_direction : 0: outport -> inport, 1: inport -> outport
##--------------------------------------------------------------------
def make_connecter_profile(subscription_type, push_policy, connect_direction):
    """Rebuild g_conprof1 and g_conprof2 for the given subscription type,
    push policy and connection direction.

    g_conprof3 (the service-port profile) is declared global but left
    untouched, matching the original behavior.
    """
    global g_conprof1, g_conprof2, g_conprof3

    def _props(data_type):
        # Property list shared by both data ports; only data_type varies
        # per port, subscription_type/push_policy come from the caller.
        return [
            SDOPackage.NameValue("dataport.data_type", any.to_any(data_type)),
            SDOPackage.NameValue("dataport.interface_type", any.to_any(g_interface_type1)),
            SDOPackage.NameValue("dataport.dataflow_type", any.to_any(g_dataflow_type)),
            SDOPackage.NameValue("dataport.subscription_type", any.to_any(subscription_type)),
            SDOPackage.NameValue("dataport.publisher.push_policy", any.to_any(push_policy)),
            SDOPackage.NameValue("dataport.publisher.push_rate", any.to_any(g_push_rate)),
            SDOPackage.NameValue("dataport.publisher.skip_count", any.to_any(g_skip_count)),
        ]

    if connect_direction == 0:
        # outport -> inport
        ports1 = [g_out_ports[g_port1], g_in_ports[g_port1]]
        ports2 = [g_out_ports[g_port2], g_in_ports[g_port2]]
    else:
        # inport -> outport
        ports1 = [g_in_ports[g_port1], g_out_ports[g_port1]]
        ports2 = [g_in_ports[g_port2], g_out_ports[g_port2]]
    g_conprof1 = RTC.ConnectorProfile(g_name1, g_connector_id1, ports1, _props(g_data_type1))
    g_conprof2 = RTC.ConnectorProfile(g_name2, g_connector_id2, ports2, _props(g_data_type2))
    return
##--------------------------------------------------------------------
## Internal function: delete the received-data file
##
## (arguments)
##   path : file to remove; defaults to g_diff_recv_file
##--------------------------------------------------------------------
def delete_recv_file(path=None):
    """Remove the received-data file if it exists; do nothing otherwise."""
    if path is None:
        path = g_diff_recv_file
    # Only attempt removal when the file is present
    if os.path.isfile(path):
        os.remove(path)
    return
##--------------------------------------------------------------------
## Internal function: compare the sent-data and received-data files
##
## (arguments)
##   send_file : path of the sent-data file (defaults to g_diff_send_file)
##   recv_file : path of the received-data file (defaults to g_diff_recv_file)
## (return)  True : match,  False : mismatch or file missing
##--------------------------------------------------------------------
def diff_file(send_file=None, recv_file=None):
    """Compare the two files line by line until the send file ends.

    Trailing LF/CR characters are stripped before comparing, so files
    that differ only in line-ending convention still match.  Extra
    lines in the receive file beyond the send file's length are
    ignored (the loop stops at send-file EOF), matching the original
    behavior.  Returns False if either file is missing.
    """
    if send_file is None:
        send_file = g_diff_send_file
    if recv_file is None:
        recv_file = g_diff_recv_file
    ## Check the sent-data file exists
    # Bug fix: the original printed undefined names (send_file/recv_file
    # did not exist as locals), raising NameError instead of reporting.
    if not os.path.isfile(send_file):
        print("send_file (%s) not found." % send_file)
        return False
    ## Check the received-data file exists
    if not os.path.isfile(recv_file):
        print("recv_file (%s) not found." % recv_file)
        return False
    ## Compare the data line by line
    bret = True
    with open(send_file, 'r') as f_send, open(recv_file, 'r') as f_recv:
        while True:
            str_send = f_send.readline()
            str_recv = f_recv.readline()
            if len(str_send) == 0:
                # send file exhausted: everything compared so far matched
                break
            # Strip the trailing newline, then any carriage return,
            # in the same order as the original implementation.
            str_send2 = str_send.rstrip('\n').rstrip('\r')
            str_recv2 = str_recv.rstrip('\n').rstrip('\r')
            if str_send2 != str_recv2:
                bret = False
                break
    return bret
##--------------------------------------------------------------------
## Initial value of the test-case number
## Cases are numbered sequentially from the top of this script
case_no = 0
## Number of repetitions of each test case
loop_count = 3
## Sleep (seconds) between the receiver's activate_component and the sender's activate_component
sleep_recv_act_time = 1
## Sleep (seconds) between activate_component and deactivate_component
sleep_act_time = 10
## Sleep (seconds) at the end of each for-loop iteration
sleep_for_time = 2
## Sleep (seconds) between connect and disconnect
sleep_connect_time = 2
# Create (truncate) the test result file and write the header line
fout = open(g_test_result_file, 'w')
fout.write(g_test_name + '\n')
fout.close()
#print g_test_name
time.sleep(1)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, connect/disconnect test 2
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, flush) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 1 Set up the connector profiles
    make_connecter_profile("flush", "", 0)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    time.sleep(sleep_connect_time)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Write the test result (connect/disconnect alone counts as OK)
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    message = message + g_test_ok
    print message
    fout.write(message + '\n')
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: in->out, connect/disconnect test 1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, flush) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 1 Set up the connector profiles (ports listed inport-first)
    make_connecter_profile("flush", "", 1)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    time.sleep(sleep_connect_time)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Write the test result
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    message = message + g_test_ok
    print message
    fout.write(message + '\n')
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, activate/deactivate test 1
## (ports stay connected for the whole case; only activation cycles)
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, flush), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 Set up the connector profiles
make_connecter_profile("flush", "", 0)
## 3 Connect the ports (once, before the loop)
# Data port 1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# Data port 2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# Service port MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
## 6 Disconnect the ports (after all iterations)
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## Connection type: flush, direction: in->out, activate/deactivate test 1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, flush), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 Set up the connector profiles (ports listed inport-first)
make_connecter_profile("flush", "", 1)
## 3 Connect the ports (once, before the loop)
# Data port 1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# Data port 2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# Service port MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
## 6 Disconnect the ports (after all iterations)
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, activate/deactivate test 10
## (components are activated without any ports being connected)
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Not Connect(out->in, flush), Activate -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 1 Set up the connector profiles (profiles built but never connected)
    make_connecter_profile("flush", "", 0)
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Write the test result (surviving the cycle counts as OK)
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    message = message + g_test_ok
    print message
    fout.write(message + '\n')
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, connect/disconnect/activate/deactivate test 1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, flush) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles
    make_connecter_profile("flush", "", 0)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: in->out, connect/disconnect/activate/deactivate test 1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, flush) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles (ports listed inport-first)
    make_connecter_profile("flush", "", 1)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, connect/disconnect/activate/deactivate test 2
## (disconnect happens while the components are still active)
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, flush) -> Activate -> send/recv -> Disconnect -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles
    make_connecter_profile("flush", "", 0)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 6 Disconnect the ports (before deactivating)
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: in->out, connect/disconnect/activate/deactivate test 2
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, flush) -> Activate -> send/recv -> Disconnect -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles (ports listed inport-first)
    make_connecter_profile("flush", "", 1)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    ## 4 Activate (receiver first, then sender)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    time.sleep(sleep_act_time)
    ## 6 Disconnect the ports (before deactivating)
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, connect/disconnect/activate/deactivate test 3
##--------------------------------------------------------------------
## NOTE: activation is performed before connecting, so the received
## data starts partway through the sender's output.
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Activate -> Connect(out->in, flush) -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles
    make_connecter_profile("flush", "", 0)
    ## 4 Activate (receiver first, then sender — before connecting)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: in->out, connect/disconnect/activate/deactivate test 3
##--------------------------------------------------------------------
## NOTE: activation is performed before connecting, so the received
## data starts partway through the sender's output.
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Activate -> Connect(in->out, flush) -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles (ports listed inport-first)
    make_connecter_profile("flush", "", 1)
    ## 4 Activate (receiver first, then sender — before connecting)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    time.sleep(sleep_act_time)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## 6 Disconnect the ports
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## Connection type: flush, direction: out->in, connect/disconnect/activate/deactivate test 4
##--------------------------------------------------------------------
## NOTE: activation is performed before connecting, so the received
## data starts partway through the sender's output.
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Activate -> Connect(out->in, flush) -> send/recv -> Disconnect -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
    ## 2 Delete the received-data file
    delete_recv_file()
    ## 1 Set up the connector profiles
    make_connecter_profile("flush", "", 0)
    ## 4 Activate (receiver first, then sender — before connecting)
    ec_recv[0].activate_component(g_compo_recv.rtc_ref)
    time.sleep(sleep_recv_act_time)
    ec_send[0].activate_component(g_compo_send.rtc_ref)
    ## 3 Connect the ports
    # Data port 1 TimedFloat
    ret0 = g_out_ports[g_port1].connect(g_conprof1)
    # Data port 2 TimedFloatSeq
    ret1 = g_out_ports[g_port2].connect(g_conprof2)
    # Service port MyService
    ret2 = g_out_ports[g_port3].connect(g_conprof3)
    time.sleep(sleep_act_time)
    ## 6 Disconnect the ports (before deactivating)
    g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
    g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
    g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
    ## 5 Deactivate (sender first, then receiver)
    ec_send[0].deactivate_component(g_compo_send.rtc_ref)
    ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
    ## Check that the received-data file exists
    if os.path.isfile(g_diff_recv_file) == False:
        fout = open(g_test_result_file, 'a')
        message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
        message = message + g_check_message
        fout.write(message + '\n')
        fout.close()
        print message
        time.sleep(sleep_for_time)
        continue
    ## 7 Compare sent and received data
    time.sleep(sleep_act_time)
    bret = diff_file()
    ## Write the test result from the comparison
    fout = open(g_test_result_file, 'a')
    message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
    # bret == True means the sent and received data matched
    if bret == True:
        # Test result: OK
        message = message + g_test_ok
        print message
        fout.write(message + '\n')
    else:
        # Test result: NG
        message = message + g_test_ng
        print message
        message = message + g_test_ng_message
        fout.write(message + '\n')
        # Copy the received data into the test result file
        fin2 = open(g_diff_recv_file, 'r')
        while(1):
            s2 = fin2.readline()
            if len(s2) == 0:
                break
            fout.write(s2)
        fin2.close()
    fout.close()
    time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:flush 方向:in->out 接続・切断・Activate・Deactivateテスト4
##--------------------------------------------------------------------
## ●注意:Activateを先に行っている為、受信データは途中からの内容になります。
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Activate -> Connect(in->out, flush) -> send/recv -> Disconnect -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("flush", "", 1)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_act_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:ALL 接続・切断テスト3
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,ALL) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:FIFO 接続・切断テスト4
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,FIFO) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:NEW 接続・切断テスト6
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,NEW) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:SKIP 接続・切断テスト5
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,SKIP) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:ALL 接続・切断テスト3
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,ALL) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:FIFO 接続・切断テスト4
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,FIFO) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:NEW 接続・切断テスト6
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,NEW) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:SKIP 接続・切断テスト5
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,SKIP) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:ALL Activate・Deactivateテスト2
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, new,ALL), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:FIFO Activate・Deactivateテスト3
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, new,FIFO), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:NEW Activate・Deactivateテスト5
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, new,NEW), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:SKIP Activate・Deactivateテスト4
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, new,SKIP), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:ALL Activate・Deactivateテスト2
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, new,ALL), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:FIFO Activate・Deactivateテスト3
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, new,FIFO), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:NEW Activate・Deactivateテスト5
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, new,NEW), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:SKIP Activate・Deactivateテスト4
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, new,SKIP), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:ALL 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,ALL) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:FIFO 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,FIFO) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:NEW 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,NEW) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:out->in ポリシー:SKIP 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, new,SKIP) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:ALL 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,ALL) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:FIFO 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,FIFO) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:NEW 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,NEW) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:new 方向:in->out ポリシー:SKIP 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, new,SKIP) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("new", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:ALL 接続・切断テスト7
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,ALL) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:FIFO 接続・切断テスト8
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,FIFO) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:NEW 接続・切断テスト10
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,NEW) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:SKIP 接続・切断テスト9
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,SKIP) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:ALL 接続・切断テスト7
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,ALL) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:FIFO 接続・切断テスト8
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,FIFO) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:NEW 接続・切断テスト10
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,NEW) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:SKIP 接続・切断テスト9
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,SKIP) -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
time.sleep(sleep_connect_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## テスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_test_ok
print message
fout.write(message + '\n')
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:ALL Activate・Deactivateテスト6
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, periodic,ALL), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:FIFO Activate・Deactivateテスト7
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, periodic,FIFO), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:NEW Activate・Deactivateテスト9
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, periodic,NEW), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:SKIP Activate・Deactivateテスト8
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(out->in, periodic,SKIP), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:ALL Activate・Deactivateテスト6
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, periodic,ALL), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:FIFO Activate・Deactivateテスト7
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, periodic,FIFO), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:NEW Activate・Deactivateテスト9
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, periodic,NEW), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:SKIP Activate・Deactivateテスト8
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connecting(in->out, periodic,SKIP), Activate -> send/recv -> Deactivate"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:ALL 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,ALL) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:FIFO 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,FIFO) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:NEW 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,NEW) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:out->in ポリシー:SKIP 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(out->in, periodic,SKIP) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 0)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:ALL 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,ALL) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "ALL", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:FIFO 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,FIFO) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "FIFO", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:NEW 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,NEW) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "NEW", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
##--------------------------------------------------------------------
## 接続タイプ:periodic 方向:in->out ポリシー:SKIP 接続・切断・Activate・Deactivateテスト1
##--------------------------------------------------------------------
case_no = case_no + 1
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " "
message = message + "Connect(in->out, periodic,SKIP) -> Activate -> send/recv -> Deactivate -> Disconnect"
message = message + g_mess_footer
fout.write(message + '\n')
fout.close()
print message
for i in range(loop_count):
## 2 受信データファイル削除
delete_recv_file()
## 1 コネクタープロファイル設定
make_connecter_profile("periodic", "SKIP", 1)
## 3 ポート接続
# データポート1 TimedFloat
ret0 = g_out_ports[g_port1].connect(g_conprof1)
# データポート2 TimedFloatSeq
ret1 = g_out_ports[g_port2].connect(g_conprof2)
# サービスポート MyService
ret2 = g_out_ports[g_port3].connect(g_conprof3)
## 4 アクティベート
ec_recv[0].activate_component(g_compo_recv.rtc_ref)
time.sleep(sleep_recv_act_time)
ec_send[0].activate_component(g_compo_send.rtc_ref)
time.sleep(sleep_act_time)
## 5 ディアクティベート
ec_send[0].deactivate_component(g_compo_send.rtc_ref)
ec_recv[0].deactivate_component(g_compo_recv.rtc_ref)
## 6 ポート切断
g_in_ports[g_port3].disconnect(g_conprof3.connector_id)
g_in_ports[g_port2].disconnect(g_conprof2.connector_id)
g_in_ports[g_port1].disconnect(g_conprof1.connector_id)
## 受信ファイル有無判定
if os.path.isfile(g_diff_recv_file) == False:
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
message = message + g_check_message
fout.write(message + '\n')
fout.close()
print message
time.sleep(sleep_for_time)
continue
## 7 送受信データ比較
time.sleep(sleep_act_time)
bret = diff_file()
## 差分ファイルからテスト結果出力
fout = open(g_test_result_file, 'a')
message = g_mess_header + g_test_case + str(case_no) + " " + g_test_cnt + str(i+1) + g_mess_footer
# bret==True なら送受信データ一致
if bret == True:
# テスト結果 OK
message = message + g_test_ok
print message
fout.write(message + '\n')
else:
# テスト結果 NG
message = message + g_test_ng
print message
message = message + g_test_ng_message
fout.write(message + '\n')
# 受信データをテスト結果ファイルへコピー
fin2 = open(g_diff_recv_file, 'r')
while(1):
s2 = fin2.readline()
if len(s2) == 0:
break
fout.write(s2)
fin2.close()
fout.close()
time.sleep(sleep_for_time)
print "Test Complete."
| 30.83896
| 652
| 0.624195
| 19,943
| 147,071
| 4.287319
| 0.014391
| 0.034268
| 0.040771
| 0.040958
| 0.97055
| 0.968059
| 0.966165
| 0.965767
| 0.96517
| 0.964632
| 0
| 0.019975
| 0.20348
| 147,071
| 4,768
| 653
| 30.845428
| 0.708942
| 0.176989
| 0
| 0.946492
| 0
| 0
| 0.05587
| 0.008142
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.002126
| null | null | 0.074061
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aab781d012e03c128883aeac5cdbf707b9c34a34
| 104
|
py
|
Python
|
xendit/models/directdebit/payment/__init__.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 10
|
2020-10-31T23:34:34.000Z
|
2022-03-08T19:08:55.000Z
|
xendit/models/directdebit/payment/__init__.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 22
|
2020-07-30T14:25:07.000Z
|
2022-03-31T03:55:46.000Z
|
xendit/models/directdebit/payment/__init__.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 11
|
2020-07-28T08:09:40.000Z
|
2022-03-18T00:14:02.000Z
|
from .direct_debit_basket import DirectDebitBasket
from .direct_debit_payment import DirectDebitPayment
| 34.666667
| 52
| 0.903846
| 12
| 104
| 7.5
| 0.666667
| 0.222222
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 104
| 2
| 53
| 52
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2ac0963606ac6276f3b94fe6c00a0b7c890de81c
| 108
|
py
|
Python
|
ui/pypesvds/lib/auth.py
|
onfire73/pypeskg
|
2171d0141e184999ba03c3e535ecc9bfddef10be
|
[
"Apache-2.0"
] | 117
|
2015-02-28T15:38:11.000Z
|
2022-02-12T16:08:41.000Z
|
ui/pypesvds/lib/auth.py
|
onfire73/pypeskg
|
2171d0141e184999ba03c3e535ecc9bfddef10be
|
[
"Apache-2.0"
] | 1
|
2015-10-07T19:59:52.000Z
|
2015-10-07T19:59:52.000Z
|
ui/pypesvds/lib/auth.py
|
onfire73/pypeskg
|
2171d0141e184999ba03c3e535ecc9bfddef10be
|
[
"Apache-2.0"
] | 41
|
2015-06-11T09:32:14.000Z
|
2018-09-17T03:10:23.000Z
|
from pylons.templating import render_mako as render
def render_signin():
    """Render and return the sign-in page via the Mako template engine."""
    template_name = 'signin.html'
    return render(template_name)
| 18
| 51
| 0.768519
| 15
| 108
| 5.4
| 0.733333
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 108
| 5
| 52
| 21.6
| 0.880435
| 0
| 0
| 0
| 0
| 0
| 0.102804
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
2afaae9fc21d9cf59e474428670c7c2a83389602
| 329
|
py
|
Python
|
tests/test_picklist_parsers.py
|
Edinburgh-Genome-Foundry/plateo
|
c9a608658325f3c507788d9b966a3f3c8e516bc5
|
[
"MIT"
] | 22
|
2018-01-29T21:34:25.000Z
|
2021-12-14T15:31:49.000Z
|
tests/test_picklist_parsers.py
|
Edinburgh-Genome-Foundry/plateo
|
c9a608658325f3c507788d9b966a3f3c8e516bc5
|
[
"MIT"
] | 3
|
2017-09-20T16:08:45.000Z
|
2021-05-28T17:45:14.000Z
|
tests/test_picklist_parsers.py
|
Edinburgh-Genome-Foundry/plateo
|
c9a608658325f3c507788d9b966a3f3c8e516bc5
|
[
"MIT"
] | 5
|
2018-09-18T08:53:37.000Z
|
2021-04-28T08:44:38.000Z
|
from plateo.parsers import (picklist_from_labcyte_echo_logfile,
picklist_from_tecan_evo_picklist_file)
def test_picklist_from_labcyte_echo_logfile():
    """Smoke test: referencing the parser verifies it imported cleanly."""
    _ = picklist_from_labcyte_echo_logfile
def test_picklist_from_tecan_evo_picklist_file():
    """Smoke test: referencing the parser verifies it imported cleanly."""
    _ = picklist_from_tecan_evo_picklist_file
| 29.909091
| 66
| 0.790274
| 43
| 329
| 5.372093
| 0.325581
| 0.311688
| 0.246753
| 0.298701
| 0.805195
| 0.727273
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0.179331
| 329
| 10
| 67
| 32.9
| 0.855556
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.125
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
631f44eece4f465aedf11d553cb5951d0faf8ea1
| 63
|
py
|
Python
|
robit/job/__init__.py
|
stratusadv/robit
|
7e0414d0ed3d98bb2c9a8785bf36961ac08f1d27
|
[
"MIT"
] | null | null | null |
robit/job/__init__.py
|
stratusadv/robit
|
7e0414d0ed3d98bb2c9a8785bf36961ac08f1d27
|
[
"MIT"
] | 1
|
2021-11-01T18:51:04.000Z
|
2021-11-01T18:51:04.000Z
|
robit/job/__init__.py
|
stratusadv/robit
|
7e0414d0ed3d98bb2c9a8785bf36961ac08f1d27
|
[
"MIT"
] | null | null | null |
from robit.job.job import Job
from robit.job.group import Group
| 31.5
| 33
| 0.825397
| 12
| 63
| 4.333333
| 0.416667
| 0.346154
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 2
| 33
| 31.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2dd5379aea9d896a53743e1d357f228c9e583eb2
| 10,038
|
py
|
Python
|
chess_pieces_defaults.py
|
FanimeCosplayChess/CosplayChessBoard
|
43416c8c6f76b7ad5ddeabb233ba8e4be0f6f981
|
[
"MIT"
] | 1
|
2019-11-04T05:08:50.000Z
|
2019-11-04T05:08:50.000Z
|
chess_pieces_defaults.py
|
FanimeCosplayChess/CosplayChessBoard
|
43416c8c6f76b7ad5ddeabb233ba8e4be0f6f981
|
[
"MIT"
] | 10
|
2020-02-04T01:50:15.000Z
|
2020-03-08T23:07:15.000Z
|
chess_pieces_defaults.py
|
FanimeCosplayChess/CosplayChessBoard
|
43416c8c6f76b7ad5ddeabb233ba8e4be0f6f981
|
[
"MIT"
] | 1
|
2019-11-03T20:02:55.000Z
|
2019-11-03T20:02:55.000Z
|
from chesspiece import ChessPiece
# Pixel offsets applied to every piece's board coordinates.
x_off = 50
y_off = 50

# Back-row piece names, top to bottom of the column (y = 0, 50, ... 350).
_BACK_ROW = ('rook_1', 'knight_1', 'bishop_1', 'queen',
             'king', 'bishop_2', 'knight_2', 'rook_2')

# Default starting layout.  The original file spelled out all 32 pieces as
# near-identical literals; this builds the same dict (same keys, same
# coordinates, same insertion order) programmatically.
#
# NOTE(review): the original hard-codes team='white' and piece_type='rook'
# for EVERY piece, including the 'b_*' (black) entries and the knights,
# bishops, queens and kings.  Those values are reproduced exactly here to
# preserve behavior, but the key names suggest they were meant to differ --
# confirm against ChessPiece consumers before changing them.
pieces_dict = {}
for _prefix, _back_x, _pawn_x in (('w', 0, 50), ('b', 350, 300)):
    # Back row: one piece every 50px down the column at x = _back_x.
    for _row, _name in enumerate(_BACK_ROW):
        pieces_dict['%s_%s' % (_prefix, _name)] = ChessPiece(
            init_x=_back_x + x_off,
            init_y=_row * 50 + y_off,
            team='white',
            piece_type='rook'
        )
    # Pawn row: eight pawns numbered 1-8, every 50px at x = _pawn_x.
    for _row in range(8):
        pieces_dict['%s_pawn_%d' % (_prefix, _row + 1)] = ChessPiece(
            init_x=_pawn_x + x_off,
            init_y=_row * 50 + y_off,
            team='white',
            piece_type='rook'
        )

# Board-coordinate lookup: grid index (0-7) -> file letter / rank digit.
grid_dict = {
    'x': {i: letter for i, letter in enumerate('abcdefgh')},
    'y': {i: str(i + 1) for i in range(8)},
}
| 39.519685
| 55
| 0.227137
| 652
| 10,038
| 3.153374
| 0.069018
| 0.064202
| 0.233463
| 0.140078
| 0.945039
| 0.933366
| 0.933366
| 0.933366
| 0.764105
| 0.600195
| 0
| 0.07138
| 0.704124
| 10,038
| 253
| 56
| 39.675889
| 0.620875
| 0
| 0
| 0.726027
| 0
| 0
| 0.056983
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004566
| 0
| 0.004566
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9335ba47824d0fa77d38a17472973803ed3724d5
| 15,784
|
py
|
Python
|
src/CLI/actioner/sonic-cli-route-match.py
|
project-arlo/sonic-mgmt-framework
|
562cd84ff3fec9ca705c7df621742f2daa61ce71
|
[
"Apache-2.0"
] | 7
|
2019-10-17T06:12:02.000Z
|
2021-09-08T11:16:19.000Z
|
src/CLI/actioner/sonic-cli-route-match.py
|
noolex/sonic-mgmt-framework
|
5493889adc47fc584b04dca1a0cc0a2007211df4
|
[
"Apache-2.0"
] | 207
|
2019-06-24T04:48:11.000Z
|
2020-05-06T05:51:37.000Z
|
src/CLI/actioner/sonic-cli-route-match.py
|
noolex/sonic-mgmt-framework
|
5493889adc47fc584b04dca1a0cc0a2007211df4
|
[
"Apache-2.0"
] | 20
|
2019-06-27T19:24:45.000Z
|
2021-07-15T21:12:30.000Z
|
#!/usr/bin/python
###########################################################################
#
# Copyright 2019 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
import sys
import time
import json
import ast
from rpipe_utils import pipestr
import cli_client as cc
from scripts.render_cli import show_cli_output
# NOTE(review): the mutable default `args=[]` is a Python antipattern, but it
# is harmless here because `args` is only ever read, never mutated.
def invoke_api(func, args=[]):
    """Dispatch a CLI route-match action to the REST backend.

    func: name of the generated REST operation to perform; each branch
        below maps one operation name to its RESTCONF keypath.
    args: positional CLI arguments -- args[0] is the route-map (policy
        definition) name, args[1] the statement sequence number, and
        args[2] (when present) the value being set.

    Returns the cc response object from api.patch()/api.delete(), or
    api.cli_not_implemented(func) for unknown operation names.
    """
    api = cc.ApiClient()
    keypath = []
    body = None
    # --- route-map action (permit/deny result) ---
    if func == 'patch_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_actions_config_policy_result':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/actions/config/policy-result',
                          name=args[0], name1= args[1])
        body = {"openconfig-routing-policy:policy-result": args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_actions':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/actions',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match prefix-set ---
    elif func == 'patch_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_prefix_set_config_prefix_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-prefix-set/config/prefix-set',
                          name=args[0], name1= args[1])
        body = {"openconfig-routing-policy:prefix-set":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_prefix_set_config_prefix_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-prefix-set/config/prefix-set',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match as-path set (BGP) ---
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_match_as_path_set_config_as_path_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/match-as-path-set/config/as-path-set', name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:as-path-set":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_match_as_path_set_config_as_path_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/match-as-path-set/config/as-path-set', name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match interface ---
    elif func == 'patch_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_interface_config_interface':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-interface/config/interface',name=args[0], name1= args[1])
        body = {"openconfig-routing-policy:interface":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_interface_config_interface':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-interface/config/interface',name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match community / ext-community (BGP) ---
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_community_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/community-set',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:community-set":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_community_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/community-set',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_ext_community_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/ext-community-set',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:ext-community-set":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_ext_community_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/ext-community-set',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match tag (leaf-list: delete existing value before patching) ---
    elif func == 'patch_openconfig_routing_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_tag_set_config_tag_value':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-tag-set/config/openconfig-routing-policy-ext:tag-value',
                          name=args[0], name1= args[1])
        api.delete(keypath)
        body = {"openconfig-routing-policy:tag-value":[int(args[2])]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_tag_set_config_tag_value':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-tag-set/config/openconfig-routing-policy-ext:tag-value',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match origin / med / local-pref (BGP scalar conditions) ---
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_origin_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/origin-eq',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:origin-eq":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_origin_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/origin-eq',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_med_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/med-eq',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:med-eq":int(args[2])}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_med_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/med-eq',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    elif func == 'patch_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_local_pref_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/local-pref-eq',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy:local-pref-eq":int(args[2])}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_local_pref_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/local-pref-eq',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match neighbor address (leaf-list: delete existing before patch) ---
    elif func == 'patch_openconfig_routing_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_neighbor_set_config_address':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-neighbor-set/config/openconfig-routing-policy-ext:address',
                          name=args[0], name1= args[1])
        api.delete(keypath)
        body = {"openconfig-routing-policy-ext:address":[args[2]]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_match_neighbor_set_config_address':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/match-neighbor-set/config/openconfig-routing-policy-ext:address',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match next-hop set (BGP extension) ---
    elif func == 'patch_openconfig_bgp_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_next_hop_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/openconfig-bgp-policy-ext:next-hop-set',
                          name=args[0], name1= args[1])
        body = {"openconfig-bgp-policy-ext:next-hop-set":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_bgp_policy_ext_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_bgp_conditions_config_next_hop_set':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/openconfig-bgp-policy:bgp-conditions/config/openconfig-bgp-policy-ext:next-hop-set',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- call sub-policy ---
    elif func == 'patch_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_config_call_policy':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/config/call-policy',
                          name=args[0], name1= args[1])
        body = {"openconfig-routing-policy:call-policy":args[2]}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_config_call_policy':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/config/call-policy',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    # --- match source protocol (CLI keyword mapped to OpenConfig identity) ---
    elif func == 'patch_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_config_install_protocol_eq':
        proto_number = {"bgp":"BGP","ospf":"OSPF","ospf3":"OSPF3","static":"STATIC","connected":"DIRECTLY_CONNECTED"}
        if args[2] not in proto_number.keys():
            print("%Error: Invalid protocol number in route-match config")
            # NOTE(review): `exit(1)` is the site-module helper; sys.exit(1)
            # would be the conventional choice in a script.
            exit(1)
        else:
            protocol = proto_number.get(args[2])
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/config/install-protocol-eq',
                          name=args[0], name1= args[1])
        body = {"openconfig-routing-policy:install-protocol-eq":protocol}
        return api.patch(keypath, body)
    elif func == 'delete_openconfig_routing_policy_routing_policy_policy_definitions_policy_definition_statements_statement_conditions_config_install_protocol_eq':
        keypath = cc.Path('/restconf/data/openconfig-routing-policy:routing-policy/policy-definitions/policy-definition={name}/statements/statement={name1}/conditions/config/install-protocol-eq',
                          name=args[0], name1= args[1])
        return api.delete(keypath)
    else:
        # Unknown operation name: report not-implemented to the CLI.
        body = {}
        return api.cli_not_implemented(func)
def run(func, args):
    """Invoke *func* through invoke_api() and report failures.

    On HTTP success, prints "Failed" when the response carries no content;
    on HTTP error, prints the server's error message.  Returns None.
    """
    response = invoke_api(func, args)
    if response.ok():
        # Bug fix: the original checked `response.content is not None` and
        # then re-tested the very same value for None inside that branch,
        # so the "Failed" report was unreachable dead code.  Report the
        # missing-content case directly instead.
        if response.content is None:
            print("Failed")
    else:
        # Parenthesized form works identically under Python 2 and 3.
        print(response.error_message())
# Script entry point: echo the raw argv through the pipe helper, then
# dispatch to run() with the API operation name (argv[1]) and the
# remaining CLI arguments (argv[2:]).
if __name__ == '__main__':
    pipestr().write(sys.argv)
    run(sys.argv[1], sys.argv[2:])
| 78.138614
| 272
| 0.764698
| 1,961
| 15,784
| 5.894442
| 0.081591
| 0.122588
| 0.093693
| 0.145341
| 0.895925
| 0.89506
| 0.89506
| 0.894455
| 0.894455
| 0.894455
| 0
| 0.010016
| 0.11442
| 15,784
| 201
| 273
| 78.527363
| 0.816927
| 0.036936
| 0
| 0.532051
| 0
| 0.179487
| 0.664738
| 0.65609
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.044872
| null | null | 0.019231
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
937a39a9a5d19ca698ca1abce69e8583bbced473
| 26,758
|
py
|
Python
|
tests/test_rest.py
|
shubhamdotjain/patchew
|
ab352230c1434c01ef9bb9f60b0a7caf011365cf
|
[
"MIT"
] | null | null | null |
tests/test_rest.py
|
shubhamdotjain/patchew
|
ab352230c1434c01ef9bb9f60b0a7caf011365cf
|
[
"MIT"
] | null | null | null |
tests/test_rest.py
|
shubhamdotjain/patchew
|
ab352230c1434c01ef9bb9f60b0a7caf011365cf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2018 Red Hat, Inc.
#
# Authors:
# Paolo Bonzini <pbonzini@redhat.com>
#
# This work is licensed under the MIT License. Please see the LICENSE file or
# http://opensource.org/licenses/MIT.
import sys
import os
import json
import unittest
from django.contrib.auth.models import User
sys.path.append(os.path.dirname(__file__))
from tests.patchewtest import PatchewTestCase, main
from api.models import Message
from api.rest import AddressSerializer
from collections import OrderedDict
try:
import coreapi
except ImportError:
coreapi = None
class RestTest(PatchewTestCase):
def setUp(self):
self.create_superuser()
self.p = self.add_project("QEMU", "qemu-devel@nongnu.org")
self.PROJECT_BASE = '%sprojects/%d/' % (self.REST_BASE, self.p.id)
self.sp = self.add_project("QEMU Block Layer", "qemu-block@nongnu.org")
self.sp.parent_project = self.p
self.sp.prefix_tags = "block"
self.sp.save()
self.SUBPROJECT_BASE = '%sprojects/%d/' % (self.REST_BASE, self.sp.id)
self.p2 = self.add_project("EDK 2", "edk2-devel@lists.01.org")
self.PROJECT_BASE_2 = '%sprojects/%d/' % (self.REST_BASE, self.p2.id)
self.admin = User.objects.get(username='admin')
self.USER_BASE = '%susers/%d/' % (self.REST_BASE, self.admin.id)
def test_root(self):
resp = self.api_client.get(self.REST_BASE)
self.assertEquals(resp.data['users'], self.REST_BASE + 'users/')
self.assertEquals(resp.data['projects'], self.REST_BASE + 'projects/')
self.assertEquals(resp.data['series'], self.REST_BASE + 'series/')
resp = self.api_client.get(self.REST_BASE, HTTP_HOST='patchew.org')
self.assertEquals(resp.data['users'], 'http://patchew.org/api/v1/users/')
self.assertEquals(resp.data['projects'], 'http://patchew.org/api/v1/projects/')
self.assertEquals(resp.data['series'], 'http://patchew.org/api/v1/series/')
def test_users(self):
resp = self.api_client.get(self.REST_BASE + 'users/')
self.assertEquals(resp.data['count'], 1)
self.assertEquals(resp.data['results'][0]['resource_uri'], self.USER_BASE)
self.assertEquals(resp.data['results'][0]['username'], self.admin.username)
def test_user(self):
resp = self.api_client.get(self.USER_BASE)
self.assertEquals(resp.data['resource_uri'], self.USER_BASE)
self.assertEquals(resp.data['username'], self.admin.username)
def test_projects(self):
resp = self.api_client.get(self.REST_BASE + 'projects/')
self.assertEquals(resp.data['count'], 3)
self.assertEquals(resp.data['results'][0]['resource_uri'], self.PROJECT_BASE)
self.assertEquals(resp.data['results'][0]['name'], "QEMU")
self.assertEquals(resp.data['results'][0]['mailing_list'], "qemu-devel@nongnu.org")
self.assertEquals(resp.data['results'][1]['resource_uri'], self.SUBPROJECT_BASE)
self.assertEquals(resp.data['results'][1]['name'], "QEMU Block Layer")
self.assertEquals(resp.data['results'][1]['mailing_list'], "qemu-block@nongnu.org")
self.assertEquals(resp.data['results'][1]['parent_project'], self.PROJECT_BASE)
def test_project(self):
resp = self.api_client.get(self.PROJECT_BASE)
self.assertEquals(resp.data['resource_uri'], self.PROJECT_BASE)
self.assertEquals(resp.data['name'], "QEMU")
self.assertEquals(resp.data['mailing_list'], "qemu-devel@nongnu.org")
resp = self.api_client.get(self.SUBPROJECT_BASE)
self.assertEquals(resp.data['resource_uri'], self.SUBPROJECT_BASE)
self.assertEquals(resp.data['name'], "QEMU Block Layer")
self.assertEquals(resp.data['mailing_list'], "qemu-block@nongnu.org")
self.assertEquals(resp.data['parent_project'], self.PROJECT_BASE)
def test_update_project_head(self):
resp = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
self.p.id, '20160628014747.20971-1-famz@redhat.com')
self.api_client.login(username=self.user, password=self.password)
resp_before = self.api_client.get(self.PROJECT_BASE + "series/"+ "20160628014747.20971-1-famz@redhat.com/")
data = {
"message_ids": ["20160628014747.20971-1-famz@redhat.com"],
"old_head": "None",
"new_head": "000000"
}
resp = self.api_client.post(self.PROJECT_BASE + "update_project_head/", data=json.dumps(data), content_type='application/json')
resp_after = self.api_client.get(self.PROJECT_BASE + "series/"+ "20160628014747.20971-1-famz@redhat.com/")
self.assertEquals(resp_before.data['is_merged'], False)
self.assertEquals(resp.status_code, 200)
self.assertEquals(resp.data['count'], 1)
self.assertEquals(resp.data['new_head'], "000000")
self.assertEquals(resp_after.data['is_merged'], True)
def test_project_post_no_login(self):
data = {
'name': 'keycodemapdb',
}
resp = self.api_client.post(self.REST_BASE + 'projects/', data=data)
self.assertEquals(resp.status_code, 403)
def test_project_post_minimal(self):
data = {
'name': 'keycodemapdb',
}
self.api_client.login(username=self.user, password=self.password)
resp = self.api_client.post(self.REST_BASE + 'projects/', data=data)
self.assertEquals(resp.status_code, 201)
self.assertEquals(resp.data['resource_uri'].startswith(self.REST_BASE + 'projects/'), True)
self.assertEquals(resp.data['name'], data['name'])
resp = self.api_client.get(resp.data['resource_uri'])
self.assertEquals(resp.data['name'], data['name'])
def test_project_post(self):
self.api_client.login(username=self.user, password=self.password)
data = {
'name': 'keycodemapdb',
'mailing_list': 'qemu-devel@nongnu.org',
'prefix_tags': 'keycodemapdb',
'url': 'https://gitlab.com/keycodemap/keycodemapdb/',
'git': 'https://gitlab.com/keycodemap/keycodemapdb/',
'description': 'keycodemapdb generates code to translate key codes',
'display_order': 4321,
'parent_project': self.PROJECT_BASE,
}
resp = self.api_client.post(self.REST_BASE + 'projects/', data=data)
self.assertEquals(resp.status_code, 201)
self.assertEquals(resp.data['resource_uri'].startswith(self.REST_BASE + 'projects/'), True)
self.assertEquals(resp.data['name'], data['name'])
self.assertEquals(resp.data['mailing_list'], data['mailing_list'])
self.assertEquals(resp.data['prefix_tags'], data['prefix_tags'])
self.assertEquals(resp.data['url'], data['url'])
self.assertEquals(resp.data['git'], data['git'])
self.assertEquals(resp.data['description'], data['description'])
self.assertEquals(resp.data['display_order'], data['display_order'])
self.assertEquals(resp.data['logo'], None)
self.assertEquals(resp.data['parent_project'], self.PROJECT_BASE)
resp = self.api_client.get(resp.data['resource_uri'])
self.assertEquals(resp.data['name'], data['name'])
self.assertEquals(resp.data['mailing_list'], data['mailing_list'])
self.assertEquals(resp.data['prefix_tags'], data['prefix_tags'])
self.assertEquals(resp.data['url'], data['url'])
self.assertEquals(resp.data['git'], data['git'])
self.assertEquals(resp.data['description'], data['description'])
self.assertEquals(resp.data['display_order'], data['display_order'])
self.assertEquals(resp.data['logo'], None)
self.assertEquals(resp.data['parent_project'], self.PROJECT_BASE)
def test_project_results_list(self):
resp1 = self.api_client.get(self.PROJECT_BASE)
resp = self.api_client.get(resp1.data['results'])
self.assertEqual(resp.data['count'], len(resp.data['results']))
def test_series_single(self):
    """A one-patch series is complete and its sole patch carries the same
    subject lines as the series itself."""
    resp = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                                   self.p.id,
                                   '20160628014747.20971-1-famz@redhat.com')
    series = resp.data
    self.assertEqual(series['subject'], '[Qemu-devel] [PATCH] quorum: Only compile when supported')
    self.assertEqual(series['stripped_subject'], 'quorum: Only compile when supported')
    self.assertEqual(series['is_complete'], True)
    self.assertEqual(series['total_patches'], 1)
    self.assertEqual(len(series['replies']), 0)
    self.assertEqual(len(series['patches']), 1)
    only_patch = series['patches'][0]
    self.assertEqual(only_patch['subject'], series['subject'])
    self.assertEqual(only_patch['stripped_subject'], series['stripped_subject'])
def test_series_multiple(self):
    """A reviewed two-patch series exposes its cover-letter subject, two
    replies and two patches, each with the expected resource URI and
    subject."""
    resp = self.apply_and_retrieve('0004-multiple-patch-reviewed.mbox.gz',
                                   self.p.id, '1469192015-16487-1-git-send-email-berrange@redhat.com')
    self.assertEqual(resp.data['subject'], '[Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
    self.assertEqual(resp.data['stripped_subject'], 'Report format specific info for LUKS block driver')
    self.assertEqual(resp.data['is_complete'], True)
    self.assertEqual(resp.data['total_patches'], 2)
    self.assertEqual(len(resp.data['replies']), 2)
    self.assertEqual(len(resp.data['patches']), 2)
    # Both replies are follow-ups to the cover letter.
    self.assertEqual(resp.data['replies'][0]['resource_uri'], self.PROJECT_BASE + 'messages/5792265A.5070507@redhat.com/')
    self.assertEqual(resp.data['replies'][0]['in_reply_to'], '1469192015-16487-1-git-send-email-berrange@redhat.com')
    self.assertEqual(resp.data['replies'][0]['subject'], 'Re: [Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
    self.assertEqual(resp.data['replies'][1]['resource_uri'], self.PROJECT_BASE + 'messages/e0858c00-ccb6-e533-ee3e-9ba84ca45a7b@redhat.com/')
    self.assertEqual(resp.data['replies'][1]['in_reply_to'], '1469192015-16487-1-git-send-email-berrange@redhat.com')
    self.assertEqual(resp.data['replies'][1]['subject'], 'Re: [Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
    # The two patches appear in series order (1/2 then 2/2).
    self.assertEqual(resp.data['patches'][0]['resource_uri'], self.PROJECT_BASE + 'messages/1469192015-16487-2-git-send-email-berrange@redhat.com/')
    self.assertEqual(resp.data['patches'][0]['subject'], '[Qemu-devel] [PATCH v4 1/2] crypto: add support for querying parameters for block encryption')
    self.assertEqual(resp.data['patches'][0]['stripped_subject'], 'crypto: add support for querying parameters for block encryption')
    self.assertEqual(resp.data['patches'][1]['resource_uri'], self.PROJECT_BASE + 'messages/1469192015-16487-3-git-send-email-berrange@redhat.com/')
    self.assertEqual(resp.data['patches'][1]['subject'], '[Qemu-devel] [PATCH v4 2/2] block: export LUKS specific data to qemu-img info')
    self.assertEqual(resp.data['patches'][1]['stripped_subject'], 'block: export LUKS specific data to qemu-img info')
def test_series_incomplete(self):
    """A series missing its second patch is flagged incomplete:
    total_patches still reports 2, but only one patch is attached."""
    resp = self.apply_and_retrieve('0012-incomplete-series.mbox.gz',
                                   self.p.id,
                                   '1469192015-16487-1-git-send-email-berrange@redhat.com')
    series = resp.data
    self.assertEqual(series['subject'], '[Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
    self.assertEqual(series['stripped_subject'], 'Report format specific info for LUKS block driver')
    self.assertEqual(series['is_complete'], False)
    self.assertEqual(series['total_patches'], 2)
    self.assertEqual(len(series['replies']), 2)
    self.assertEqual(len(series['patches']), 1)
    present_patch = series['patches'][0]
    self.assertEqual(present_patch['subject'], '[Qemu-devel] [PATCH v4 1/2] crypto: add support for querying parameters for block encryption')
    self.assertEqual(present_patch['stripped_subject'], 'crypto: add support for querying parameters for block encryption')
def test_series_list(self):
    """Series listings are scoped: both applied series appear in the
    global list and in the owning project's list, while an unknown
    project id yields an empty listing.

    Fixes: drops the unused ``resp1``/``resp2`` bindings — the calls are
    kept only for their side effect of importing the two series.
    """
    self.apply_and_retrieve('0004-multiple-patch-reviewed.mbox.gz',
                            self.p.id, '1469192015-16487-1-git-send-email-berrange@redhat.com')
    self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                            self.p.id, '20160628014747.20971-1-famz@redhat.com')
    resp = self.api_client.get(self.REST_BASE + 'series/')
    self.assertEqual(resp.data['count'], 2)
    resp = self.api_client.get(self.PROJECT_BASE + 'series/')
    self.assertEqual(resp.data['count'], 2)
    # A project id that does not exist must produce an empty result set.
    resp = self.api_client.get(self.REST_BASE + 'projects/12345/series/')
    self.assertEqual(resp.data['count'], 0)
def test_series_results_list(self):
    """A series' `results` listing reports a count equal to the number of
    entries it returns."""
    series_resp = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                                          self.p.id,
                                          '20160628014747.20971-1-famz@redhat.com')
    listing = self.api_client.get(series_resp.data['results'])
    self.assertEqual(listing.data['count'], len(listing.data['results']))
def test_series_search(self):
    """Free-text and field search over series, both globally and scoped
    to a (possibly nonexistent) project."""
    resp1 = self.apply_and_retrieve('0004-multiple-patch-reviewed.mbox.gz',
                                    self.p.id, '1469192015-16487-1-git-send-email-berrange@redhat.com')
    resp2 = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                                    self.p.id, '20160628014747.20971-1-famz@redhat.com')
    # Keyword search matches only the "quorum" series.
    resp = self.api_client.get(self.REST_BASE + 'series/?q=quorum')
    self.assertEqual(resp.data['count'], 1)
    self.assertEqual(resp.data['results'][0]['resource_uri'], resp2.data['resource_uri'])
    self.assertEqual(resp.data['results'][0]['subject'], resp2.data['subject'])
    # List entries are shallow: no nested replies/patches keys.
    self.assertEqual('replies' in resp.data['results'][0], False)
    self.assertEqual('patches' in resp.data['results'][0], False)
    # A project: field search matches both series.
    resp = self.api_client.get(self.REST_BASE + 'series/?q=project:QEMU')
    self.assertEqual(resp.data['count'], 2)
    self.assertEqual(resp.data['results'][0]['resource_uri'], resp1.data['resource_uri'])
    self.assertEqual(resp.data['results'][0]['subject'], resp1.data['subject'])
    self.assertEqual('replies' in resp.data['results'][0], False)
    self.assertEqual('patches' in resp.data['results'][0], False)
    self.assertEqual(resp.data['results'][1]['resource_uri'], resp2.data['resource_uri'])
    self.assertEqual(resp.data['results'][1]['subject'], resp2.data['subject'])
    self.assertEqual('replies' in resp.data['results'][1], False)
    self.assertEqual('patches' in resp.data['results'][1], False)
    # Searches scoped to a nonexistent project id find nothing.
    resp = self.api_client.get(self.REST_BASE + 'projects/12345/series/?q=quorum')
    self.assertEqual(resp.data['count'], 0)
    resp = self.api_client.get(self.REST_BASE + 'projects/12345/series/?q=project:QEMU')
    self.assertEqual(resp.data['count'], 0)
def test_series_delete(self):
    """Deleting a series requires authentication (403 when anonymous,
    204 when logged in) and removes both the series and its replies
    listing.  All responses are captured first and asserted together at
    the end, so the request order matters."""
    test_message_id = '1469192015-16487-1-git-send-email-berrange@redhat.com'
    series = self.apply_and_retrieve('0004-multiple-patch-reviewed.mbox.gz', self.p.id,
                                     test_message_id)
    message = series.data['message']
    resp_before = self.api_client.get(self.REST_BASE + 'projects/' + str(self.p.id)
                                      + '/series/' + test_message_id + '/')
    resp_reply_before = self.api_client.get(message + 'replies/')
    # DELETE without a session must be rejected and leave the series intact.
    resp_without_login = self.api_client.delete(self.REST_BASE + 'projects/' + str(self.p.id)
                                                + '/series/' + test_message_id + '/')
    self.api_client.login(username=self.user, password=self.password)
    resp = self.api_client.delete(self.REST_BASE + 'projects/' + str(self.p.id)
                                  + '/series/' + test_message_id + '/')
    self.api_client.logout()
    # After the authenticated delete, both the series and its replies are gone.
    resp_after = self.api_client.get(self.REST_BASE + 'projects/' + str(self.p.id)
                                     + '/series/' + test_message_id + '/')
    resp_reply_after = self.api_client.get(message + 'replies/')
    self.assertEqual(resp_before.status_code, 200)
    self.assertEqual(resp_reply_before.status_code, 200)
    self.assertEqual(resp_without_login.status_code, 403)
    self.assertEqual(resp.status_code, 204)
    self.assertEqual(resp_after.status_code, 404)
    self.assertEqual(resp_reply_after.status_code, 404)
def test_create_message(self):
    """An authenticated user can POST a JSON-encoded message to a
    project's messages endpoint; the message becomes retrievable by its
    Message-Id."""
    # NOTE(review): the .gz fixture is opened in text mode — presumably
    # get_data_path() returns a path to already-decompressed data; confirm.
    dp = self.get_data_path("0022-another-simple-patch.json.gz")
    with open(dp, "r") as f:
        data = f.read()
    self.api_client.login(username=self.user, password=self.password)
    resp = self.api_client.post(self.PROJECT_BASE + "messages/", data, content_type='application/json')
    self.assertEqual(resp.status_code, 201)
    resp_get = self.api_client.get(self.PROJECT_BASE + "messages/20171023201055.21973-11-andrew.smirnov@gmail.com/")
    self.assertEqual(resp_get.status_code, 200)
    self.assertEqual(resp.data['subject'], "[Qemu-devel] [PATCH v2 10/27] imx_fec: Reserve full 4K "
                                           "page for the register file")
def test_create_text_message(self):
    """An authenticated user can POST a raw mbox (message/rfc822) to a
    project's messages endpoint; the cover letter becomes retrievable."""
    fixture_path = self.get_data_path("0004-multiple-patch-reviewed.mbox.gz")
    with open(fixture_path, "r") as fixture:
        payload = fixture.read()
    self.api_client.login(username=self.user, password=self.password)
    resp = self.api_client.post(self.PROJECT_BASE + "messages/", payload, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 201)
    fetched = self.api_client.get(self.PROJECT_BASE + "messages/1469192015-16487-1-git-send-email-berrange@redhat.com/")
    self.assertEqual(fetched.status_code, 200)
    self.assertEqual(resp.data['subject'], "[Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver")
def test_create_message_without_project_pk(self):
    """POSTing JSON to the project-less messages endpoint files the
    message into every matching project (count == 2 here) and makes it
    retrievable under both."""
    dp = self.get_data_path("0024-multiple-project-patch.json.gz")
    with open(dp, "r") as f:
        data = f.read()
    self.api_client.login(username=self.user, password=self.password)
    resp = self.api_client.post(self.REST_BASE + "messages/", data, content_type='application/json')
    self.assertEqual(resp.status_code, 201)
    self.assertEqual(resp.data['count'], 2)
    resp_get = self.api_client.get(self.PROJECT_BASE + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get.status_code, 200)
    self.assertEqual(resp_get.data['subject'], "[Qemu-devel] [PATCH 1/7] SecurityPkg/Tcg2Pei: drop Tcg2PhysicalPresenceLib dependency")
    resp_get2 = self.api_client.get(self.PROJECT_BASE_2 + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get2.status_code, 200)
def test_create_text_message_without_project_pk(self):
    """POSTing a raw mbox to the project-less messages endpoint files the
    message into every matching project and makes it retrievable under
    both."""
    fixture_path = self.get_data_path("0023-multiple-project-patch.mbox.gz")
    with open(fixture_path, "r") as fixture:
        payload = fixture.read()
    self.api_client.login(username=self.user, password=self.password)
    resp = self.api_client.post(self.REST_BASE + "messages/", payload, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 201)
    self.assertEqual(resp.data['count'], 2)
    first_project = self.api_client.get(self.PROJECT_BASE + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(first_project.status_code, 200)
    self.assertEqual(first_project.data['subject'], "[Qemu-devel] [PATCH 1/7] SecurityPkg/Tcg2Pei: drop Tcg2PhysicalPresenceLib dependency")
    second_project = self.api_client.get(self.PROJECT_BASE_2 + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(second_project.status_code, 200)
def test_without_login_create_message(self):
    """Anonymous clients may not create messages: the POST is rejected
    with 403 Forbidden."""
    fixture_path = self.get_data_path("0022-another-simple-patch.json.gz")
    with open(fixture_path, "r") as fixture:
        payload = fixture.read()
    resp = self.api_client.post(self.PROJECT_BASE + "messages/", payload, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 403)
def test_non_maintainer_create_message(self):
    """A logged-in user who maintains neither project gets a 201 but the
    message is filed into zero projects, so it is retrievable in neither."""
    self.create_user(username="test", password="userpass")
    self.api_client.login(username="test", password="userpass")
    dp = self.get_data_path("0023-multiple-project-patch.mbox.gz")
    with open(dp, "r") as f:
        data = f.read()
    resp = self.api_client.post(self.REST_BASE + "messages/", data, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 201)
    # Accepted, but matched against no project the user maintains.
    self.assertEqual(resp.data['count'], 0)
    resp_get = self.api_client.get(self.PROJECT_BASE + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get.status_code, 404)
    resp_get2 = self.api_client.get(self.PROJECT_BASE_2 + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get2.status_code, 404)
def test_maintainer_create_message(self):
    """A user who maintains exactly one of the two matching projects gets
    the message filed into that project only (count == 1)."""
    test = self.create_user(username="test", password="userpass")
    self.api_client.login(username="test", password="userpass")
    # NOTE(review): direct assignment to a many-to-many relation was
    # removed in Django 2.0 (use .maintainers.set([test]) there) —
    # confirm the Django version this suite targets.
    self.p.maintainers = (test, )
    dp = self.get_data_path("0023-multiple-project-patch.mbox.gz")
    with open(dp, "r") as f:
        data = f.read()
    resp = self.api_client.post(self.REST_BASE + "messages/", data, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 201)
    self.assertEqual(resp.data['count'], 1)
    resp_get = self.api_client.get(self.PROJECT_BASE + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get.status_code, 200)
    # The project the user does not maintain must not receive the message.
    resp_get2 = self.api_client.get(self.PROJECT_BASE_2 + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get2.status_code, 404)
def test_importer_create_message(self):
    """A member of the 'importers' group can file a message into every
    matching project (count == 2) without being a maintainer of either."""
    dp = self.get_data_path("0023-multiple-project-patch.mbox.gz")
    with open(dp, "r") as f:
        data = f.read()
    test = self.create_user(username="test", password="userpass", groups=['importers'])
    self.api_client.login(username="test", password="userpass")
    resp = self.api_client.post(self.REST_BASE + "messages/", data, content_type='message/rfc822')
    self.assertEqual(resp.status_code, 201)
    self.assertEqual(resp.data['count'], 2)
    resp_get = self.api_client.get(self.PROJECT_BASE + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get.status_code, 200)
    self.assertEqual(resp_get.data['subject'], "[Qemu-devel] [PATCH 1/7] SecurityPkg/Tcg2Pei: drop Tcg2PhysicalPresenceLib dependency")
    resp_get2 = self.api_client.get(self.PROJECT_BASE_2 + "messages/20180223132311.26555-2-marcandre.lureau@redhat.com/")
    self.assertEqual(resp_get2.status_code, 200)
def test_message(self):
    """A patch message's serialized `mbox` field matches the model's
    get_mbox() output."""
    series = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                                     self.p.id,
                                     '20160628014747.20971-1-famz@redhat.com')
    patch_uri = series.data['patches'][0]['resource_uri']
    resp = self.api_client.get(patch_uri)
    self.assertEqual(resp.data['mbox'], Message.objects.all()[0].get_mbox())
def test_message_mbox(self):
    """The `mbox/` sub-resource of a patch serves the model's get_mbox()
    output directly."""
    series = self.apply_and_retrieve('0001-simple-patch.mbox.gz',
                                     self.p.id,
                                     '20160628014747.20971-1-famz@redhat.com')
    patch_uri = series.data['patches'][0]['resource_uri']
    resp = self.client.get(patch_uri + 'mbox/')
    self.assertEqual(resp.data, Message.objects.all()[0].get_mbox())
def test_address_serializer(self):
    """AddressSerializer validates both a string name and a non-string
    name, coercing the latter to text in validated_data."""
    string_payload = {"name": "Shubham", "address": "shubhamjain7495@gmail.com"}
    string_serializer = AddressSerializer(data=string_payload)
    string_ok = string_serializer.is_valid()
    string_validated = string_serializer.validated_data
    numeric_payload = {"name": 123, "address": "shubhamjain7495@gmail.com"}
    numeric_serializer = AddressSerializer(data=numeric_payload)
    numeric_ok = numeric_serializer.is_valid()
    numeric_validated = numeric_serializer.validated_data
    self.assertEqual(string_ok, True)
    self.assertEqual(string_validated,
                     OrderedDict([('name', 'Shubham'), ('address', 'shubhamjain7495@gmail.com')]))
    self.assertEqual(numeric_ok, True)
    # The integer name 123 is coerced to the string '123'.
    self.assertEqual(numeric_validated,
                     OrderedDict([('name', '123'), ('address', 'shubhamjain7495@gmail.com')]))
def test_message_replies(self):
    """The cover letter's `replies/` listing contains all four follow-ups
    (the two patches and the two review replies), each with the expected
    resource URI and subject."""
    series = self.apply_and_retrieve('0004-multiple-patch-reviewed.mbox.gz',
                                     self.p.id, '1469192015-16487-1-git-send-email-berrange@redhat.com')
    message = series.data['message']
    resp = self.api_client.get(message + 'replies/')
    self.assertEqual(resp.data['count'], 4)
    self.assertEqual(resp.data['results'][0]['resource_uri'], self.PROJECT_BASE + 'messages/1469192015-16487-2-git-send-email-berrange@redhat.com/')
    self.assertEqual(resp.data['results'][0]['subject'], '[Qemu-devel] [PATCH v4 1/2] crypto: add support for querying parameters for block encryption')
    self.assertEqual(resp.data['results'][1]['resource_uri'], self.PROJECT_BASE + 'messages/1469192015-16487-3-git-send-email-berrange@redhat.com/')
    self.assertEqual(resp.data['results'][1]['subject'], '[Qemu-devel] [PATCH v4 2/2] block: export LUKS specific data to qemu-img info')
    self.assertEqual(resp.data['results'][2]['resource_uri'], self.PROJECT_BASE + 'messages/5792265A.5070507@redhat.com/')
    self.assertEqual(resp.data['results'][2]['subject'], 'Re: [Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
    self.assertEqual(resp.data['results'][3]['resource_uri'], self.PROJECT_BASE + 'messages/e0858c00-ccb6-e533-ee3e-9ba84ca45a7b@redhat.com/')
    self.assertEqual(resp.data['results'][3]['subject'], 'Re: [Qemu-devel] [PATCH v4 0/2] Report format specific info for LUKS block driver')
def test_project_filter(self):
    """Filtering projects by exact name returns the matching project.

    Fixes: replaces the deprecated ``assertEquals`` alias with
    ``assertEqual``.
    """
    resp = self.api_client.get(self.REST_BASE + 'projects/?name=QEMU')
    project = resp.data['results'][0]
    self.assertEqual(project['name'], "QEMU")
    self.assertEqual(project['mailing_list'], "qemu-devel@nongnu.org")
def test_schema(self):
    """The API schema endpoint is served successfully (HTTP 200)."""
    schema_resp = self.api_client.get(self.REST_BASE + 'schema/')
    self.assertEqual(schema_resp.status_code, 200)
# Allow running this test module directly rather than via the test runner.
if __name__ == '__main__':
    main()
| 58.679825
| 156
| 0.662419
| 3,447
| 26,758
| 5.003481
| 0.086452
| 0.0603
| 0.100249
| 0.081347
| 0.862179
| 0.831681
| 0.78547
| 0.764191
| 0.707601
| 0.658259
| 0
| 0.051863
| 0.185739
| 26,758
| 455
| 157
| 58.808791
| 0.739719
| 0.007923
| 0
| 0.445876
| 0
| 0.023196
| 0.287749
| 0.116856
| 0
| 0
| 0
| 0
| 0.425258
| 1
| 0.082474
| false
| 0.036082
| 0.033505
| 0
| 0.118557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
faa728a70a6574486f0ab1610406deacb30011a2
| 276
|
py
|
Python
|
challenges/queue-with-stacks/conftest.py
|
tyler-fishbone/data-structures-and-algorithms
|
29790f2672d3ddb0aadf62725f28180b092f4568
|
[
"MIT"
] | null | null | null |
challenges/queue-with-stacks/conftest.py
|
tyler-fishbone/data-structures-and-algorithms
|
29790f2672d3ddb0aadf62725f28180b092f4568
|
[
"MIT"
] | 4
|
2018-03-22T19:19:11.000Z
|
2018-04-11T00:35:26.000Z
|
challenges/queue-with-stacks/conftest.py
|
tyler-fishbone/data-structures-and-algorithms
|
29790f2672d3ddb0aadf62725f28180b092f4568
|
[
"MIT"
] | null | null | null |
import pytest
# from node import Node
from queue_with_stacks import Queue
@pytest.fixture
def one_nine_queue():
    """A Queue pre-loaded with the integers 1 through 9."""
    return Queue(list(range(1, 10)))
@pytest.fixture
def empty_queue():
    """A Queue with no elements at all."""
    return Queue([])
@pytest.fixture
def one_node_queue():
    """A Queue holding exactly one element (the integer 1)."""
    return Queue([1])
| 18.4
| 45
| 0.695652
| 45
| 276
| 4.111111
| 0.488889
| 0.210811
| 0.259459
| 0.227027
| 0.259459
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04386
| 0.173913
| 276
| 15
| 46
| 18.4
| 0.767544
| 0.076087
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| true
| 0
| 0.181818
| 0.272727
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
faa9d4c3cb09e836de932020266dfa501142d67e
| 53,562
|
py
|
Python
|
objects.py
|
veeral-agarwal/DX-BALL_2.0
|
e97313825207eccb9670c3172c19e1081fc10b7c
|
[
"MIT"
] | null | null | null |
objects.py
|
veeral-agarwal/DX-BALL_2.0
|
e97313825207eccb9670c3172c19e1081fc10b7c
|
[
"MIT"
] | null | null | null |
objects.py
|
veeral-agarwal/DX-BALL_2.0
|
e97313825207eccb9670c3172c19e1081fc10b7c
|
[
"MIT"
] | null | null | null |
import global_variables
import numpy as np
import config
from colorama import Fore, init , Back , Style
init()
import time
from time import time,sleep
import random
import math
import os
class Objects():
    """Base class for drawable board entities.

    An object is a rectangular grid of characters (`shape`) anchored at
    (position_x, position_y) in global_variables.main_board.matrix.

    Fixes: the three identical drawing loops in render() are collapsed
    into a single _draw() helper; branch selection is unchanged.
    """

    def __init__(self, obj, xpos, ypos):
        """Store the sprite grid `obj` and its top-left board position."""
        self.position_x = xpos
        self.position_y = ypos
        self.height = len(obj)
        self.width = len(obj[0])
        self.shape = obj

    def update_x_position(self, x):
        """Clamp position_x into [4, 90], then shift it by `x`.

        NOTE(review): clamping happens before the move, so one call can
        still leave position_x outside the clamp range — confirm intent.
        """
        if self.position_x <= 4:
            self.position_x = 4
        if self.position_x >= 90:
            self.position_x = 90
        if self.position_x > 1 and self.position_x <= 90:
            self.position_x += x

    def update_y_position(self, y):
        """Shift the vertical position by `y`."""
        self.position_y += y

    def current_position_x(self):
        """Return the current column of the object's anchor."""
        return self.position_x

    def current_position_y(self):
        """Return the current row of the object's anchor."""
        return self.position_y

    def clear(self):
        """Blank out every board cell this object currently occupies."""
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = " "

    def _draw(self):
        """Paint the current shape onto the board on a cyan background."""
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = (
                    Back.CYAN + self.shape[j][i])

    def render(self):
        """Pick the paddle sprite for the active power-ups, then draw it.

        Power-up flags (global_variables.active_powerupflag):
        index 1 = expand, index 4 = shrink, index 7 = shooting variant.
        """
        flags = global_variables.active_powerupflag
        if flags[4] == 1 and flags[1] == 0:
            # Shrink only.
            self.shape = config.shrink_p_shooting if flags[7] == 1 else config.shrink_p
            self.width = len(config.shrink_p[0])
        elif flags[1] == 1 and flags[4] == 0:
            # Expand only.
            self.shape = config.expand_p_shooting if flags[7] == 1 else config.expand_p
            self.width = len(config.expand_p[0])
        elif (flags[1] == 1 and flags[4] == 1) or (flags[1] == 0 and flags[4] == 0):
            # Both or neither: the effects cancel, use the normal paddle.
            self.shape = config.paddle_shooting if flags[7] == 1 else config.paddle
            self.width = len(config.paddle[0])
        else:
            # No branch matched: the original drew nothing in this case.
            return
        self._draw()
class Paddle(Objects):
    """The player-controlled paddle; tracks score and remaining lives."""

    def __init__(self, obj, xpos, ypos, lives):
        # NOTE(review): the `lives` argument is accepted but never used —
        # every paddle starts with 5 lives regardless.  Confirm intent.
        super().__init__(obj, xpos, ypos)
        self.initial_lives = 5
        self.score = 0

    def lives(self):
        """Return the paddle's lives counter."""
        return self.initial_lives
class Ball(Objects):
    """The ball: advances by (speed_x, speed_y) each frame and bounces off
    the side/top walls and the paddle.

    Fixes: collision_with_paddle() originally repeated the same
    sound/sticky/bounce body once per (paddle size x hit offset) — dozens
    of near-identical elif branches.  It is now a single body driven by
    per-size deflection tables; the per-hit behaviour is unchanged.
    """

    # Deflection tables: horizontal offset of the ball from the paddle's
    # left edge -> change applied to speed_x after the bounce, one table
    # per paddle size (selected by active_powerupflag[1]/[4]).
    # The shrunk table's +1 at offset 0 reproduces the original
    # `speed_x -= -1`.
    _DEFLECT_NORMAL = {0: -2, 1: -1, 2: 0, 3: 1, 4: 2}
    _DEFLECT_SHRUNK = {0: 1, 1: 0, 2: 1}
    _DEFLECT_EXPANDED = {0: -2, 1: -1, 2: 0, 3: 1, 4: 2, 5: 3}

    def __init__(self, obj, xpos, ypos):
        super().__init__(obj, xpos, ypos)
        self.speed_x = 0
        self.speed_y = 0
        self.begin_time = time()
        # Guards the one-shot fast-ball speed boost in render().
        self.onetimetempflag = 0

    def speed(self):
        """Restore the speed saved before the ball was parked on the paddle."""
        self.speed_x = global_variables.ball_privious_speed_x
        self.speed_y = global_variables.ball_privious_speed_y

    def collision_with_wall(self):
        """Bounce off side/top walls; falling past the bottom costs a life."""
        if self.position_x + self.speed_x <= 1 or self.position_x + self.speed_x >= 96:
            self.speed_x *= -1
            if global_variables.wantsound == 1:
                os.system('aplay -q ./sounds/brickball.wav&')
        if self.position_y <= 4:
            self.speed_y *= -1
            if global_variables.wantsound == 1:
                os.system('aplay -q ./sounds/brickball.wav&')
        elif self.position_y + self.speed_y >= 37:
            # Ball fell below the paddle: play the losing sound, reset the
            # board state (default() is defined elsewhere in this module)
            # and stop the ball.
            if global_variables.wantsound == 1:
                os.system('aplay -q ./sounds/losinglife.wav&')
            default()
            self.speed_x = 0
            self.speed_y = 0

    def clear(self):
        """Blank out every board cell the ball currently occupies."""
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = " "

    def render(self):
        """Advance the ball one step (handling collisions) and draw it."""
        # Fast-ball power-up: bump |speed_x| exactly once per activation.
        if global_variables.active_powerupflag[2] == 1 and self.onetimetempflag == 0:
            if self.speed_x < 0:
                self.speed_x -= 1
            else:
                self.speed_x += 1
            self.onetimetempflag = 1
        self.collision_with_wall()
        self.collision_with_paddle()
        self.position_x += self.speed_x
        self.position_y -= self.speed_y
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = self.shape[j][i]

    def collision_with_paddle(self):
        """Bounce off the paddle, steering speed_x by where the ball hit.

        With the sticky-paddle power-up (flag 3) the ball is parked on
        the paddle instead; otherwise it bounces, take_down_bricks() is
        invoked, and speed_x is adjusted from the deflection table.
        """
        flags = global_variables.active_powerupflag
        # Pick the table for the current paddle size; the three conditions
        # are mutually exclusive and exhaustive for 0/1 flag values.
        if (flags[1] == 1 and flags[4] == 1) or (flags[1] == 0 and flags[4] == 0):
            table = self._DEFLECT_NORMAL
        elif flags[4] == 1 and flags[1] == 0:
            table = self._DEFLECT_SHRUNK
        elif flags[1] == 1 and flags[4] == 0:
            table = self._DEFLECT_EXPANDED
        else:
            return
        # The ball only interacts with the paddle on rows 35/36.
        if self.position_y != 35 and self.position_y != 36:
            return
        offset = self.position_x - global_variables.main_paddle.position_x
        if offset not in table:
            return
        if global_variables.wantsound == 1:
            os.system('aplay -q ./sounds/losinglife.wav&')
        if flags[3] == 1:
            # Sticky paddle: remember the (reflected) speed and park the ball.
            global_variables.ball_privious_speed_x = self.speed_x
            global_variables.ball_privious_speed_y = -1 * self.speed_y
            self.speed_y = 0
            self.speed_x = 0
            global_variables.flag = 0
            global_variables.main_ball.position_y = 34
        else:
            take_down_bricks()
            self.speed_y *= -1
            self.speed_x += table[offset]
class Brick(Objects):
    """A standard destructible brick.

    ``weight`` counts the remaining ball hits (``np.inf`` marks an
    unbreakable wall brick); ``contain_powerup`` is the power-up id (1-7)
    released when the brick is destroyed.
    """
    def __init__(self, obj , xpos , ypos, weight , power ):
        super().__init__(obj , xpos , ypos)
        self.weight = weight            # remaining hits; np.inf = indestructible
        self.score = 0
        self.flag = 0
        self.contain_powerup = power    # power-up id (1-7) dropped on destruction
        self.isexplosive = False        # plain brick (Exploding_bricks sets True)
    def render(self):
        """Paint the brick onto the board matrix, colour-coded by weight."""
        for i in range(self.width):
            for j in range(self.height):
                # Draw only while the brick is alive and not inside an
                # explosion blast; otherwise kill it and blank the cell.
                if (self.weight > 0) and ((self.position_x,self.position_y) not in global_variables.explosion_coordinates):
                    if(self.position_x,self.position_y) not in global_variables.fire_ball_list :
                        if self.weight == 1:
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = ( Back.BLUE + Fore.BLUE + self.shape[j][i] )
                        elif self.weight == 2:
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = ( Fore.GREEN + Back.GREEN + self.shape[j][i])
                        elif self.weight == 3:
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = ( Fore.RED + Back.RED + self.shape[j][i] )
                        elif self.weight == 4:
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = ( Fore.MAGENTA + Back.MAGENTA + self.shape[j][i] )
                        elif self.weight == np.inf:
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = ( Fore.WHITE + Back.WHITE + self.shape[j][i] )
                        else:
                            # Unexpected positive weight: treat as destroyed.
                            self.weight = 0
                            global_variables.main_board.matrix[j+self.position_y][i+self.position_x] =' '
                    else:
                        # Torched by the fire-ball power-up.
                        self.weight = 0
                        global_variables.main_board.matrix[j+self.position_y][i+self.position_x] =' '
    def collision_ball_brick(self):
        """Resolve a ball or bullet hit on this brick.

        The first three branches cover the ball striking the brick's left,
        middle and right cell; the final branch covers the paddle's bullet.
        Power-up index 5 is thru-ball (no bounce, instant kill); index 6 is
        fire-ball (torches the neighbourhood via fireball_brick()).
        NOTE(review): the three ball branches are near-duplicates with small
        inconsistencies (e.g. the in-air flag for power-up 7 is set in the
        third branch's thru-ball path but commented out in the others).
        """
        if self.isexplosive == False:
            if (self.position_x == global_variables.main_ball.position_x and self.position_y == global_variables.main_ball.position_y) :
                if self.weight>0:
                    if global_variables.wantsound == 1:
                        os.system('aplay -q ./sounds/brickball.wav&')
                if (self.weight > 0 and self.weight<4) or (self.weight == np.inf):
                    if global_variables.active_powerupflag[6] == 1:
                        fireball_brick(self.position_x,self.position_y)
                    # Bounce the ball unless thru-ball is active.
                    # NOTE(review): both angle sub-branches do the same thing,
                    # so the angle computation has no observable effect here.
                    if global_variables.main_ball.speed_x != 0:
                        angle = math.degrees( math.atan(global_variables.main_ball.speed_y/global_variables.main_ball.speed_x) )
                        if ( angle>-45 and angle<45 ) :
                            if global_variables.active_powerupflag[5] == 0:
                                global_variables.main_ball.speed_y *= -1
                        else:
                            if global_variables.active_powerupflag[5] == 0:
                                global_variables.main_ball.speed_y *= -1
                    else:
                        if global_variables.active_powerupflag[5] == 0:
                            global_variables.main_ball.speed_y *= -1
                    if self.weight == 1:
                        # Final hit: score and release this brick's power-up.
                        config.score += 1
                        if self.contain_powerup == 5:
                            global_variables.powerup_objects.append(Powerup(config.thru_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 4:
                            global_variables.powerup_objects.append(Powerup(config.shrink_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 1:
                            global_variables.powerup_objects.append(Powerup(config.expand_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 2:
                            global_variables.powerup_objects.append(Powerup(config.fast_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 3:
                            global_variables.powerup_objects.append(Powerup(config.paddle_grab , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 6:
                            global_variables.powerup_objects.append(Powerup(config.fire_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 7:
                            global_variables.powerup_objects.append(Powerup(config.shooting_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x))
                            global_variables.inair_powerupflag[self.contain_powerup] = 1
                        global_variables.main_paddle.score += 1
                    if global_variables.active_powerupflag[5] == 1:
                        # Thru-ball: the brick dies in one pass regardless of weight.
                        self.weight = 0
                        config.score += 1
                        if self.contain_powerup == 5:
                            global_variables.powerup_objects.append(Powerup(config.thru_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                        if self.contain_powerup == 4:
                            global_variables.powerup_objects.append(Powerup(config.shrink_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                            # global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 1:
                            global_variables.powerup_objects.append(Powerup(config.expand_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                        if self.contain_powerup == 2:
                            global_variables.powerup_objects.append(Powerup(config.fast_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                        if self.contain_powerup == 3:
                            global_variables.powerup_objects.append(Powerup(config.paddle_grab , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                        if self.contain_powerup == 6:
                            global_variables.powerup_objects.append(Powerup(config.fire_ball , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                            # global_variables.inair_powerupflag[self.contain_powerup] = 1
                        if self.contain_powerup == 7:
                            global_variables.powerup_objects.append(Powerup(config.shooting_paddle , self.position_x , self.position_y-2 , self.contain_powerup , global_variables.main_ball.speed_x ))
                            # global_variables.inair_powerupflag[self.contain_powerup] = 1
                    else:
                        self.weight -= 1
            elif (self.position_x+1 == global_variables.main_ball.position_x and self.position_y == global_variables.main_ball.position_y):
                if self.weight>0:
                    if global_variables.wantsound == 1:
                        os.system('aplay -q ./sounds/losinglife.wav&')
                if global_variables.active_powerupflag[3] == 1:
                    global_variables.ball_privious_speed_x = self.speed_x
                    global_variables.ball_privious_speed_y = -1*self.speed_y
                    self.speed_y = 0
                    self.speed_x = 0
                    global_variables.flag = 0
                    global_variables.main_ball.position_y = 34
                else:
                    take_down_bricks()
                    self.speed_y *= -1
                    self.speed_x +=3
class Exploding_bricks(Objects):
    """A brick that detonates on contact with the ball, wiping out the cells
    in its blast footprint (see explosion_coor())."""
    def __init__(self , obj , xpos , ypos):
        super().__init__(obj , xpos , ypos)
        self.strength = 1          # 0 once detonated
        self.isexplosive = True
        self.flag = 0              # 1 once this brick has chain-detonated
        self.onetimetempflag = 0
    def clear(self):
        """Blank every board cell this brick occupies."""
        for col in range(self.width):
            for row in range(self.height):
                global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = " "
    def render(self):
        """Draw the brick, chain-detonating if a nearby blast reached it."""
        if self.flag == 0 and (self.position_x, self.position_y) in global_variables.explosion_coordinates:
            self.flag = 1
            self.strength = 0
            explosion_coor(self.position_x, self.position_y)
        intact = ((self.position_x, self.position_y) not in global_variables.explosion_coordinates
                  and global_variables.exploding_bricks_flag == 0)
        for col in range(self.width):
            for row in range(self.height):
                cell = self.shape[row][col] if intact else ' '
                global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = cell
    def collision_ball_brick(self):
        """Detonate when the ball touches any of the brick's three cells."""
        ball = global_variables.main_ball
        if ball.position_y == self.position_y and ball.position_x in (
                self.position_x, self.position_x + 1, self.position_x + 2):
            self.strength = 0
            explosion_coor(self.position_x, self.position_y)
            global_variables.exploding_bricks_flag = 1
            for col in range(self.width):
                for row in range(self.height):
                    global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = ' '
class Rainbow_bricks(Objects):
    """A brick whose weight (and therefore colour) is re-rolled every frame
    until it is first hit, producing a flashing 'rainbow' effect."""
    def __init__(self , obj , xpos , ypos):
        super().__init__(obj , xpos , ypos)
        self.collisionhappened = 0   # 1 freezes the colour cycling
        self.weight = 1
    def collision_ball_brick(self):
        """Handle a ball or bullet hit; the first hit stops the colour cycle."""
        ball = global_variables.main_ball
        bullet = global_variables.main_bullet
        if ball.position_y == self.position_y and ball.position_x in (
                self.position_x, self.position_x + 1, self.position_x + 2):
            if self.weight > 0:
                if global_variables.wantsound == 1:
                    os.system('aplay -q ./sounds/brickball.wav&')
                self.collisionhappened = 1
                if self.weight > 0:
                    ball.speed_y *= -1
                    self.weight -= 1
        elif bullet.position_y == self.position_y and bullet.position_x in (
                self.position_x, self.position_x + 1, self.position_x + 2):
            if self.weight > 0:
                if global_variables.wantsound == 1:
                    os.system('aplay -q ./sounds/brickball.wav&')
                if self.weight == 1:
                    config.score += 1
                    bullet.speed_y = 0
                    bullet.shape = [[' ']]
                    bullet.position_y = 4
                elif self.weight > 1:
                    bullet.speed_y = 0
                    bullet.shape = [[' ']]
                    bullet.position_y = 4
                self.collisionhappened = 1
                if self.weight > 0:
                    ball.speed_y *= -1
                    self.weight -= 1
    def render(self):
        """Re-roll the weight while untouched, then paint colour-coded cells."""
        if self.collisionhappened == 0:
            self.weight = random.randint(1,3)
        palette = {
            1: Back.BLUE + Fore.BLUE,
            2: Fore.GREEN + Back.GREEN,
            3: Fore.RED + Back.RED,
            4: Fore.MAGENTA + Back.MAGENTA,
            np.inf: Fore.WHITE + Back.WHITE,
        }
        here = (self.position_x, self.position_y)
        for col in range(self.width):
            for row in range(self.height):
                if self.weight > 0 and here not in global_variables.explosion_coordinates:
                    if here in global_variables.fire_ball_list or self.weight not in palette:
                        # Torched or unexpected weight: destroy and blank.
                        self.weight = 0
                        global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = ' '
                    else:
                        global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = palette[self.weight] + self.shape[row][col]
class UFO(Objects):
    """Boss enemy: shadows the paddle horizontally and spawns extra brick
    rows as it loses lives."""
    def __init__(self,obj,xpos,ypos):
        super().__init__(obj,xpos,ypos)
        # NOTE(review): presumably super().__init__ already stores the
        # position, making these assignments redundant — confirm in Objects.
        self.position_x = xpos
        self.position_y = ypos
        self.lives = 10
    def collision_with_ball(self):
        """Lose a life when the ball overlaps the UFO body (2 rows x 5 cols)."""
        if global_variables.main_ball.position_y == self.position_y or global_variables.main_ball.position_y == self.position_y+1:
            if global_variables.main_ball.position_x>=self.position_x and global_variables.main_ball.position_x <=self.position_x+4:
                self.lives-=1
                # Spawn defensive brick rows at 9 and 7 remaining lives.
                if self.lives == 9 :
                    for i in range(0,90,3):
                        global_variables.b1.append(Brick(config.brick,i+3,8,random.randint(1,3),8))
                if self.lives == 7:
                    for i in range(0,90,3):
                        global_variables.b2.append(Brick(config.brick,i+3,9,random.randint(1,3),8))
                global_variables.main_ball.speed_y*=-1
    def collision_with_bullet(self):
        """Lose a life when the paddle's bullet hits the UFO."""
        if global_variables.main_bullet.position_y == self.position_y or global_variables.main_bullet.position_y == self.position_y+1:
            if global_variables.main_bullet.position_x>=self.position_x and global_variables.main_bullet.position_x <=self.position_x+4:
                self.lives-=1
                # NOTE(review): lives starts at 10 and only decreases, so the
                # 50/25 thresholds below can never fire (the ball handler uses
                # 9/7). Looks like a leftover from a higher-lives variant —
                # confirm the intended values before changing.
                if self.lives == 50 :
                    for i in range(0,90,3):
                        global_variables.b1.append(Brick(config.brick,i+3,8,random.randint(1,3),8))
                if self.lives == 25:
                    for i in range(0,90,3):
                        global_variables.b2.append(Brick(config.brick,i+3,9,random.randint(1,3),8))
                global_variables.main_bullet.speed_y = 0
                global_variables.main_bullet.position_y = 4
    def render(self):
        """Run collision checks, track the paddle's x, and repaint the UFO."""
        self.collision_with_ball()
        self.collision_with_bullet()
        # The UFO mirrors the paddle's horizontal position.
        self.position_x = global_variables.main_paddle.position_x
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j+self.position_y][i+self.position_x] = self.shape[j][i]
class Bomb(Objects):
    """Projectile dropped by the UFO; re-armed at the UFO roughly every five
    seconds and costing the player a life when it lands on the paddle."""
    def __init__(self,obj,xpos,ypos):
        super().__init__(obj,xpos,ypos)
        self.speed_y = -1
        self.speed_flag = 0   # 1 => bomb parked/inert
    def render(self):
        """Advance the bomb one step and repaint it on the board."""
        now = time()
        # Re-arm at the UFO's position about once every 5 seconds.
        if int(now) % 5 == 0:
            self.position_x = global_variables.main_ufo.position_x
            self.position_y = global_variables.main_ufo.position_y
            self.speed_y = -1
            self.speed_flag = 0
            self.shape = [['0']]
        if self.speed_flag == 0:
            self.position_y -= self.speed_y
            self.collision_with_paddle()
        for col in range(self.width):
            for row in range(self.height):
                global_variables.main_board.matrix[row+self.position_y][col+self.position_x] = self.shape[row][col]
    def collision_with_paddle(self):
        """Deduct a life and park the bomb when it reaches the paddle row."""
        paddle = global_variables.main_paddle
        if self.position_y in (35, 36):
            if paddle.position_x <= self.position_x <= paddle.position_x + paddle.width:
                self.speed_flag = 1
                self.position_y = 2
                self.shape = [[' ']]
                config.lives -= 1
        if self.position_y > 36:
            # Fell past the floor: stop it.
            self.speed_y = 0
            self.speed_flag = 1
class Bullet(Objects):
    """Projectile fired upward from the paddle (shooting-paddle power-up)."""
    def __init__(self,obj,xpos,ypos):
        super().__init__(obj,xpos,ypos)
        self.speed_y = 1
        self.speed_flag = 0   # 1 => bullet parked/invisible
    def render(self):
        """Advance the bullet one step and repaint it on the board."""
        now = time()
        # Park the bullet once it reaches the top of the play field.
        if self.position_y < 5:
            self.speed_y = 0
            self.shape = [[' ']]
            self.speed_flag = 1
        # Re-fire from the paddle about once every 5 seconds.
        if int(now) % 5 == 0:
            if global_variables.wantsound == 1:
                os.system('aplay -q ./sounds/losinglife.wav&')
            self.position_x = global_variables.main_paddle.position_x
            self.position_y = global_variables.main_paddle.position_y
            self.speed_y = 1
            self.speed_flag = 0
            self.shape = [['$']]
        if self.speed_flag == 0:
            self.position_y -= self.speed_y
        for col in range(self.width):
            for row in range(self.height):
                global_variables.main_board.matrix[row+self.position_y][col+self.position_x] = self.shape[row][col]
class Powerup(Objects):
    """A falling power-up token released by a broken brick; activates its
    effect when the paddle catches it."""
    def __init__(self,obj,xpos,ypos , power,velx):
        super().__init__(obj,xpos,ypos)
        self.speed_y = -1
        self.contain_powerup = power   # power-up id (index into the flag arrays)
        self.speed_flag = 0            # 1 => consumed/parked
        self.speed_x = velx            # inherits the ball's horizontal speed
    def render(self):
        """Move the token one step, resolve collisions, and repaint it."""
        if self.speed_flag == 0:
            self.position_y -= self.speed_y
            self.position_x += self.speed_x
            self.collision_with_wall()
            self.collision_with_paddle()
        for col in range(self.width):
            for row in range(self.height):
                global_variables.main_board.matrix[row+self.position_y][col+self.position_x] = (self.shape[row][col])
    def collision_with_wall(self):
        """Bounce off side walls and the ceiling; stop dead on the floor."""
        if self.position_x + self.speed_x <= 2 or self.position_x + self.speed_x >= 96:
            self.speed_x *= -1
        if self.position_y <= 4:
            self.speed_y *= -1
        elif self.position_y + self.speed_y >= 37:
            self.speed_x = 0
            self.speed_y = 0
    def collision_with_paddle(self):
        """Activate the power-up when the paddle catches the token."""
        paddle = global_variables.main_paddle
        if self.position_y in (35, 36):
            if paddle.position_x <= self.position_x <= paddle.position_x + paddle.width:
                self.speed_flag = 1
                self.position_y = 2
                # Mark active, start its timer, and clear the in-air marker.
                global_variables.active_powerupflag[self.contain_powerup] = 1
                global_variables.powerup_start_time[self.contain_powerup] = time()
                global_variables.inair_powerupflag[self.contain_powerup] = 0
                self.shape = [[' ']]
        if self.position_y > 36:
            self.speed_y = 0
            self.speed_flag = 1
def explosion_coor(x,y):
    """Record the blast footprint centred on (x, y) in the global explosion
    list so the affected cells get wiped on the next render pass."""
    blast = [(x-3,y),(x+3,y),(x-3,y-1),(x+3,y-1),(x-3,y+1),(x+3,y+1),(x,y-1),(x,y+1),(x,y)]
    global_variables.explosion_coordinates.extend(blast)
def fireball_brick(x,y):
    """Record the fire-ball burn footprint centred on (x, y) in the global
    fire-ball list so neighbouring bricks get torched on the next render."""
    burned = [(x-3,y),(x+3,y),(x-3,y-1),(x+3,y-1),(x-3,y+1),(x+3,y+1),(x,y-1),(x,y+1),(x,y)]
    global_variables.fire_ball_list.extend(burned)
def default():
    """Reset the paddle and ball to their spawn points after a lost life and
    cancel every active power-up."""
    global_variables.main_paddle.clear()
    global_variables.main_ball.clear()
    config.lives -= 1
    global_variables.flag = 0
    global_variables.main_paddle.position_x = 5
    global_variables.main_paddle.position_y = 35
    global_variables.main_ball.position_x = 5
    global_variables.main_ball.position_y = 33
    # reset the fast-ball one-shot marker
    global_variables.main_ball.onetimetempflag = 0
    global_variables.main_ball.render()
    global_variables.main_paddle.render()
    # Deactivate all power-ups in place (other code keeps a reference to
    # this list, so mutate rather than rebind it).
    flags = global_variables.active_powerupflag
    flags[:] = [0] * len(flags)
def levelskip():
    """Skip to the next level: wipe the current bricks, rebuild a fresh brick
    layout, and reset the paddle/ball to their spawn state.

    Fix: the original contained a second loop that did ``j = []`` over the
    brick rows — rebinding the loop variable is a no-op, so it is removed;
    the ``make_bricks()`` call below is what actually replaces the layout.
    """
    global_variables.main_paddle.clear()
    global_variables.main_ball.clear()
    # Erase every existing brick from the board before rebuilding.
    for row in global_variables.bricks:
        for brick in row:
            brick.clear()
    global_variables.bricks = global_variables.make_bricks()
    global_variables.explosion_coordinates = []
    global_variables.fire_ball_list = []
    global_variables.flag = 0
    global_variables.main_paddle.position_x = 5
    global_variables.main_paddle.position_y = 35
    global_variables.main_ball.position_x = 5
    global_variables.main_ball.position_y = 33
    global_variables.main_ball.speed_x = 0
    global_variables.main_ball.speed_y = 0
    # reset the fast-ball one-shot marker
    global_variables.main_ball.onetimetempflag = 0
    global_variables.main_ball.render()
    global_variables.main_paddle.render()
    # Deactivate all power-ups (mutate in place; other code holds references).
    for i in range(len(global_variables.active_powerupflag)):
        global_variables.active_powerupflag[i] = 0
def take_down_bricks():
    """Lower every surviving brick by one row once the level's grace period
    has elapsed; raise the fall-down flag when a live brick reaches row 34."""
    global_variables.fire_ball_list = []
    global_variables.explosion_coordinates = []
    # Seconds after the level start before bricks begin descending.
    grace = {0: 15, 1: 10, 2: 50}
    for row in global_variables.bricks:
        for brick in row:
            brick.clear()
            if brick.position_y >= 34 and brick.weight > 0:
                # A live brick reached the paddle row: signal game-over pressure.
                global_variables.brick_falldown_flag = 1
            else:
                level = global_variables.level
                if level in grace:
                    started = getattr(global_variables, 'level%dstarttime' % level)
                    if round(time()) - started > grace[level]:
                        brick.position_y += 1
| 52.666667
| 400
| 0.572757
| 6,322
| 53,562
| 4.570389
| 0.024992
| 0.225825
| 0.134146
| 0.085969
| 0.948259
| 0.928082
| 0.92116
| 0.903544
| 0.890877
| 0.878971
| 0
| 0.020043
| 0.336787
| 53,562
| 1,016
| 401
| 52.718504
| 0.793345
| 0.013368
| 0
| 0.843862
| 0
| 0
| 0.016431
| 0.011623
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051251
| false
| 0
| 0.010727
| 0.003576
| 0.077473
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8788fd18b1d13ed26eb0658e57047e7177797373
| 1,417
|
py
|
Python
|
tests/components/character_count/test_character_count.py
|
dalepotter/govuk-frontend-jinja
|
8ea209b3f70d50f9ebc08873396f040caebf12b6
|
[
"MIT"
] | null | null | null |
tests/components/character_count/test_character_count.py
|
dalepotter/govuk-frontend-jinja
|
8ea209b3f70d50f9ebc08873396f040caebf12b6
|
[
"MIT"
] | null | null | null |
tests/components/character_count/test_character_count.py
|
dalepotter/govuk-frontend-jinja
|
8ea209b3f70d50f9ebc08873396f040caebf12b6
|
[
"MIT"
] | null | null | null |
import pytest
def test_character_count(env, similar, template, expected):
    """Default character-count component renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
@pytest.mark.xfail(reason="overzealous escaping")
def test_character_count_with_hint(env, similar, template, expected):
    """Hint variant; known failure due to over-escaping in the port."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_default_value(env, similar, template, expected):
    """Pre-filled value variant renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_default_value_exceeding_limit(
    env, similar, template, expected
):
    """Pre-filled value longer than the limit renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_custom_rows(env, similar, template, expected):
    """Custom rows variant renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_label_as_page_heading(env, similar, template, expected):
    """Label-as-page-heading variant renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_word_count(env, similar, template, expected):
    """Word-count (rather than character-count) variant renders correctly."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
def test_character_count_with_threshold(env, similar, template, expected):
    """Threshold variant renders the reference HTML."""
    rendered = env.from_string(template).render()
    assert similar(rendered, expected)
| 31.488889
| 86
| 0.778405
| 175
| 1,417
| 6.034286
| 0.188571
| 0.227273
| 0.121212
| 0.159091
| 0.879735
| 0.856061
| 0.856061
| 0.856061
| 0.856061
| 0.856061
| 0
| 0
| 0.125618
| 1,417
| 44
| 87
| 32.204545
| 0.8523
| 0
| 0
| 0.571429
| 0
| 0
| 0.014114
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.285714
| false
| 0
| 0.035714
| 0
| 0.321429
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
87f0a2db3f7a545fb750ca1cfbacffe1682302a4
| 46,406
|
py
|
Python
|
controllers/dances-controller.py
|
pscedu/DANCES-controller
|
eeb9509ba79fb291df7a3810423c8b1d9dd380e4
|
[
"MIT"
] | null | null | null |
controllers/dances-controller.py
|
pscedu/DANCES-controller
|
eeb9509ba79fb291df7a3810423c8b1d9dd380e4
|
[
"MIT"
] | 2
|
2017-01-06T21:53:18.000Z
|
2017-01-09T15:12:49.000Z
|
controllers/dances-controller.py
|
pscedu/DANCES-controller
|
eeb9509ba79fb291df7a3810423c8b1d9dd380e4
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet, ethernet, arp, vlan
import array
class SimpleSwitch13(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(SimpleSwitch13, self).__init__(*args, **kwargs)
# set dpid of known switches
self.switch = {}
self.switch['mi'] = 1229782937975278821
self.switch['np'] = 15730199386661060610
self.switch['sc'] = 44159981331328
# set inital ip-mac table for all known hosts
self.ip_to_mac = {}
# mi hosts
self.ip_to_mac['10.10.1.10'] = "00:60:dd:44:56:70" #tango
self.ip_to_mac['10.10.1.20'] = "00:0f:53:28:dd:4c" #rumba
self.ip_to_mac['10.10.1.30'] = "00:0f:53:28:dc:00" #mambo
#np hosts
self.ip_to_mac['10.10.2.10'] = "00:60:dd:45:2d:16" #psarch
self.ip_to_mac['10.10.2.50'] = "00:60:dd:45:91:bb" #tenge
self.ip_to_mac['10.10.2.30'] = "00:02:c9:45:20:90" #giu3
self.ip_to_mac['10.10.2.110'] = "00:25:90:14:cc:54" #dennis
self.ip_to_mac['10.10.2.100'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.101'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.102'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.103'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.104'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.105'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.106'] = "00:02:c9:f0:62:b0" #dxcsbb05
self.ip_to_mac['10.10.2.107'] = "00:02:c9:f0:62:b0" #dxcsbb05
#sc hosts
self.ip_to_mac['10.10.2.210'] = "24:8a:07:6f:87:c0" #twostep
self.ip_to_mac['10.10.2.200'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.201'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.202'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.203'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.204'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.205'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.206'] = "00:60:dd:45:67:db" #hiphop
self.ip_to_mac['10.10.2.207'] = "00:60:dd:45:67:db" #hiphop
self.logger.info("init complete")
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
# install table-miss flow entry
#
# We specify NO BUFFER to max_len of the output action due to
# OVS bug. At this moment, if we specify a lesser number, e.g.,
# 128, OVS will send Packet-In with invalid buffer_id and
# truncated packet data. In that case, we cannot output packets
# correctly. The bug has been fixed in OVS v2.1.0.
#match = parser.OFPMatch()
#actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
# ofproto.OFPCML_NO_BUFFER)]
#self.add_flow(datapath, 0, match, actions)
self.logger.info("switch_features_handler: %s", datapath)
#self.clear_flows(datapath) # clear existing flows
self.send_psc_test_config(datapath) # add initial flows
def send_psc_test_config(self, datapath):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
controlTable = 10
dscpTable = 20
l3Table = 30
l2Table = 40
vlanTable = 50
# install table-miss flow entries for table 10 and 20
#self.add_goto_to_table(datapath, 0, 10, 20) # not needed on corsa?
#self.add_goto_to_table(datapath, 0, 20, 30)
# install all meters on any switch that contacts us
self.add_meters(datapath)
# send arp to controller (install on any switch that contacts us)
#match = parser.OFPMatch(eth_type=0x0806)
#actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER)]
#self.add_flow(datapath, 1, controlTable, match, actions)
self.logger.info("dpid: %s, mi-dpid: %s", datapath.id, self.switch['mi'])
if datapath.id == self.switch['mi']:
self.logger.info("dpid is mi")
# # test
# match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.10", vlan_vid=(0x1000 | 4010))
# actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
# self.add_metered_flow(datapath, 100, l3Table, match, actions, 110)
# arp
#match = parser.OFPMatch(eth_type=0x0806)
#actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER)]
#self.add_flow(datapath, 1, controlTable, match, actions)
## MI hosts ##
# tango
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.10", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.10", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# rumba
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.20", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(7), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.20", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(7), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# mambo
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.30", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(3), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.30", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(3), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## NP hosts ##
# psarch
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.10", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# tenge-70
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.50", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dennis
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.110", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# giu3
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.30", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dxcsbb05
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.100", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.101", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.102", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.103", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.104", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.105", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.106", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.107", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## NP hosts ## From SC
# psarch
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.10", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# tenge-70
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.50", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dennis
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.110", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# giu3
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.30", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dxcsbb05
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.100", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.101", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.102", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.103", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.104", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.105", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.106", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.107", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## SC hosts #
# twostep
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.210", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(10), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# hiphop
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.200", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(16), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## twostep
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.210", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## hiphop
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.200", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.201", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.202", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.203", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.204", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.205", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.206", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
#match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.207", vlan_vid=(0x1000 | 4010))
#actions = [parser.OFPActionOutput(22), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
#self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
if datapath.id == self.switch['np']:
self.logger.info("dpid is np")
## MI hosts ##
# tango
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.10", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# rumba
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.20", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# mambo
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.30", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 2)
## NP hosts ##
# psarch
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.10", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# tenge-70
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.50", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(3), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dennis
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.110", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(7), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# giu3
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.30", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(5), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dxcsbb05
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.100", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.101", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.102", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.103", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.104", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.105", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.106", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.107", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(12), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## SC hosts ##
# twostep
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.210", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# hiphop
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.200", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.201", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.202", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.203", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.204", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.205", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.206", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.207", vlan_vid=(0x1000 | 4010))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4010)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
if datapath.id == self.switch['sc']:
self.logger.info("dpid is sc")
## MI hosts ##
# tango
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.10", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# rumba
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.20", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# mambo
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.1.30", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
## NP hosts ##
# psarch
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.10", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# tenge-70
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.50", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dennis
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.110", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# giu3
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.30", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# dxcsbb05
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.100", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.101", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.102", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.103", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.104", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.105", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.106", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.107", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(24), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# SC hosts #
# twostep
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.210", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(2), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
# hiphop
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.200", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.201", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.202", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.203", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.204", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.205", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.206", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
match = parser.OFPMatch(eth_type=0x0800, ipv4_dst="10.10.2.207", vlan_vid=(0x1000 | 4049))
actions = [parser.OFPActionOutput(1), parser.OFPActionSetField(vlan_vid=(0x1000 | 4049)), parser.OFPActionSetQueue(0)]
self.add_metered_flow(datapath, 100, l3Table, match, actions, 1)
def clear_flows(self, datapath):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
match = parser.OFPMatch()
inst = [parser.OFPInstructionActions(ofproto.OFPIT_WRITE_ACTIONS, [])]
mod = parser.OFPFlowMod(datapath=datapath, command=ofproto.OFPFC_DELETE,
match=match, instructions=inst)
datapath.send_msg(mod)
def add_flow(self, datapath, priority, table, match, actions):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
inst = [parser.OFPInstructionActions(ofproto.OFPIT_WRITE_ACTIONS,
actions)]
mod = parser.OFPFlowMod(datapath=datapath, table_id=table, priority=priority,
match=match, instructions=inst)
datapath.send_msg(mod)
def add_metered_flow(self, datapath, priority, table, match, actions, meter):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
inst = [parser.OFPInstructionActions(ofproto.OFPIT_WRITE_ACTIONS,
actions), parser.OFPInstructionMeter(meter)]
mod = parser.OFPFlowMod(datapath=datapath, table_id=table, priority=priority,
match=match, instructions=inst)
datapath.send_msg(mod)
# self.logger.info("flow added {in_port: %s eth_dst: %s}", match['in_port'], match['eth_dst'])
self.logger.info("\tflow mod sent: %s", mod)
def add_goto_to_table(self, datapath, priority, table, goto_table):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
match = parser.OFPMatch()
inst = [parser.OFPInstructionGotoTable(goto_table)]
mod = parser.OFPFlowMod(datapath=datapath, table_id=table, priority=priority,
match=match, instructions=inst)
datapath.send_msg(mod)
def add_meters(self, datapath):
    """Pre-install this switch's meter table.

    Creates, in this exact order:
      * meters 1 and 2: default non-priority meters capped at the protocol
        maximum rate (0xffffffff kbps, i.e. effectively unlimited);
      * five pools of five meters each, ids g*10+1 .. g*10+5 for
        g in 1..5, limiting to g Gbps with a 530 Mb burst
        (WAN bandwidth-delay product: 5 Gb x 106 ms).

    flags=9 is OFPMF_KBPS | OFPMF_STATS (13 would additionally set BURST).
    The original hand-unrolled 27 OFPMeterMod blocks are collapsed into
    loops; the emitted message sequence is unchanged.
    """
    ofproto = datapath.ofproto
    parser = datapath.ofproto_parser
    KBPS_STATS_FLAGS = 9
    BDP_BURST = 530000  # kb; WAN BDP = 5 Gb x 106 ms

    def send_meter(meter_id, bands):
        # One OFPMC_ADD meter-mod per meter id.
        mod = parser.OFPMeterMod(datapath=datapath,
                                 command=ofproto.OFPMC_ADD,
                                 flags=KBPS_STATS_FLAGS,
                                 meter_id=meter_id,
                                 bands=bands)
        datapath.send_msg(mod)

    # Default non-priority meters: cap at max rate 0xffffffff == 4294967295.
    max_band = [parser.OFPMeterBandDrop(type_=ofproto.OFPMBT_DROP,
                                        rate=4294967295)]
    for meter_id in (1, 2):
        send_meter(meter_id, max_band)

    # Per-tier pools: five meters per rate tier, band object shared
    # within a tier exactly as in the unrolled original.
    for gbps in range(1, 6):
        tier_band = [parser.OFPMeterBandDrop(type_=ofproto.OFPMBT_DROP,
                                             rate=gbps * 1000000,
                                             burst_size=BDP_BURST)]
        for slot in range(1, 6):
            send_meter(gbps * 10 + slot, tier_band)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
    """Dispatch OpenFlow packet-in events.

    Only ARP requests are handled (delegated to _handle_arp); everything
    else is ignored. ICMP handling exists in _handle_icmp but its dispatch
    is intentionally disabled.
    """
    msg = ev.msg
    # A truncated payload suggests the switch's miss_send_length is too small.
    if msg.msg_len < msg.total_len:
        self.logger.debug("packet truncated: only %s of %s bytes",
                          msg.msg_len, msg.total_len)
    datapath = msg.datapath
    in_port = msg.match['in_port']

    parsed = packet.Packet(msg.data)
    eth = parsed.get_protocol(ethernet.ethernet)
    if eth is None:
        # Not an Ethernet frame we can interpret.
        return
    arp_pdu = parsed.get_protocol(arp.arp)
    if arp_pdu is not None:
        vlan_pdu = parsed.get_protocol(vlan.vlan)  # None when untagged
        self._handle_arp(datapath, in_port, eth, arp_pdu, vlan_pdu)
def _handle_arp(self, datapath, port, pkt_ethernet, pkt_arp, pkt_vlan):
    """Answer an ARP request from the controller's learned IP-to-MAC table.

    If the requested IP's MAC is known for this datapath, an ARP reply is
    sent back out *port* (re-tagging with the request's VLAN header when
    the request was tagged). Unknown targets are silently ignored — there
    is nothing useful to do short of flooding.

    BUGFIX: the table is keyed per-datapath (``setdefault(dpid, {})``, and
    the commented learning line wrote ``ip_to_mac[dpid][src_ip]``), but the
    lookup used the flat dict; it now reads ``ip_to_mac[dpid]``.
    ROBUSTNESS: untagged requests no longer crash on ``pkt_vlan.pcp``.
    """
    if pkt_arp.opcode != arp.ARP_REQUEST:
        return
    src_mac = pkt_ethernet.src
    dst_ip = pkt_arp.dst_ip
    src_ip = pkt_arp.src_ip
    dpid = datapath.id
    self.ip_to_mac.setdefault(dpid, {})
    self.logger.info("packet in dpid:%s src_mac:%s dst_mac:%s port:%s",
                     dpid, src_mac, pkt_ethernet.dst, port)
    # learn mac address to avoid DROP.
    #self.ip_to_mac[dpid][src_ip] = src_mac
    if dst_ip not in self.ip_to_mac[dpid]:
        # Requested MAC address is unknown.
        # There is nothing we can do without flood.
        return
    dst_mac = self.ip_to_mac[dpid][dst_ip]

    # build arp reply #
    reply = packet.Packet()
    reply.add_protocol(ethernet.ethernet(ethertype=pkt_ethernet.ethertype,
                                         dst=src_mac,
                                         src=dst_mac))
    if pkt_vlan is not None:
        # Mirror the request's VLAN tag onto the reply.
        reply.add_protocol(vlan.vlan(pcp=pkt_vlan.pcp,
                                     cfi=pkt_vlan.cfi,
                                     vid=pkt_vlan.vid,
                                     ethertype=pkt_vlan.ethertype))
    reply.add_protocol(arp.arp(opcode=arp.ARP_REPLY,
                               src_mac=dst_mac,
                               src_ip=dst_ip,
                               dst_mac=src_mac,
                               dst_ip=src_ip))
    self.logger.info("packet out dpid:%s src_mac:%s dst_mac:%s port:%s",
                     dpid, src_mac, dst_mac, port)
    self._send_packet(datapath, port, reply)
def _handle_icmp(self, datapath, port, pkt_ethernet, pkt_ipv4, pkt_icmp):
    """Answer an ICMP echo request addressed to the controller.

    Builds an echo reply sourced from self.hw_addr / self.ip_addr (set
    elsewhere on this class) and emits it out *port*; non-echo ICMP is
    ignored.
    """
    if pkt_icmp.type != icmp.ICMP_ECHO_REQUEST:
        return
    reply = packet.Packet()
    reply.add_protocol(ethernet.ethernet(ethertype=pkt_ethernet.ethertype,
                                         dst=pkt_ethernet.src,
                                         src=self.hw_addr))
    reply.add_protocol(ipv4.ipv4(dst=pkt_ipv4.src,
                                 src=self.ip_addr,
                                 proto=pkt_ipv4.proto))
    # csum=0: the checksum is filled in during pkt.serialize().
    reply.add_protocol(icmp.icmp(type_=icmp.ICMP_ECHO_REPLY,
                                 code=icmp.ICMP_ECHO_REPLY_CODE,
                                 csum=0,
                                 data=pkt_icmp.data))
    self._send_packet(datapath, port, reply)
def _send_packet(self, datapath, port, pkt):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
pkt.serialize()
self.logger.info("packet-out %s" % (pkt,))
data = pkt.data
actions = [parser.OFPActionOutput(port=port)]
out = parser.OFPPacketOut(datapath=datapath,
buffer_id=ofproto.OFP_NO_BUFFER,
in_port=ofproto.OFPP_CONTROLLER,
actions=actions,
data=data)
datapath.send_msg(out)
| 56.592683
| 132
| 0.659096
| 5,972
| 46,406
| 4.954956
| 0.068151
| 0.042817
| 0.079078
| 0.062046
| 0.865263
| 0.848298
| 0.840188
| 0.815113
| 0.80271
| 0.793147
| 0
| 0.119074
| 0.212774
| 46,406
| 819
| 133
| 56.661783
| 0.690929
| 0.143236
| 0
| 0.601253
| 0
| 0
| 0.045484
| 0.000607
| 0
| 0
| 0.036428
| 0
| 0
| 0
| null | null | 0
| 0.014614
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
87fa47f128fdd6f455c3084f08c17e355d7d1d6f
| 17,013
|
py
|
Python
|
src/behavior_tree/subtrees/MovePose.py
|
rirolab/behavior_tree
|
e2cc2c96fbe3af756908da966421667b7e28f821
|
[
"MIT"
] | null | null | null |
src/behavior_tree/subtrees/MovePose.py
|
rirolab/behavior_tree
|
e2cc2c96fbe3af756908da966421667b7e28f821
|
[
"MIT"
] | 1
|
2021-08-25T07:02:39.000Z
|
2021-08-25T07:11:21.000Z
|
src/behavior_tree/subtrees/MovePose.py
|
rirolab/behavior_tree
|
e2cc2c96fbe3af756908da966421667b7e28f821
|
[
"MIT"
] | null | null | null |
import numpy as np
import json
import rospy
import py_trees
import std_msgs.msg as std_msgs
from actionlib_msgs.msg import GoalStatus
from control_msgs.msg import FollowJointTrajectoryResult
import geometry_msgs
from complex_action_client.srv import String_Int, None_String
class MOVEP(py_trees.behaviour.Behaviour):
    """
    Move Pose

    Sends a 'movePose' command to the arm_client command service on the
    first tick, then polls the arm_client status service on later ticks.

    Note that this behaviour will return with
    :attr:`~py_trees.common.Status.SUCCESS`. It will also send a clearing
    command to the robot if it is cancelled or interrupted by a higher
    priority behaviour.
    """
    def __init__(self, name, action_goal=None,
                 topic_name="", controller_ns=""):
        # action_goal: dict with a 'pose' entry, either a geometry_msgs
        # Pose or a blackboard variable name resolving to one (see update()).
        # topic_name / controller_ns are stored but not used in this class.
        super(MOVEP, self).__init__(name=name)
        self.topic_name = topic_name
        self.controller_ns = controller_ns
        self.action_goal = action_goal
        self.sent_goal = False  # True once the goal has been dispatched
        self.cmd_req = None  # command service proxy; populated in setup()
    def setup(self, timeout):
        """Connect the arm_client command/status service proxies (blocks
        until the services exist; the *timeout* argument is not used)."""
        self.feedback_message = "{}: setup".format(self.name)
        rospy.wait_for_service("arm_client/command")
        self.cmd_req = rospy.ServiceProxy("arm_client/command", String_Int)
        rospy.wait_for_service("arm_client/status")
        self.status_req = rospy.ServiceProxy("arm_client/status", None_String)
        return True
    def initialise(self):
        """Reset the dispatch flag so a fresh goal is sent on the next tick."""
        self.logger.debug("{0}.initialise()".format(self.__class__.__name__))
        self.sent_goal = False
    def update(self):
        """Send the goal once, then poll status until the motion resolves."""
        self.logger.debug("%s.update()" % self.__class__.__name__)
        if self.cmd_req is None:
            self.feedback_message = \
                "no action client, did you call setup() on your tree?"
            # NOTE(review): the rest of this file uses py_trees.common.Status;
            # confirm py_trees.Status is a valid alias in the installed version.
            return py_trees.Status.FAILURE
        if not self.sent_goal:
            # The goal pose is either given directly or named on the blackboard.
            if type(self.action_goal['pose']) is geometry_msgs.msg._Pose.Pose:
                goal = {'x': self.action_goal['pose'].position.x,
                        'y': self.action_goal['pose'].position.y,
                        'z': self.action_goal['pose'].position.z,
                        'qx': self.action_goal['pose'].orientation.x,
                        'qy': self.action_goal['pose'].orientation.y,
                        'qz': self.action_goal['pose'].orientation.z,
                        'qw': self.action_goal['pose'].orientation.w,}
            else:
                blackboard = py_trees.Blackboard()
                ps = blackboard.get(self.action_goal['pose'])
                goal = {'x': ps.position.x,
                        'y': ps.position.y,
                        'z': ps.position.z,
                        'qx': ps.orientation.x,
                        'qy': ps.orientation.y,
                        'qz': ps.orientation.z,
                        'qw': ps.orientation.w,}
            # Double-encoded on purpose: the 'goal' field is itself a JSON string.
            cmd_str = json.dumps({'action_type': 'movePose',
                                  'goal': json.dumps(goal),
                                  'timeout': 3.,
                                  'no_wait': True})
            ret = self.cmd_req(cmd_str)
            if ret.data==GoalStatus.REJECTED or ret.data==GoalStatus.ABORTED:
                self.feedback_message = "failed to execute"
                self.logger.debug("%s.update()[%s]" % (self.__class__.__name__, self.feedback_message))
                return py_trees.common.Status.FAILURE
            self.sent_goal = True
            self.feedback_message = "Sending a joint goal"
            return py_trees.common.Status.RUNNING
        # Goal already in flight: poll the JSON status report.
        msg = self.status_req()
        d = json.loads(msg.data)
        state = d['state']
        ret = d['result']
        if state in [GoalStatus.ABORTED,
                     GoalStatus.PREEMPTED,
                     GoalStatus.REJECTED] and \
                     ret != FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "FAILURE"
            return py_trees.common.Status.FAILURE
        if ret == FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "SUCCESSFUL"
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
    def terminate(self, new_status):
        """Cancel the in-flight goal if the arm is still executing it."""
        msg = self.status_req()
        d = json.loads(msg.data)
        if d['state'] == GoalStatus.ACTIVE:
            self.cmd_req( json.dumps({'action_type': 'cancel_goal'}) )
        return
class MOVES(py_trees.behaviour.Behaviour):
    """
    Move Pose following a straight pose trajectory

    Identical protocol to MOVEP but issues a 'movePoseStraight' command.

    Note that this behaviour will return with
    :attr:`~py_trees.common.Status.SUCCESS`. It will also send a clearing
    command to the robot if it is cancelled or interrupted by a higher
    priority behaviour.
    """
    def __init__(self, name, action_goal=None,
                 topic_name="", controller_ns=""):
        # action_goal: dict with a 'pose' entry, either a geometry_msgs
        # Pose or a blackboard variable name resolving to one.
        super(MOVES, self).__init__(name=name)
        self.topic_name = topic_name
        self.controller_ns = controller_ns
        self.action_goal = action_goal
        self.sent_goal = False  # True once the goal has been dispatched
        self.cmd_req = None  # command service proxy; populated in setup()
    def setup(self, timeout):
        """Connect the arm_client command/status service proxies (blocking)."""
        self.feedback_message = "{}: setup".format(self.name)
        rospy.wait_for_service("arm_client/command")
        self.cmd_req = rospy.ServiceProxy("arm_client/command", String_Int)
        rospy.wait_for_service("arm_client/status")
        self.status_req = rospy.ServiceProxy("arm_client/status", None_String)
        return True
    def initialise(self):
        """Reset the dispatch flag so a fresh goal is sent on the next tick."""
        self.logger.debug("{0}.initialise()".format(self.__class__.__name__))
        self.sent_goal = False
    def update(self):
        """Send the goal once, then poll status until the motion resolves."""
        self.logger.debug("%s.update()" % self.__class__.__name__)
        if self.cmd_req is None:
            self.feedback_message = \
                "no action client, did you call setup() on your tree?"
            # NOTE(review): elsewhere py_trees.common.Status is used; confirm
            # py_trees.Status is a valid alias in the installed version.
            return py_trees.Status.FAILURE
        if not self.sent_goal:
            # reference frame: arm_baselink
            # The goal pose is either given directly or named on the blackboard.
            if type(self.action_goal['pose']) is geometry_msgs.msg._Pose.Pose:
                goal = {'x': self.action_goal['pose'].position.x,
                        'y': self.action_goal['pose'].position.y,
                        'z': self.action_goal['pose'].position.z,
                        'qx': self.action_goal['pose'].orientation.x,
                        'qy': self.action_goal['pose'].orientation.y,
                        'qz': self.action_goal['pose'].orientation.z,
                        'qw': self.action_goal['pose'].orientation.w,}
            else:
                blackboard = py_trees.Blackboard()
                ps = blackboard.get(self.action_goal['pose'])
                goal = {'x': ps.position.x,
                        'y': ps.position.y,
                        'z': ps.position.z,
                        'qx': ps.orientation.x,
                        'qy': ps.orientation.y,
                        'qz': ps.orientation.z,
                        'qw': ps.orientation.w,}
            # Double-encoded on purpose: the 'goal' field is itself a JSON string.
            cmd_str = json.dumps({'action_type': 'movePoseStraight',
                                  'goal': json.dumps(goal),
                                  'timeout': 3.,
                                  'no_wait': True})
            ret = self.cmd_req(cmd_str)
            if ret.data==GoalStatus.REJECTED or ret.data==GoalStatus.ABORTED:
                self.feedback_message = "failed to execute"
                self.logger.debug("%s.update()[%s]" % (self.__class__.__name__, self.feedback_message))
                return py_trees.common.Status.FAILURE
            self.sent_goal = True
            self.feedback_message = "Sending a joint goal"
            return py_trees.common.Status.RUNNING
        # Goal already in flight: poll the JSON status report.
        msg = self.status_req()
        d = json.loads(msg.data)
        state = d['state']
        ret = d['result']
        if state in [GoalStatus.ABORTED,
                     GoalStatus.PREEMPTED,
                     GoalStatus.REJECTED] and \
                     ret != FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "FAILURE"
            return py_trees.common.Status.FAILURE
        if ret == FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "SUCCESSFUL"
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
    def terminate(self, new_status):
        """Cancel the in-flight goal if the arm is still executing it."""
        msg = self.status_req()
        d = json.loads(msg.data)
        if d['state'] == GoalStatus.ACTIVE:
            self.cmd_req( json.dumps({'action_type': 'cancel_goal'}) )
        return
class MOVEPR(py_trees.behaviour.Behaviour):
    """
    Move Pose Relative with a certain frame

    Issues 'movePoseRelative' commands; unlike MOVEP/MOVES the goal must
    be a direct Pose object (no blackboard lookup) and an extra 'frame'
    entry from action_goal names the reference frame.

    Note that this behaviour will return with
    :attr:`~py_trees.common.Status.SUCCESS`. It will also send a clearing
    command to the robot if it is cancelled or interrupted by a higher
    priority behaviour.
    """
    def __init__(self, name, action_goal=None,
                 topic_name="", controller_ns="", cont=False):
        super(MOVEPR, self).__init__(name=name)
        self.topic_name = topic_name
        self.controller_ns = controller_ns
        # Set the goal pose (dict with 'pose' and 'frame' entries)
        self.action_goal = action_goal
        # Enable continuous motion: when True, update() re-sends the goal
        # every tick as long as a pose is available
        self.action_cont = cont
        # Set the goal flag
        self.sent_goal = False
        self.cmd_req = None  # command service proxy; populated in setup()
    def setup(self, timeout):
        """Connect service proxies and configure the arm speed scale."""
        ## self.publisher = rospy.Publisher(self.topic_name, std_msgs.String, queue_size=10, latch=True)
        self.feedback_message = "{}: setup".format(self.name)
        rospy.wait_for_service("arm_client/command")
        self.cmd_req = rospy.ServiceProxy("arm_client/command", String_Int)
        rospy.wait_for_service("arm_client/status")
        self.status_req = rospy.ServiceProxy("arm_client/status", None_String)
        # Continuous mode runs at a reduced speed scale.
        if self.action_cont:
            timeout_scale = 0.5
        else:
            timeout_scale = 1.
        self.cmd_req(json.dumps({'action_type': 'setSpeed', 'goal': timeout_scale}))
        return True
    def initialise(self):
        """Reset the dispatch flag so a fresh goal is sent on the next tick."""
        self.logger.debug("{0}.initialise()".format(self.__class__.__name__))
        self.sent_goal = False
    def update(self):
        """Send (or, in continuous mode, re-send) the relative pose goal,
        then poll status until the motion resolves."""
        self.logger.debug("%s.update()" % self.__class__.__name__)
        if self.cmd_req is None:
            self.feedback_message = \
                "no action client, did you call setup() on your tree?"
            # NOTE(review): elsewhere py_trees.common.Status is used; confirm
            # py_trees.Status is a valid alias in the installed version.
            return py_trees.Status.FAILURE
        if not self.sent_goal or (self.action_cont and self.action_goal['pose'] is not None):
            goal = {'x': self.action_goal['pose'].position.x,
                    'y': self.action_goal['pose'].position.y,
                    'z': self.action_goal['pose'].position.z,
                    'qx': self.action_goal['pose'].orientation.x,
                    'qy': self.action_goal['pose'].orientation.y,
                    'qz': self.action_goal['pose'].orientation.z,
                    'qw': self.action_goal['pose'].orientation.w,}
            # Double-encoded on purpose: the 'goal' field is itself a JSON string.
            cmd_str = json.dumps({'action_type': 'movePoseRelative',
                                  'goal': json.dumps(goal),
                                  'frame': self.action_goal['frame'],
                                  'timeout': 3.,
                                  'no_wait': True})
            ret = self.cmd_req(cmd_str)
            if ret.data==GoalStatus.REJECTED or ret.data==GoalStatus.ABORTED:
                self.feedback_message = \
                    "failed to execute"
                self.logger.debug("%s.update()[%s]" % (self.__class__.__name__, self.feedback_message))
                return py_trees.common.Status.FAILURE
            self.sent_goal = True
            self.feedback_message = "Sending a pose goal"
            return py_trees.common.Status.RUNNING
        # Goal already in flight: poll the JSON status report.
        msg = self.status_req()
        d = json.loads(msg.data)
        state = d['state']
        ret = d['result']
        if state in [GoalStatus.ABORTED,
                     GoalStatus.PREEMPTED,
                     GoalStatus.REJECTED] and \
                     ret != FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "FAILURE"
            return py_trees.common.Status.FAILURE
        if ret == FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "SUCCESSFUL"
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
    def terminate(self, new_status):
        """Cancel the in-flight goal if the arm is still executing it."""
        msg = self.status_req()
        d = json.loads(msg.data)
        if d['state'] == GoalStatus.ACTIVE:
            self.cmd_req( json.dumps({'action_type': 'cancel_goal'}) )
        return
class MOVEPROOT(py_trees.behaviour.Behaviour):
    """
    Move Pose expressed in the root frame ('movePoseRoot' command);
    otherwise the same protocol as MOVEP.

    Note that this behaviour will return with
    :attr:`~py_trees.common.Status.SUCCESS`. It will also send a clearing
    command to the robot if it is cancelled or interrupted by a higher
    priority behaviour.
    """
    def __init__(self, name, action_goal=None,
                 topic_name="", controller_ns=""):
        # action_goal: dict with a 'pose' entry, either a geometry_msgs
        # Pose or a blackboard variable name resolving to one.
        super(MOVEPROOT, self).__init__(name=name)
        self.topic_name = topic_name
        self.controller_ns = controller_ns
        self.action_goal = action_goal
        self.sent_goal = False  # True once the goal has been dispatched
        self.cmd_req = None  # command service proxy; populated in setup()
    def setup(self, timeout):
        """Connect the arm_client command/status service proxies (blocking)."""
        self.feedback_message = "{}: setup".format(self.name)
        rospy.wait_for_service("arm_client/command")
        self.cmd_req = rospy.ServiceProxy("arm_client/command", String_Int)
        rospy.wait_for_service("arm_client/status")
        self.status_req = rospy.ServiceProxy("arm_client/status", None_String)
        return True
    def initialise(self):
        """Reset the dispatch flag so a fresh goal is sent on the next tick."""
        self.logger.debug("{0}.initialise()".format(self.__class__.__name__))
        self.sent_goal = False
    def update(self):
        """Send the goal once, then poll status until the motion resolves."""
        self.logger.debug("%s.update()" % self.__class__.__name__)
        if self.cmd_req is None:
            self.feedback_message = \
                "no action client, did you call setup() on your tree?"
            # NOTE(review): elsewhere py_trees.common.Status is used; confirm
            # py_trees.Status is a valid alias in the installed version.
            return py_trees.Status.FAILURE
        if not self.sent_goal:
            # reference frame: arm_baselink
            # The goal pose is either given directly or named on the blackboard.
            if type(self.action_goal['pose']) is geometry_msgs.msg._Pose.Pose:
                goal = {'x': self.action_goal['pose'].position.x,
                        'y': self.action_goal['pose'].position.y,
                        'z': self.action_goal['pose'].position.z,
                        'qx': self.action_goal['pose'].orientation.x,
                        'qy': self.action_goal['pose'].orientation.y,
                        'qz': self.action_goal['pose'].orientation.z,
                        'qw': self.action_goal['pose'].orientation.w,}
            else:
                blackboard = py_trees.Blackboard()
                ps = blackboard.get(self.action_goal['pose'])
                goal = {'x': ps.position.x,
                        'y': ps.position.y,
                        'z': ps.position.z,
                        'qx': ps.orientation.x,
                        'qy': ps.orientation.y,
                        'qz': ps.orientation.z,
                        'qw': ps.orientation.w,}
            # Double-encoded on purpose: the 'goal' field is itself a JSON string.
            cmd_str = json.dumps({'action_type': 'movePoseRoot',
                                  'goal': json.dumps(goal),
                                  'timeout': 3.,
                                  'no_wait': True})
            ret = self.cmd_req(cmd_str)
            if ret.data==GoalStatus.REJECTED or ret.data==GoalStatus.ABORTED:
                self.feedback_message = "failed to execute"
                self.logger.debug("%s.update()[%s]" % (self.__class__.__name__, self.feedback_message))
                return py_trees.common.Status.FAILURE
            self.sent_goal = True
            self.feedback_message = "Sending a joint goal"
            return py_trees.common.Status.RUNNING
        # Goal already in flight: poll the JSON status report.
        msg = self.status_req()
        d = json.loads(msg.data)
        state = d['state']
        ret = d['result']
        if state in [GoalStatus.ABORTED,
                     GoalStatus.PREEMPTED,
                     GoalStatus.REJECTED] and \
                     ret != FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "FAILURE"
            return py_trees.common.Status.FAILURE
        if ret == FollowJointTrajectoryResult.SUCCESSFUL:
            self.feedback_message = "SUCCESSFUL"
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
    def terminate(self, new_status):
        """Cancel the in-flight goal if the arm is still executing it."""
        msg = self.status_req()
        d = json.loads(msg.data)
        if d['state'] == GoalStatus.ACTIVE:
            self.cmd_req( json.dumps({'action_type': 'cancel_goal'}) )
        self.logger.debug("%s.terminate()[%s->%s]" % (self.__class__.__name__, self.status, new_status))
        return
| 39.200461
| 116
| 0.556809
| 1,888
| 17,013
| 4.79661
| 0.086864
| 0.053004
| 0.061837
| 0.069567
| 0.916188
| 0.909342
| 0.89841
| 0.895208
| 0.892447
| 0.892447
| 0
| 0.001146
| 0.333039
| 17,013
| 433
| 117
| 39.290993
| 0.796951
| 0.065303
| 0
| 0.898413
| 0
| 0
| 0.095096
| 0.001396
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063492
| false
| 0
| 0.028571
| 0
| 0.206349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3597ab9dc8fefb423f755fa61ac9a27ea2dac796
| 66,513
|
py
|
Python
|
function.py
|
manhon95/QMG_bot
|
3409bb3d3031aa22e3dd27cbbc37c6b0bf20fc42
|
[
"BSD-3-Clause"
] | null | null | null |
function.py
|
manhon95/QMG_bot
|
3409bb3d3031aa22e3dd27cbbc37c6b0bf20fc42
|
[
"BSD-3-Clause"
] | 4
|
2021-12-15T05:06:13.000Z
|
2021-12-15T05:07:32.000Z
|
function.py
|
manhon95/QMG_bot
|
3409bb3d3031aa22e3dd27cbbc37c6b0bf20fc42
|
[
"BSD-3-Clause"
] | null | null | null |
import telegram
import sqlite3
import thread_lock
import status_handler
import random
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import Updater, CommandHandler, CallbackQueryHandler, MessageHandler, Filters
# Lookup tables shared across the bot's game logic.
# Terrain of a space -> the piece type that occupies it.
terrain2type = {'land':'army', 'sea':'navy'}
# Two-letter country id -> display name.
countryid2name = {'ge':'Germany', 'jp':'Japan', 'it':'Italy', 'uk':'United Kingdom', 'su':'Soviet Union', 'us':'United States', 'fr':'France', 'ch':'China'}
# Country id -> side it fights on.
getside = {'ge':'Axis', 'jp':'Axis', 'it':'Axis', 'uk':'Allied', 'su':'Allied', 'us':'Allied', 'fr':'Allied', 'ch':'Allied'}
# Piece type id -> display name.
piece_type_name = {'army':'Army', 'navy':'Navy', 'air':'Air Force'}
# Countries controlled by players (fr/ch are not player-run).
player_country_list = ['ge', 'jp', 'it', 'uk', 'su', 'us']
country_list = ['ge', 'jp', 'it', 'uk', 'su', 'us', 'fr', 'ch']
axis_list = ['ge', 'jp', 'it']
allied_list = ['uk', 'su', 'us', 'fr', 'ch']
#------------------------------------------Supply------------------------------------------
def updatesupply(db):
    """Recompute the supply flag on every piece.

    Algorithm: clear all supply flags, seed supply at each side's supply
    sources (spaces with supply = 1, filtered per country by
    status_handler.status_vp_location), then flood-fill supply outward
    through adjacent spaces with repeated SQL updates until no piece
    changes. Straits/status sub-clauses block propagation through closed
    straits, and the trailing NOT IN clause excludes navies that are cut
    off from friendly armies. Commits before returning.

    NOTE: country names are interpolated with .format() into SQL, but only
    from the hard-coded lists below, so no untrusted input reaches the SQL.
    """
    print('update supply')
    #country_list = ['ge', 'jp', 'it', 'uk', 'su', 'us']
    axis_list = ['ge', 'jp', 'it']
    allies_list = ['uk', 'su', 'us', 'fr', 'ch']
    # Start from a clean slate: nobody is in supply.
    db.execute("update piece set supply = 0;")
    vp_space_list = db.execute("select distinct spaceid from space where supply = 1").fetchall()
    vp_space_list = [space[0] for space in vp_space_list]
    #db.execute("update piece set supply = 1 where pieceid in (select piece.pieceid from piece inner join space on piece.location = space.spaceid where space.supply = 1 group by piece.pieceid);")
    status_handler.status_supply(db)
    for country in axis_list:
        # Seed: pieces sitting on this country's valid supply-source spaces.
        country_vp_space_list = list(status_handler.status_vp_location(country, vp_space_list, db))
        questionmarks = '?' * len(country_vp_space_list)
        db.execute("update piece set supply = 1 where control = '{}' and location in ({});".format(country, ','.join(questionmarks)), (country_vp_space_list))
        # Flood-fill: keep extending supply one adjacency step at a time
        # until the count of newly reachable unsupplied pieces hits zero.
        while db.execute("select count(*) from piece where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control = :country) and (straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status in (select cardid from card where location = 'used') or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Axis') and piece.type = 'army' and (space.straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country}).fetchall()[0][0] > 0:
            db.execute("update piece set supply = 1 where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control = :country) and (straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status in (select cardid from card where location = 'used') or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Axis') and piece.type = 'army' and (space.straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country})
    for country in allies_list:
        # Same seeding as the Axis loop.
        country_vp_space_list = list(status_handler.status_vp_location(country, vp_space_list, db))
        questionmarks = '?' * len(country_vp_space_list)
        db.execute("update piece set supply = 1 where control = '{}' and location in ({});".format(country, ','.join(questionmarks)), (country_vp_space_list))
        # With card 352 played, uk/us/fr share supply sources ("control in
        # ('uk', 'us', 'fr')"); otherwise each country supplies only itself.
        if country in ['uk', 'us', 'fr'] and db.execute("select location from card where cardid = 352;").fetchall()[0][0] == 'played':
            while db.execute("select count(*) from piece where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control in ('uk', 'us', 'fr')) and (straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status not in (select cardid from card where location = 'used' and cardid = 165) or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Allied') and piece.type = 'army' and (space.straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country}).fetchall()[0][0] > 0:
                db.execute("update piece set supply = 1 where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control in ('uk', 'us', 'fr')) and (straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status not in (select cardid from card where location = 'used' and cardid = 165) or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Allied') and piece.type = 'army' and (space.straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country})
        else:
            while db.execute("select count(*) from piece where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control = :country) and (straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status not in (select cardid from card where location = 'used' and cardid = 165) or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Allied') and piece.type = 'army' and (space.straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country}).fetchall()[0][0] > 0:
                db.execute("update piece set supply = 1 where supply = 0 and control = :country and location in (select distinct adjacency from space where spaceid in (select location from piece where supply = 1 and control = :country) and (straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status not in (select cardid from card where location = 'used' and cardid = 165) or status = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Allied') and piece.type = 'army' and (space.straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none')));", {'country':country})
    #for country in country_list:
    # extra_space = status_handler.status_vp_location(country, db)
    # if extra_space != None:
    # questionmarks = '?' * len(extra_space)
    # db.execute("update piece set supply = 1 where control = {} and location in ({});".format(','.join(questionmarks)), (country, extra_space))
    # while db.execute("select count(*) from piece where supply = 0 and control = :country and location in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.supply = 1 and piece.control = :country and (space.straits in (select location from piece where control = :country) or space.straits = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = (select side from country where id = :country)) and piece.type = 'army' and (space.straits in (select location from piece where control = :country) or space.straits = 'none')));", {'country':country}).fetchall()[0][0] > 0:
    # db.execute("update piece set supply = 1 where supply = 0 and control = :country and location in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.supply = 1 and piece.control = :country and (space.straits in (select location from piece where control = :country) or space.straits = 'none')) and pieceid not in (select pieceid from piece where type = 'navy' and control = :country and location not in (select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = (select side from country where id = :country)) and piece.type = 'army' and (space.straits in (select location from piece where control = :country) or space.straits = 'none')));", {'country':country})
    db.commit()
#------------------------------------------Control------------------------------------------
def updatecontrol(bot, db):
    """Recompute card-driven home spaces, then redo every space's control marker."""
    # Card 222: France's home space shifts to 8 while the Axis also control space 12.
    card222_loc = db.execute("select location from card where cardid = 222;").fetchall()[0][0]
    if card222_loc == 'played' and 12 in control_side_space_list('Axis', db, space_type = 'all'):
        db.execute("update country set home = 8 where id = 'fr';")
    else:
        db.execute("update country set home = 12 where id = 'fr';")
    # Card 277: moves the Soviet home space and toggles supply at space 28.
    card277_loc = db.execute("select location from card where cardid = 277;").fetchall()[0][0]
    if card277_loc == 'played':
        db.execute("update space set supply = 0 where spaceid = 28;")
        db.execute("update country set home = 30 where id = 'su';")
    else:
        db.execute("update space set supply = 1 where spaceid = 28;")
        db.execute("update country set home = 28 where id = 'su';")
    # Card 345: relocates the Chinese home space.
    card345_loc = db.execute("select location from card where cardid = 345;").fetchall()[0][0]
    if card345_loc == 'played':
        db.execute("update country set home = 35 where id = 'ch';")
    else:
        db.execute("update country set home = 37 where id = 'ch';")
    print('update control')
    # Reset everything to neutral, then mark each space occupied by a side's army/navy.
    db.execute("update space set control = 'neutral'")
    db.execute("update space set control = 'Axis' where spaceid in (select location from piece where control in ('ge', 'it', 'jp') and type in ('army', 'navy'));")
    db.execute("update space set control = 'Allied' where spaceid in (select location from piece where control in ('uk', 'su', 'us', 'fr', 'ch') and type in ('army', 'navy'));")
    db.commit()
#------------------------------------------Shuffle------------------------------------------
def shuffledeck(bot, country, db):
    """Randomly reassign sequence numbers across every card in *country*'s deck."""
    group_chat = db.execute("select chatid from game;").fetchall()
    announcement = "<b>" + countryid2name[country] + "</b> shuffle his deck"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    deck_rows = db.execute("select cardid from card where location = 'deck' and control = :country;", {'country':country}).fetchall()
    new_order = list(range(1, len(deck_rows) + 1))
    random.shuffle(new_order)
    # Pair each deck card with a fresh random sequence number.
    for seq, row in zip(new_order, deck_rows):
        db.execute('update card set sequence =:sq where cardid =:id and control =:country;', {'sq': seq, 'id': row[0], 'country':country})
def shufflediscard(bot, country, db):
    """Randomly renumber *country*'s discard pool (discardd/discardu, spent 'played'
    cards other than Status/Response/Bolster, and 'used' Response/Bolster cards)."""
    group_chat = db.execute("select chatid from game;").fetchall()
    announcement = "<b>" + countryid2name[country] + "</b> shuffle his deck"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    discard_rows = db.execute("select cardid from card where ((location in ('discardd', 'discardu')) or (location = 'played' and type not in ('Status', 'Response', 'Bolster')) or (location = 'used' and type in ('Response', 'Bolster'))) and control =:country;", {'country':country}).fetchall()
    new_order = list(range(1, len(discard_rows) + 1))
    random.shuffle(new_order)
    for seq, row in zip(new_order, discard_rows):
        db.execute('update card set sequence =:sq where cardid =:id and control =:country;', {'sq': seq, 'id': row[0], 'country':country})
#------------------------------------------Draw------------------------------------------
def reorderdeck(bot, country, db):
    """Renumber *country*'s deck so sequences run 1..N in the current draw order."""
    ordered = db.execute("select cardid from card where location = 'deck' and control = :country order by sequence;", {'country':country}).fetchall()
    for new_seq, row in enumerate(ordered, start=1):
        db.execute('update card set sequence =:sq where cardid =:id and control =:country;', {'sq': new_seq, 'id': row[0], 'country':country})
def drawdeck(bot, country, number, db):
    """Move the top *number* cards of *country*'s deck into his hand and announce it."""
    reorderdeck(bot, country, db)
    group_chat = db.execute("select chatid from game;").fetchall()
    deck_size = db.execute("select count(*) from card where location = 'deck' and control =:country;", {'country':country}).fetchall()[0][0]
    if number > deck_size:
        # Deck exhausted: take whatever is left and announce the empty deck.
        db.execute("update card set location = 'hand' where location = 'deck' and control =:country;", {'country':country})
        text = "<b>" + countryid2name[country] + "</b> finished his deck"
        bot.send_message(chat_id = group_chat[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        number = deck_size
    else:
        db.execute("update card set location = 'hand' where location = 'deck' and sequence <= :number and control =:country;", {'number':number, 'country':country})
        # Shift the survivors back down so sequences run 1..(deck_size - number).
        for new_seq in range(1, deck_size - number + 1):
            db.execute("update card set sequence =:new where sequence =:old and location = 'deck' and control =:country;", {'new': new_seq, 'old': new_seq + number, 'country':country})
    text = "<b>" + countryid2name[country] + "</b> draw " + str(number) + " card(s) from his deck"
    bot.send_message(chat_id = group_chat[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    db.commit()
#------------------------------------------Move Card------------------------------------------
def movecardbottom(bot, cardid, db):
    """Send card *cardid* to the bottom of its owner's deck.

    The new sequence is max(deck sequence) + 1. coalesce() guards the
    empty-deck case: without it max(sequence) is NULL, NULL + 1 is NULL,
    and the card would be left with a NULL sequence that breaks later
    `order by sequence` / renumbering logic.
    """
    group_chat_id = db.execute("select chatid from game;").fetchall()
    card_name = db.execute("select name from card where cardid = :cardid;", {'cardid':cardid}).fetchall()
    text = "<b>" + card_name[0][0] + "</b> move to bottom of deck"
    # Announcement deliberately disabled (card movement stays hidden).
    #bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    db.execute("update card set location = 'deck', sequence = (select coalesce(max(sequence), 0) from card where control = (select control from card where cardid = :cardid) and location = 'deck') + 1 where cardid = :cardid;", {'cardid':cardid})
    db.commit()
def movecardtop(bot, cardid, db):
    """Place card *cardid* on top of its owner's deck (sequence 1), pushing the rest down."""
    group_chat_id = db.execute("select chatid from game;").fetchall()
    card_name = db.execute("select name from card where cardid = :cardid;", {'cardid':cardid}).fetchall()
    text = "<b>" + card_name[0][0] + "</b> move to top of deck"
    #bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    deck_size = db.execute("select count(*) from card where location = 'deck' and control = (select control from card where cardid = :cardid);", {'cardid':cardid}).fetchall()[0][0]
    # Walk from the bottom of the deck upward so no two cards ever share a sequence mid-update.
    for old_seq in range(deck_size, 0, -1):
        db.execute("update card set sequence =:new where sequence =:old and location = 'deck' and control = (select control from card where cardid = :cardid);", {'new': old_seq + 1, 'old': old_seq, 'cardid':cardid})
    db.execute("update card set location = 'deck', sequence = 1 where cardid = :cardid;", {'cardid':cardid})
    db.commit()
def movecardhand(bot, cardid, db):
    """Move card *cardid* into its owner's hand, appended after the current hand.

    The new sequence is max(hand sequence) + 1. coalesce() guards the
    empty-hand case: without it max(sequence) is NULL, NULL + 1 is NULL,
    and the card would end up with a NULL sequence that breaks later
    `order by sequence` logic.
    """
    # Notification deliberately disabled (see commented lines).
    #chat_id = db.execute("select playerid from country where id = :country;", {'country':country}).fetchall()
    card_name = db.execute("select name from card where cardid = :cardid;", {'cardid':cardid}).fetchall()
    text = "<b>" + card_name[0][0] + "</b> move to your hand"
    #bot.send_message(chat_id = chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    db.execute("update card set location = 'hand', sequence = (select coalesce(max(sequence), 0) from card where control = (select control from card where cardid = :cardid) and location = 'hand') + 1 where cardid = :cardid;", {'cardid':cardid})
    db.commit()
#------------------------------------------Discard Hand------------------------------------------
def discardhand(bot, country, number, session):
    """Make *country* discard *number* cards of his choice from his hand.

    If the hand holds *number* cards or fewer, the whole hand is discarded
    automatically and any shortfall is taken from the deck via
    discarddeck_no_hand(). Otherwise the player is shown an inline keyboard
    of his hand and this call blocks on a session lock until
    discardhand_cb() releases it.
    """
    db = sqlite3.connect(session.get_db_dir())
    hand_list = db.execute("select cardid, name, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':country}).fetchall()
    group_chat_id = db.execute("select chatid from game;").fetchall()
    if len(hand_list) <= number:
        # Not enough cards to offer a choice: dump the whole hand.
        if len(hand_list) != 0:
            db.execute("update card set location = 'discardd' where location = 'hand' and control =:country;", {'country':country})
            db.commit()
            text = "<b>" + countryid2name[country] + "</b> finished his hand"
            bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        if len(hand_list) < number:
            # The hand did not cover the requirement; discard the rest from the deck.
            short = number - len(hand_list)
            discarddeck_no_hand(bot, country, short, db)
    else:
        text = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " card(s) from his hand"
        bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        lock_id = session.add_lock()
        chat_id = db.execute("select playerid from country where id = :country;", {'country':country}).fetchall()
        # callback_data layout consumed by discardhand_cb: ['dh', country, cardid, number, lock_id]
        keyboard = [[InlineKeyboardButton(hand[1], callback_data="['dh', '{}', {}, {}, {}]".format(country, hand[0], number, lock_id))]for hand in hand_list]
        text = "Discard " + str(number) + " card(s):\n\n"
        for card in hand_list:
            text += "<b>" + card[1] + "</b> - " + card[2] + " - " + card[3] + "\n"
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.send_message(chat_id = chat_id[0][0], text = text, reply_markup = reply_markup, parse_mode=telegram.ParseMode.HTML)
        # Block until the player's selection is confirmed in discardhand_cb().
        session.thread_lock(lock_id)
def discardhand_cb(bot, query, query_list, session):
    """Inline-keyboard callback for discardhand().

    query_list layout: ['dh', country, action, number, lock_id] where action
    is a cardid to select, 'back' to undo the selection, or 'confirm'.
    Cards being picked are parked in location 'selected' until confirmed,
    then moved to 'discardd'.
    """
    db = sqlite3.connect(session.get_db_dir())
    if query_list[2] == 'confirm':
        selected = db.execute("select name, sequence from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
        db.execute("update card set location = 'discardd' where location = 'selected' and control =:country;", {'country':query_list[1]})
        text = 'Discarded:\n'
        for card in selected:
            text += str(card[0]) + '\n'
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, parse_mode=telegram.ParseMode.HTML)
        db.commit()
        # Unblock the discardhand() caller waiting on session.thread_lock().
        session.release_lock(query_list[-1])
    else:
        if query_list[2] == 'back':
            # Undo: return every provisionally selected card to the hand.
            db.execute("update card set location = 'hand' where location = 'selected' and control =:country;", {'country':query_list[1]})
            hand_list = db.execute("select name, cardid, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = "Discard " + str(query_list[3]) + " card(s):\n\n"
            for card in hand_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            keyboard = [[InlineKeyboardButton(hand[0], callback_data="['dh', '{}', {}, {}, {}]".format(query_list[1], hand[1], query_list[3], query_list[4]))]for hand in hand_list]
        else:
            # A cardid was tapped: park that card in 'selected'.
            db.execute("update card set location = 'selected' where cardid =:id and control =:country;", {'id': query_list[2], 'country':query_list[1]})
            hand_list = db.execute("select name, cardid, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            selected = db.execute("select name, cardid, type, text from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = 'Hand:\n'
            for card in hand_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            text += '\nDiscarded:\n'
            for card in selected:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            if len(selected) < query_list[3]:
                # Still short of the required count: keep offering the remaining hand.
                keyboard = [[InlineKeyboardButton(hand[0], callback_data="['dh', '{}', {}, {}, {}]".format(query_list[1], hand[1], query_list[3], query_list[4]))]for hand in hand_list]
            else:
                keyboard = [[InlineKeyboardButton('Confirm', callback_data="['dh', '{}', 'confirm', {}]".format(query_list[1], query_list[4]))],
                            [InlineKeyboardButton('Back', callback_data="['dh', '{}', 'back', {}, {}]".format(query_list[1], query_list[3], query_list[4]))]]
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, reply_markup=reply_markup, parse_mode=telegram.ParseMode.HTML)
        db.commit()
    db.close()
def discardhand_no_deck(bot, country, number, session):
    """Like discardhand(), but when the hand cannot cover *number* the shortfall
    costs victory points (deduct_vp) instead of deck cards — used when the
    deck is already exhausted (see discarddeck()).
    """
    db = sqlite3.connect(session.get_db_dir())
    hand_list = db.execute("select cardid, name, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':country}).fetchall()
    group_chat_id = db.execute("select chatid from game;").fetchall()
    if len(hand_list) <= number:
        # Not enough cards to offer a choice: dump the whole hand.
        if len(hand_list) != 0:
            db.execute("update card set location = 'discardd' where location = 'hand' and control =:country;", {'country':country})
            db.commit()
            text = "<b>" + countryid2name[country] + "</b> finished his hand"
            bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        if len(hand_list) < number:
            # No cards left anywhere: pay the difference in victory points.
            short = number - len(hand_list)
            deduct_vp(bot, country, short, db)
    else:
        text = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " card(s) from his hand"
        bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        lock_id = session.add_lock()
        chat_id = db.execute("select playerid from country where id = :country;", {'country':country}).fetchall()
        # callback_data layout consumed by discardhand_no_deck_cb: ['dh_nd', country, cardid, number, lock_id]
        keyboard = [[InlineKeyboardButton(hand[1], callback_data="['dh_nd', '{}', {}, {}, {}]".format(country, hand[0], number, lock_id))]for hand in hand_list]
        text = "Discard " + str(number) + " card(s):\n\n"
        for card in hand_list:
            text += "<b>" + card[1] + "</b> - " + card[2] + " - " + card[3] + "\n"
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.send_message(chat_id = chat_id[0][0], text = text, reply_markup = reply_markup, parse_mode=telegram.ParseMode.HTML)
        # Block until the player's selection is confirmed in discardhand_no_deck_cb().
        session.thread_lock(lock_id)
def discardhand_no_deck_cb(bot, query, query_list, session):
    """Inline-keyboard callback for discardhand_no_deck().

    query_list layout: ['dh_nd', country, action, number, lock_id] where
    action is a cardid to select, 'back' to undo, or 'confirm'. Cards being
    picked are parked in location 'selected' until confirmed, then moved to
    'discardd'.
    """
    db = sqlite3.connect(session.get_db_dir())
    if query_list[2] == 'confirm':
        selected = db.execute("select name, sequence from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
        db.execute("update card set location = 'discardd' where location = 'selected' and control =:country;", {'country':query_list[1]})
        text = 'Discarded:\n'
        for card in selected:
            text += str(card[0]) + '\n'
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, parse_mode=telegram.ParseMode.HTML)
        db.commit()
        # Unblock the discardhand_no_deck() caller waiting on session.thread_lock().
        session.release_lock(query_list[-1])
    else:
        if query_list[2] == 'back':
            # Undo: return every provisionally selected card to the hand.
            db.execute("update card set location = 'hand' where location = 'selected' and control =:country;", {'country':query_list[1]})
            hand_list = db.execute("select name, cardid, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = "Discard " + str(query_list[3]) + " card(s):\n\n"
            for card in hand_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            keyboard = [[InlineKeyboardButton(hand[0], callback_data="['dh_nd', '{}', {}, {}, {}]".format(query_list[1], hand[1], query_list[3], query_list[4]))]for hand in hand_list]
        else:
            # A cardid was tapped: park that card in 'selected'.
            db.execute("update card set location = 'selected' where cardid =:id and control =:country;", {'id': query_list[2], 'country':query_list[1]})
            hand_list = db.execute("select name, cardid, type, text from card where location = 'hand' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            selected = db.execute("select name, cardid, type, text from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = 'Hand:\n'
            for card in hand_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            text += '\nDiscarded:\n'
            for card in selected:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            if len(selected) < query_list[3]:
                # Still short of the required count: keep offering the remaining hand.
                keyboard = [[InlineKeyboardButton(hand[0], callback_data="['dh_nd', '{}', {}, {}, {}]".format(query_list[1], hand[1], query_list[3], query_list[4]))]for hand in hand_list]
            else:
                keyboard = [[InlineKeyboardButton('Confirm', callback_data="['dh_nd', '{}', 'confirm', {}]".format(query_list[1], query_list[4]))],
                            [InlineKeyboardButton('Back', callback_data="['dh_nd', '{}', 'back', {}, {}]".format(query_list[1], query_list[3], query_list[4]))]]
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, reply_markup=reply_markup, parse_mode=telegram.ParseMode.HTML)
        db.commit()
    db.close()
#------------------------------------------Discard Response------------------------------------------
def discardresponse(bot, country, number, session):
    """Prompt *country*'s player to discard *number* Response cards from his hand.

    Does nothing when the hand holds no Response cards. Otherwise announces the
    discard in the group chat and blocks on a session lock until the player has
    picked cards via the inline keyboard (handled by discardresponse_cb).
    """
    db = sqlite3.connect(session.get_db_dir())
    responses = db.execute("select cardid, name, type, text from card where location = 'hand' and type = 'Response' and control =:country order by sequence;", {'country':country}).fetchall()
    group_chat = db.execute("select chatid from game;").fetchall()
    if not responses:
        return
    announcement = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " Response card(s) from his hand"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    lock_id = session.add_lock()
    player_chat = db.execute("select playerid from country where id = :country;", {'country':country}).fetchall()
    # callback_data layout consumed by discardresponse_cb: ['dr', country, cardid, number, lock_id]
    keyboard = []
    for entry in responses:
        payload = "['dr', '{}', {}, {}, {}]".format(country, entry[0], number, lock_id)
        keyboard.append([InlineKeyboardButton(entry[1], callback_data=payload)])
    prompt = "Discard " + str(number) + " Response card(s):\n\n"
    for entry in responses:
        prompt += "<b>" + entry[1] + "</b> - " + entry[2] + " - " + entry[3] + "\n"
    bot.send_message(chat_id = player_chat[0][0], text = prompt, reply_markup = InlineKeyboardMarkup(keyboard), parse_mode=telegram.ParseMode.HTML)
    # Block until discardresponse_cb releases the lock.
    session.thread_lock(lock_id)
def discardresponse_cb(bot, query, query_list, session):
    """Inline-keyboard callback for discardresponse().

    query_list layout: ['dr', country, action, number, lock_id] where action
    is a cardid to select, 'back' to undo, or 'confirm'. Cards being picked
    are parked in location 'selected' until confirmed, then moved to 'discardd'.

    Fix: the 'back' branch concatenated the integer query_list[3] directly
    into the prompt string (TypeError); it is now wrapped in str(), matching
    discardhand_cb.
    """
    db = sqlite3.connect(session.get_db_dir())
    if query_list[2] == 'confirm':
        selected = db.execute("select name, sequence from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
        db.execute("update card set location = 'discardd' where location = 'selected' and control =:country;", {'country':query_list[1]})
        text = 'Discarded:\n'
        for card in selected:
            text += str(card[0]) + '\n'
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, parse_mode=telegram.ParseMode.HTML)
        db.commit()
        # Unblock the discardresponse() caller waiting on session.thread_lock().
        session.release_lock(query_list[-1])
    else:
        if query_list[2] == 'back':
            # Undo: return every provisionally selected card to the hand.
            db.execute("update card set location = 'hand' where location = 'selected' and control =:country;", {'country':query_list[1]})
            response_list = db.execute("select name, cardid, type, text from card where location = 'hand' and type = 'Response' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = "Discard " + str(query_list[3]) + " Response card(s):\n\n"
            for card in response_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            keyboard = [[InlineKeyboardButton(response[0], callback_data="['dr', '{}', {}, {}, {}]".format(query_list[1], response[1], query_list[3], query_list[4]))]for response in response_list]
        else:
            # A cardid was tapped: park that card in 'selected'.
            db.execute("update card set location = 'selected' where cardid =:id and control =:country;", {'id': query_list[2], 'country':query_list[1]})
            response_list = db.execute("select name, cardid, type, text from card where location = 'hand' and type = 'Response' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            selected = db.execute("select name, cardid, type, text from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = 'Hand:\n'
            for card in response_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            text += '\nDiscarded:\n'
            for card in selected:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            if len(selected) < query_list[3]:
                # Still short of the required count: keep offering the remaining Response cards.
                keyboard = [[InlineKeyboardButton(response[0], callback_data="['dr', '{}', {}, {}, {}]".format(query_list[1], response[1], query_list[3], query_list[4]))]for response in response_list]
            else:
                keyboard = [[InlineKeyboardButton('Confirm', callback_data="['dr', '{}', 'confirm', {}]".format(query_list[1], query_list[4]))],
                            [InlineKeyboardButton('Back', callback_data="['dr', '{}', 'back', {}, {}]".format(query_list[1], query_list[3], query_list[4]))]]
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, reply_markup=reply_markup, parse_mode=telegram.ParseMode.HTML)
        db.commit()
    db.close()
#------------------------------------------Discard EW------------------------------------------
def discardew(bot, country, number, session):
    """Prompt *country*'s player to discard *number* Economic Warfare cards from his hand.

    Does nothing when the hand holds no Economic Warfare cards. Otherwise
    announces the discard in the group chat and blocks on a session lock until
    the player has picked cards via the inline keyboard (handled by discardew_cb).
    """
    db = sqlite3.connect(session.get_db_dir())
    ew_cards = db.execute("select cardid, name, type, text from card where location = 'hand' and type = 'Economic Warfare' and control =:country order by sequence;", {'country':country}).fetchall()
    group_chat = db.execute("select chatid from game;").fetchall()
    if not ew_cards:
        return
    announcement = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " Economic Warfare card(s) from his hand"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    lock_id = session.add_lock()
    player_chat = db.execute("select playerid from country where id = :country;", {'country':country}).fetchall()
    # callback_data layout consumed by discardew_cb: ['dec', country, cardid, number, lock_id]
    keyboard = []
    for entry in ew_cards:
        payload = "['dec', '{}', {}, {}, {}]".format(country, entry[0], number, lock_id)
        keyboard.append([InlineKeyboardButton(entry[1], callback_data=payload)])
    prompt = "Discard " + str(number) + " Economic Warfare card(s):\n\n"
    for entry in ew_cards:
        prompt += "<b>" + entry[1] + "</b> - " + entry[2] + " - " + entry[3] + "\n"
    bot.send_message(chat_id = player_chat[0][0], text = prompt, reply_markup = InlineKeyboardMarkup(keyboard), parse_mode=telegram.ParseMode.HTML)
    # Block until discardew_cb releases the lock.
    session.thread_lock(lock_id)
def discardew_cb(bot, query, query_list, session):
    """Inline-keyboard callback for discardew().

    query_list layout: ['dec', country, action, number, lock_id] where action
    is a cardid to select, 'back' to undo, or 'confirm'. Cards being picked
    are parked in location 'selected' until confirmed, then moved to 'discardd'.

    Fix: the 'back' branch concatenated the integer query_list[3] directly
    into the prompt string (TypeError); it is now wrapped in str(), matching
    discardhand_cb.
    """
    db = sqlite3.connect(session.get_db_dir())
    if query_list[2] == 'confirm':
        selected = db.execute("select name, sequence from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
        db.execute("update card set location = 'discardd' where location = 'selected' and control =:country;", {'country':query_list[1]})
        text = 'Discarded:\n'
        for card in selected:
            text += str(card[0]) + '\n'
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, parse_mode=telegram.ParseMode.HTML)
        db.commit()
        # Unblock the discardew() caller waiting on session.thread_lock().
        session.release_lock(query_list[-1])
    else:
        if query_list[2] == 'back':
            # Undo: return every provisionally selected card to the hand.
            db.execute("update card set location = 'hand' where location = 'selected' and control =:country;", {'country':query_list[1]})
            response_list = db.execute("select name, cardid, type, text from card where location = 'hand' and type = 'Economic Warfare' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = "Discard " + str(query_list[3]) + " Economic Warfare card(s):\n\n"
            for card in response_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            keyboard = [[InlineKeyboardButton(response[0], callback_data="['dec', '{}', {}, {}, {}]".format(query_list[1], response[1], query_list[3], query_list[4]))]for response in response_list]
        else:
            # A cardid was tapped: park that card in 'selected'.
            db.execute("update card set location = 'selected' where cardid =:id and control =:country;", {'id': query_list[2], 'country':query_list[1]})
            response_list = db.execute("select name, cardid, type, text from card where location = 'hand' and type = 'Economic Warfare' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            selected = db.execute("select name, cardid, type, text from card where location = 'selected' and control =:country order by sequence;", {'country':query_list[1]}).fetchall()
            text = 'Hand:\n'
            for card in response_list:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            text += '\nDiscarded:\n'
            for card in selected:
                text += "<b>" + card[0] + "</b> - " + card[2] + " - " + card[3] + "\n"
            if len(selected) < query_list[3]:
                # Still short of the required count: keep offering the remaining EW cards.
                keyboard = [[InlineKeyboardButton(response[0], callback_data="['dec', '{}', {}, {}, {}]".format(query_list[1], response[1], query_list[3], query_list[4]))]for response in response_list]
            else:
                keyboard = [[InlineKeyboardButton('Confirm', callback_data="['dec', '{}', 'confirm', {}]".format(query_list[1], query_list[4]))],
                            [InlineKeyboardButton('Back', callback_data="['dec', '{}', 'back', {}, {}]".format(query_list[1], query_list[3], query_list[4]))]]
        reply_markup = InlineKeyboardMarkup(keyboard)
        bot.edit_message_text(chat_id=query.message.chat_id, message_id=query.message.message_id, text=text, reply_markup=reply_markup, parse_mode=telegram.ParseMode.HTML)
        db.commit()
    db.close()
#------------------------------------------Discard------------------------------------------
def ewdiscard(bot, cardid, active_country, passive_country, number, session): #ew discard that call respone
    """Resolve an Economic Warfare card played by *active_country* against *passive_country*.

    Shows the status card, lets status effects adjust the discard count, and
    then — unless a response flag (c171/c179) cancels it — forces the target
    to discard *number* cards from his deck via discarddeck().
    """
    db = sqlite3.connect(session.get_db_dir())
    group_chat_id = db.execute("select chatid from game;").fetchall()
    lock_id = session.add_lock()
    status_handler.send_status_card(bot, active_country, 'Economic Warfare', lock_id, session, passive_country_id = passive_country, card_id = cardid)
    # Status effects may change how many cards must be discarded.
    extra_number = status_handler.status_ew_handler(bot, cardid, active_country, passive_country, session)
    number += extra_number
    if number > 0:
        # Imported locally — presumably to avoid a circular import at module load; TODO confirm.
        import cardfunction
        card_name = db.execute("select name from card where cardid = :cardid;", {'cardid':cardid}).fetchall()
        if cardfunction.c171_used or cardfunction.c179_used:
            # A response (flags set by cards 171/179 in cardfunction) cancelled the
            # attack; clear both one-shot flags and announce the card as ignored.
            cardfunction.c171_used = False
            cardfunction.c179_used = False
            text = "<b>" + card_name[0][0] + "</b> ignored"
            bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        else:
            text = "<b>" + countryid2name[passive_country] + "</b> is attacked by " + card_name[0][0]
            bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
            discarddeck(bot, passive_country, number, session)
def discarddeck(bot, country, number, session):
    """Discard the top *number* cards of *country*'s deck, announcing it.

    If the deck holds fewer than *number* cards, the whole deck is discarded
    and the shortfall is passed to discardhand_no_deck(), which takes the
    remainder from the hand (and ultimately victory points).

    Fix: the whole-deck UPDATE previously bound a stray ':number' parameter
    that the statement never references; the unused binding is dropped
    (mirrors the equivalent branch in drawdeck()).
    """
    db = sqlite3.connect(session.get_db_dir())
    cardcount = db.execute("select count(*) from card where location = 'deck' and control =:country;", {'country':country}).fetchall()
    group_chat_id = db.execute("select chatid from game;").fetchall()
    text = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " card(s) from his deck"
    bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    if number > cardcount[0][0]:
        # Deck exhausted: discard everything, then take the rest from the hand.
        db.execute("update card set location = 'discardd' where location = 'deck' and control =:country;", {'country':country})
        db.commit()
        text = "<b>" + countryid2name[country] + "</b> finished his deck"
        bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        short = number - cardcount[0][0]
        discardhand_no_deck(bot, country, short, session)
    else:
        db.execute("update card set location = 'discardd' where location = 'deck' and sequence <= :number and control =:country;", {'number':number, 'country':country})
        # Renumber the surviving deck back to sequences 1..N.
        for j in range (1, cardcount[0][0] - number + 1):
            db.execute("update card set sequence =:new where sequence =:old and location = 'deck' and control =:country;", {'new': j, 'old': j + number, 'country':country})
        db.commit()
def discarddeck_no_hand(bot, country, number, db):
    """Discard the top *number* cards of *country*'s deck; shortfall costs VP.

    Variant of discarddeck() used when the hand is already empty: if the deck
    cannot cover *number*, the difference is paid via deduct_vp() instead of
    falling back to the hand. Takes an open connection rather than a session.

    Fix: the whole-deck UPDATE previously bound a stray ':number' parameter
    that the statement never references; the unused binding is dropped.
    """
    cardcount = db.execute("select count(*) from card where location = 'deck' and control =:country;", {'country':country}).fetchall()
    group_chat_id = db.execute("select chatid from game;").fetchall()
    text = "<b>" + countryid2name[country] + "</b> discarded " + str(number) + " card(s) from his deck"
    bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
    if number > cardcount[0][0]:
        # Deck exhausted: discard everything, then pay the difference in VP.
        db.execute("update card set location = 'discardd' where location = 'deck' and control =:country;", {'country':country})
        db.commit()
        text = "<b>" + countryid2name[country] + "</b> finished his deck"
        bot.send_message(chat_id = group_chat_id[0][0], text = text, parse_mode=telegram.ParseMode.HTML)
        short = number - cardcount[0][0]
        deduct_vp(bot, country, short, db)
    else:
        db.execute("update card set location = 'discardd' where location = 'deck' and sequence <= :number and control =:country;", {'number':number, 'country':country})
        # Renumber the surviving deck back to sequences 1..N.
        for j in range (1, cardcount[0][0] - number + 1):
            db.execute("update card set sequence =:new where sequence =:old and location = 'deck' and control =:country;", {'new': j, 'old': j + number, 'country':country})
        db.commit()
def discardcard(bot, cardid, db):
    """Discard card *cardid* face-up ('discardu') and announce it by name."""
    group_chat = db.execute("select chatid from game;").fetchall()
    name_rows = db.execute("select name from card where cardid = :cardid;", {'cardid':cardid}).fetchall()
    announcement = "<b>" + name_rows[0][0] + "</b> discarded"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    db.execute("update card set location = 'discardu' where cardid = :cardid;", {'cardid':cardid})
    db.commit()
def facedowndiscardcard(bot, cardid, db):
    """Discard card *cardid* face-down ('discardd') without revealing it.

    No message is sent, keeping the card hidden. The original fetched the
    group chat id, the card's owner/type and name, and built an announcement
    string that was never sent; that dead code is removed.
    """
    db.execute("update card set location = 'discardd' where cardid = :cardid;", {'cardid':cardid})
    db.commit()
#------------------------------------------Add/Deduct vp------------------------------------------
def add_vp(bot, country, vp, db):
    """Award *vp* victory points to *country* and announce the gain in the group chat."""
    group_chat = db.execute("select chatid from game;").fetchall()
    announcement = "<b>" + countryid2name[country] + "</b> gain " + str(vp) + " point"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    db.execute("update country set point = point + :vp where id = :country;", {'vp':vp, 'country':country})
    db.commit()
def deduct_vp(bot, country, vp, db):
    """Subtract *vp* victory points from *country* and announce the loss in the group chat."""
    group_chat = db.execute("select chatid from game;").fetchall()
    announcement = "<b>" + countryid2name[country] + "</b> lose " + str(vp) + " point"
    bot.send_message(chat_id = group_chat[0][0], text = announcement, parse_mode=telegram.ParseMode.HTML)
    db.execute("update country set point = point - :vp where id = :country;", {'vp':vp, 'country':country})
    db.commit()
#------------------------------------------Can Build------------------------------------------
def can_build(country, space, db):
    """Return True if *country* may build in *space*: within 1 of a controlled,
    supplied space and not occupied by its own or its enemy side's non-air pieces."""
    reachable = within(getside[country], control_supplied_space_list(country, db), 1, db)
    occupied_rows = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    # Locations are stored as text and eval()'d back to ints.
    # NOTE(review): eval() on DB content is unsafe if the table is ever externally writable.
    blocked = [eval(row[0]) for row in occupied_rows]
    return space in reachable and space not in blocked
def build_list(country, db, space_type = 'all'):
    """Return the spaces where *country* may build new pieces.

    Starts from spaces within 1 of a controlled, supplied space, adds the
    country's home space and any status-card extras, removes spaces occupied
    by own-side or enemy-side non-air pieces, then splits by terrain: land
    spaces qualify directly; sea spaces additionally require adjacency to a
    friendly-side army (straits permitting). *space_type* restricts the
    result to 'land', 'sea', or 'all'.
    """
    axis_list = ['ge', 'jp', 'it']
    allies_list = ['uk', 'su', 'us']  # NOTE(review): unused in this function
    space_list = within(getside[country], control_supplied_space_list(country, db), 1, db)
    home_country = db.execute("select home from country where id = :country;", {'country':country}).fetchall()
    space_list.append(home_country[0][0])
    # Status cards may grant additional build locations.
    extra_list = status_handler.status_build_location(country, db)
    if extra_list != None:
        space_list += extra_list
    xspace = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    # Locations are stored as text; eval() converts them back to ints.
    xspace_list = [eval(s[0]) for s in xspace]
    space_list = list(set(space_list) - set(xspace_list))
    space_list2 =[]
    # One '?' placeholder per candidate space for the IN (...) clauses below.
    questionmarks = '?' * len(space_list)
    if space_type != 'sea':
        # Land candidates qualify directly.
        space = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(','.join(questionmarks)), (space_list)).fetchall()
        space_list2 += [s[0] for s in space]
    if space_type != 'land':
        # Sea candidates must also be adjacent to a friendly-side army (naval supply).
        space = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(','.join(questionmarks)), (space_list)).fetchall()
        space_list = [s[0] for s in space]
        if country in axis_list:
            supply_space = db.execute("select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Axis') and piece.type = 'army' and (space.straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none');").fetchall()
        else:
            supply_space = db.execute("select distinct space.adjacency from piece inner join space on piece.location = space.spaceid where piece.control in (select id from country where side = 'Allied') and piece.type = 'army' and (space.straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or space.straits = 'none');").fetchall()
        supply_space_list = [s[0] for s in supply_space]
        space_list2 += list(set(space_list) & set(supply_space_list))
    return space_list2
#------------------------------------------Can Recuit------------------------------------------
def can_recuit(country, space, db):
    """Return True if *country* may recruit in *space*.

    A space is blocked when it already holds a non-air piece belonging to
    this country or to any country on its enemy side.  Piece locations are
    stored as strings and eval'd back into space ids before comparison.
    """
    occupied = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in occupied}
    return space not in blocked
def recuit_list(country, db, space_type = 'all'):
    """List every space id where *country* could recruit.

    Starts from all spaces, drops any space holding a non-air piece of this
    country or of its enemy side, then optionally narrows the result to
    'land' or 'sea' spaces.
    """
    rows = db.execute("select distinct spaceid from space;").fetchall()
    candidates = [row[0] for row in rows]
    occupied = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in occupied}
    candidates = list(set(candidates) - blocked)
    placeholders = ','.join(['?'] * len(candidates))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (candidates)).fetchall()
        candidates = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (candidates)).fetchall()
        candidates = [row[0] for row in rows]
    return candidates
#------------------------------------------Can Battle------------------------------------------
def can_battle(country, space, db):
    """Return True if *country* can start a battle in *space*.

    The space must be within 1 step of a supplied piece of this country and
    must not already hold a non-air piece of the same side.
    """
    reachable = within(getside[country], control_supplied_space_list(country, db), 1, db)
    friendly = db.execute("select distinct location from piece where control in (select id from country where side = (select side from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    friendly_spaces = [eval(row[0]) for row in friendly]
    return space in reachable and space not in friendly_spaces
def battle_list(country, db, space_type = 'all'):
    """List every space id where *country* could start a battle.

    Candidates are the spaces within 1 of a supplied friendly piece (plus
    any status-card extras), minus spaces holding a same-side non-air
    piece; optionally narrowed to 'land' or 'sea' spaces.
    """
    candidates = within(getside[country], control_supplied_space_list(country, db), 1, db)
    extras = status_handler.status_battle_location(country, db)
    if extras != None:
        candidates += extras
    friendly = db.execute("select distinct location from piece where control in (select id from country where side = (select side from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in friendly}
    candidates = list(set(candidates) - blocked)
    placeholders = ','.join(['?'] * len(candidates))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (candidates)).fetchall()
        candidates = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (candidates)).fetchall()
        candidates = [row[0] for row in rows]
    return candidates
#------------------------------------------Can Remove------------------------------------------
def can_remove(country, space, db):
    """Return True if *space* holds no same-side non-air piece, i.e. a
    removal targeting it is legal for *country*."""
    friendly = db.execute("select distinct location from piece where control in (select id from country where side = (select side from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    return space not in {eval(row[0]) for row in friendly}
def remove_list(country, db, space_type = 'all'):
    """List the locations of enemy-side non-air pieces — the spaces from
    which *country* may remove pieces — optionally narrowed to 'land' or
    'sea' spaces."""
    rows = db.execute("select distinct location from piece where control in (select id from country where side = (select enemy from country where id = :country)) and location != 'none' and type != 'air';", {'country':country}).fetchall()
    spaces = [eval(row[0]) for row in rows]
    placeholders = ','.join(['?'] * len(spaces))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (spaces)).fetchall()
        spaces = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (spaces)).fetchall()
        spaces = [row[0] for row in rows]
    return spaces
#------------------------------------------Can Deploy------------------------------------------
def deploy_list(country, db, space_type = 'all'):
    """List *country*'s supplied spaces minus the spaces holding its air
    pieces — the legal deployment targets."""
    supplied = control_supplied_space_list(country, db, space_type = space_type)
    air = control_air_space_list(country, db, space_type = space_type)
    return list(set(supplied) - set(air))
#------------------------------------------Is in------------------------------------------
def isin_list(country, space_list, db):
    """Return True if *country* has at least one piece in any of *space_list*.

    Bug fix: the bound parameters were passed as ``(country, space_list)`` —
    a 2-tuple whose second element is a list, which the DB-API cannot bind
    against ``1 + len(space_list)`` placeholders.  They are now flattened
    into a single parameter sequence.
    """
    questionmarks = '?' * len(space_list)
    count = db.execute("select count(*) from piece where control = ? and location in ({});".format(','.join(questionmarks)), [country, *space_list]).fetchall()
    return count[0][0] != 0
def isin(country, space, db):
    """Return True if *country* has at least one piece located in *space*."""
    rows = db.execute("select count(*) from piece where control = :country and location = :location", {'country':country, 'location':space}).fetchall()
    return bool(rows[0][0])
#------------------------------------------Within------------------------------------------
def within(side, space_list, number, db):
    """Return *space_list* expanded by every space reachable within
    *number* adjacency steps.

    Allied expansion is blocked through Axis-held straits and by status
    cards that are not yet 'used'; the Axis branch uses the mirrored
    conditions (with status '165' always passable).

    Bug fix: the caller's list is no longer mutated — the original appended
    into the argument list as a side effect of the walk.
    """
    reachable = list(space_list)  # work on a copy; never mutate the argument
    for _ in range(number):
        questionmarks = '?' * len(reachable)
        if side == 'Allied':
            adjacency = db.execute("select spaceid from space where adjacency in ({}) and (straits not in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status not in (select cardid from card where location = 'used') or status = 'none');".format(','.join(questionmarks)), (reachable)).fetchall()
        else:
            adjacency = db.execute("select spaceid from space where adjacency in ({}) and (straits in (select location from piece where control in (select id from country where side = 'Axis') and location != 'none') or straits = 'none') and (status in (select cardid from card where location = 'used') or status in ('none', '165'));".format(','.join(questionmarks)), (reachable)).fetchall()
        reachable.extend(row[0] for row in adjacency)
        # de-duplicate between steps so placeholders stay bounded
        reachable = list(set(reachable))
    return reachable
#------------------------------------------Control list------------------------------------------
def control_space_list(country, db, space_type = 'all'):
    """List the space ids occupied by *country*'s pieces.

    'land' restricts to army pieces, 'sea' to navy pieces; anything else
    counts every piece type.
    """
    if space_type == 'land':
        rows = db.execute("select distinct location from piece where location != 'none' and type = 'army' and control = :country;", {'country':country}).fetchall()
    elif space_type == 'sea':
        rows = db.execute("select distinct location from piece where location != 'none' and type = 'navy' and control = :country;", {'country':country}).fetchall()
    else:
        rows = db.execute("select distinct location from piece where location != 'none' and control = :country;", {'country':country}).fetchall()
    return [eval(row[0]) for row in rows]
def control_side_space_list(side, db, space_type = 'all'):
    """List the space ids occupied by pieces of every country on *side*.

    'land' restricts to army pieces, 'sea' to navy pieces; anything else
    counts every piece type.

    Bug fix: the 'all' branch was missing the ``control`` column name
    (``... and in (select ...)``), a SQL syntax error that made every
    default call raise.
    """
    if space_type == 'land':
        control = db.execute("select distinct location from piece where location != 'none' and type = 'army' and control in (select id from country where side = :side);", {'side':side}).fetchall()
    elif space_type == 'sea':
        control = db.execute("select distinct location from piece where location != 'none' and type = 'navy' and control in (select id from country where side = :side);", {'side':side}).fetchall()
    else:
        control = db.execute("select distinct location from piece where location != 'none' and control in (select id from country where side = :side);", {'side':side}).fetchall()
    control_list = [eval(space[0]) for space in control]
    return control_list
def control_air_space_list(country, db, space_type = 'all'):
    """List the space ids occupied by *country*'s air pieces, optionally
    narrowed to 'land' or 'sea' spaces."""
    rows = db.execute("select distinct location from piece where location != 'none' and type = 'air' and control = :country;", {'country':country}).fetchall()
    occupied = [eval(row[0]) for row in rows]
    placeholders = ','.join(['?'] * len(occupied))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (occupied)).fetchall()
        result = [row[0] for row in rows]
    elif space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (occupied)).fetchall()
        result = [row[0] for row in rows]
    else:
        result = occupied
    return result
def control_side_air_space_list(side, db, space_type = 'all'):
    """List the space ids occupied by air pieces of every country on
    *side*, optionally narrowed to 'land' or 'sea' spaces."""
    rows = db.execute("select distinct location from piece where control in (select id from country where side = :side) and type = 'air' and location != 'none';", {'side':side}).fetchall()
    occupied = [eval(row[0]) for row in rows]
    placeholders = ','.join(['?'] * len(occupied))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (occupied)).fetchall()
        result = [row[0] for row in rows]
    elif space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (occupied)).fetchall()
        result = [row[0] for row in rows]
    else:
        result = occupied
    return result
def control_supplied_space_list(country, db, space_type = 'all'):
    """List the space ids of *country*'s in-supply pieces.

    Supply flags are refreshed first via updatesupply(); 'land' restricts
    to armies, 'sea' to navies, anything else to all non-air pieces.
    """
    updatesupply(db)
    if space_type == 'land':
        rows = db.execute("select location from piece where supply = 1 and location != 'none' and type = 'army' and control = :country;", {'country':country}).fetchall()
    elif space_type == 'sea':
        rows = db.execute("select location from piece where supply = 1 and location != 'none' and type = 'navy' and control = :country;", {'country':country}).fetchall()
    else:
        rows = db.execute("select location from piece where supply = 1 and location != 'none' and type != 'air' and control = :country;", {'country':country}).fetchall()
    supplied = [eval(row[0]) for row in rows]
    # debug trace kept from the original implementation
    print('---control_supplied_space_list---')
    print(supplied)
    return supplied
def control_vp_space_list(country, db):
    """Return (controlled VP spaces, controlled shared VP spaces) for *country*.

    VP candidates are all supplied spaces, adjusted by the status handler;
    spaces contested by more than one piece are split out via
    shared_vp_space_list().  Returns a 2-tuple whose second element is the
    literal string 'none' when shared_vp_space_list() returned None.
    """
    # Piece 0 is forced off-board first and committed immediately so the
    # queries below never count it.  (Presumably a placeholder piece —
    # TODO confirm against the schema.)
    db.execute("update piece set location = 'none' where pieceid = 0;")
    db.commit()
    vp_space = db.execute("select distinct spaceid from space where supply = 1;").fetchall()
    vp_space_list = [space[0] for space in vp_space]
    # Status cards may adjust the VP locations for this country.
    vp_space_list = status_handler.status_vp_location(country, vp_space_list, db)
    shared = shared_vp_space_list(country, vp_space_list, db)
    if shared != None:
        # Contested spaces are tracked separately from solely-held ones.
        vp_space_list = list(set(vp_space_list) - set(shared))
    control = db.execute("select location from piece where location != 'none' and control = :country;", {'country':country}).fetchall()
    control_list = [eval(space[0]) for space in control]
    control_vp_list = list(set(control_list) & set(vp_space_list))
    if shared != None:
        control_shared_vp_list = list(set(control_list) & set(shared))
        return control_vp_list, control_shared_vp_list
    else:
        # NOTE(review): callers must handle the string sentinel 'none' here.
        return control_vp_list, 'none'
def shared_vp_space_list(country, vp_space_list, db):
    """Return the spaces from *vp_space_list* holding more than one non-air
    piece (contested spaces).  *country* is unused but kept for interface
    symmetry with the other list helpers."""
    placeholders = ','.join(['?'] * len(vp_space_list))
    rows = db.execute("select location from (select location , count(location) as c from piece where location in ({}) and type != 'air' group by location) where c > 1;".format(placeholders), (vp_space_list)).fetchall()
    return [eval(row[0]) for row in rows]
#------------------------------------------Filter list------------------------------------------
def filter_space_list(space_list, db, control = 'all', space_type = 'all'):
    """Filter *space_list* by terrain ('land'/'sea') and by controlling
    side ('Axis'/'Allied'/'neutral'); 'all' skips that filter.  Returns a
    de-duplicated list of space ids."""
    placeholders = ','.join(['?'] * len(space_list))
    if space_type == 'land':
        rows = db.execute("select spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    # recompute: the terrain filter may have shrunk the list
    placeholders = ','.join(['?'] * len(space_list))
    if control == 'Axis':
        rows = db.execute("select spaceid from space where spaceid in (select location from piece where control in ('ge', 'jp', 'it') and location != 'none') and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if control == 'Allied':
        rows = db.execute("select spaceid from space where spaceid in (select location from piece where control in ('uk', 'su', 'us') and location != 'none') and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if control == 'neutral':
        rows = db.execute("select spaceid from space where spaceid not in (select location from piece where control in ('ge', 'jp', 'it', 'uk', 'su', 'us') and location != 'none') and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    return list(set(space_list))
def filter_build_list(space_list, country, db, space_type = 'all'):
    """Narrow *space_list* to spaces where *country* could build: optional
    terrain filter, drop spaces occupied by this country or its enemy side,
    then intersect with the spaces within 1 of a supplied piece."""
    placeholders = ','.join(['?'] * len(space_list))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    occupied = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in occupied}
    remaining = [space for space in space_list if space not in blocked]
    reachable = within(getside[country], control_supplied_space_list(country, db), 1, db)
    return list(set(remaining) & set(reachable))
def filter_recuit_list(space_list, country, db, space_type = 'all'):
    """Narrow *space_list* to spaces where *country* could recruit:
    optional terrain filter, then drop spaces occupied by this country or
    its enemy side.  Returns a de-duplicated list."""
    placeholders = ','.join(['?'] * len(space_list))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    occupied = db.execute("select distinct location from piece where control in (select id from country where id = :country or side = (select enemy from country where id = :country)) and location != 'none';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in occupied}
    remaining = [space for space in space_list if space not in blocked]
    return list(set(remaining))
def filter_battle_list(space_list, country, db, space_type = 'all'):
    """Narrow *space_list* to spaces where *country* could battle: optional
    terrain filter, drop spaces holding a same-side piece, then intersect
    with the spaces within 1 of a supplied piece."""
    placeholders = ','.join(['?'] * len(space_list))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    friendly = db.execute("select distinct location from piece where control in (select id from country where side = (select side from country where id = :country)) and location != 'none';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in friendly}
    remaining = [space for space in space_list if space not in blocked]
    reachable = within(getside[country], control_supplied_space_list(country, db), 1, db)
    return list(set(remaining) & set(reachable))
def filter_remove_list(space_list, country, db, space_type = 'all'):
    """Narrow *space_list* to spaces *country* may remove pieces from:
    optional terrain filter, then drop spaces holding a same-side piece.
    Returns a de-duplicated list."""
    placeholders = ','.join(['?'] * len(space_list))
    if space_type == 'land':
        rows = db.execute("select distinct spaceid from space where type = 'land' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    if space_type == 'sea':
        rows = db.execute("select distinct spaceid from space where type = 'sea' and spaceid in ({});".format(placeholders), (space_list)).fetchall()
        space_list = [row[0] for row in rows]
    friendly = db.execute("select distinct location from piece where control in (select id from country where side = (select side from country where id = :country)) and location != 'none';", {'country':country}).fetchall()
    blocked = {eval(row[0]) for row in friendly}
    remaining = [space for space in space_list if space not in blocked]
    return list(set(remaining))
#------------------------------------------Get name list------------------------------------------
def get_name_list(space_list, db):
    """Return (spaceid, name) rows for every space id in *space_list*."""
    placeholders = ','.join(['?'] * len(space_list))
    return db.execute("select distinct spaceid, name from space where spaceid in ({});".format(placeholders), (space_list)).fetchall()
#------------------------------------------Side VP------------------------------------------
def side_pt(db):
    """Recompute both sides' point totals in the game table from the
    per-country points and return them as (axis, allies)."""
    db.execute("update game set axispt = (select sum(point) from country where id in ('ge', 'it', 'jp'));")
    db.execute("update game set alliespt = (select sum(point) from country where id in ('uk', 'su', 'us', 'fr', 'ch'));")
    axis = db.execute("select axispt from game;").fetchall()
    allies = db.execute("select alliespt from game;").fetchall()
    return axis[0][0], allies[0][0]
def update_allies_pt(db):
    """Recompute the Allied point total in the game table and return it.

    Bug fix: the select result was used without ``fetchall()`` — a bare
    cursor is not subscriptable, so ``alliespt[0][0]`` raised TypeError.
    """
    db.execute("update game set alliespt = (select sum(point) from country where id in ('uk', 'su', 'us', 'fr', 'ch'));")
    alliespt = db.execute("select alliespt from game;").fetchall()
    return alliespt[0][0]
#------------------------------------------Supplied list------------------------------------------
def supplied_space_list(country, db, space_type = 'all'):
    """Union of *country*'s supplied piece locations and its legal build
    spaces."""
    combined = list(set(control_supplied_space_list(country, db, space_type = space_type)) | set(build_list(country, db, space_type = space_type)))
    # debug trace kept from the original implementation
    print('---supplied_space_list---')
    print(combined)
    return combined
| 81.113415
| 1,022
| 0.640912
| 8,869
| 66,513
| 4.696809
| 0.028301
| 0.040402
| 0.046452
| 0.029047
| 0.914898
| 0.903807
| 0.890628
| 0.874976
| 0.85798
| 0.838247
| 0
| 0.009421
| 0.186114
| 66,513
| 819
| 1,023
| 81.212454
| 0.760077
| 0.077263
| 0
| 0.66573
| 0
| 0.095506
| 0.403173
| 0.000946
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073034
| false
| 0.007022
| 0.011236
| 0
| 0.125
| 0.008427
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ea9339a5c9c1784deeff386ac9a491c89a379403
| 11,724
|
py
|
Python
|
test_channel.py
|
navin-a/lankan-vibe-kodi-video-addon
|
81424c995fd66f6dc1bebe33f6f51919339984f2
|
[
"Apache-2.0"
] | null | null | null |
test_channel.py
|
navin-a/lankan-vibe-kodi-video-addon
|
81424c995fd66f6dc1bebe33f6f51919339984f2
|
[
"Apache-2.0"
] | null | null | null |
test_channel.py
|
navin-a/lankan-vibe-kodi-video-addon
|
81424c995fd66f6dc1bebe33f6f51919339984f2
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'Navin'
import unittest
from channel import ITN
from channel import Rupavahini
from channel import Derana
from channel import Swarnavahini
class TestITN(unittest.TestCase):
    """Live-scrape tests for the ITN channel."""

    def setUp(self):
        self.channel = ITN()

    def testGetSource(self):
        source = self.channel.getSource("/")
        self.assertIsNotNone(source)
        # Py3 fix: a str cannot be ordered against an int; check its length.
        self.assertGreater(len(source), 0, "ITN Home page source not available")
        # Reuse the already-fetched source instead of downloading it twice.
        self.assertTrue(len(source) > 10000, "ITN home page source length is too short")

    def testGetCategories(self):
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(self.channel.getCategories(), ('Drama', 'Entertainment'))

    def testGetProgrammesForDramaCategory(self):
        self._checkProgrammes('Drama', minimum=5)

    def testGetEpisodesForDrama(self):
        programmes = self.channel.getProgrammes("Drama")
        # Py3 fix: iterators expose __next__, not .next(); use next().
        programme = next(programmes)
        episodes = self.channel.getEpisodes(programme[1])
        noOfEpisodes = 0
        for episode in episodes:
            self.assertIsInstance(episode, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(episode) == 3, "Does not provide the expected details of the episode")
            self.assertIsNotNone(episode[0])
            self.assertIsNotNone(episode[1])
            self.assertIsNotNone(episode[2])
            noOfEpisodes += 1
        self.assertGreater(noOfEpisodes, 0, "Episodes for programme " + programme[1] + " not found")
        self.assertLess(noOfEpisodes, 14, "Too many episodes found. Could be an error")

    def testGetProgrammesForEntertainmentCategory(self):
        self._checkProgrammes('Entertainment', minimum=5)

    def _checkProgrammes(self, category, minimum):
        # Shared programme-listing checks: every entry is a 2-tuple with
        # non-None fields and the count falls in a plausible range.
        programmes = self.channel.getProgrammes(category)
        self.assertIsNotNone(programmes)
        noOfProgrammes = 0
        for prg in programmes:
            self.assertIsInstance(prg, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(prg) == 2, "Does not provide the expected details of the programme")
            self.assertIsNotNone(prg[0])
            self.assertIsNotNone(prg[1])
            noOfProgrammes += 1
        self.assertGreater(noOfProgrammes, minimum, "Number of programmes found is too low. Could be an error")
        self.assertLess(noOfProgrammes, 40, "Number of programmes found is too high. Could be an error")
class TestRupavahini(unittest.TestCase):
    """Live-scrape tests for the Rupavahini channel."""

    def setUp(self):
        self.channel = Rupavahini()

    def testGetSource(self):
        source = self.channel.getSource("/")
        self.assertIsNotNone(source)
        # Py3 fix: a str cannot be ordered against an int; check its length.
        self.assertGreater(len(source), 0, "Rupavahini Home page source not available")
        self.assertTrue(len(source) > 10000, "Rupavahini home page source length is too short")

    def testGetCategories(self):
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(self.channel.getCategories(), ('Drama', 'News'))

    def testGetProgrammesForDramaCategory(self):
        self._checkProgrammes('Drama', minimum=5)

    def testGetEpisodesForDrama(self):
        programmes = self.channel.getProgrammes('Drama')
        # Py3 fix: iterators expose __next__, not .next(); use next().
        programme = next(programmes)
        episodes = self.channel.getEpisodes(programme[1])
        noOfEpisodes = 0
        for episode in episodes:
            self.assertIsInstance(episode, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(episode) == 3, "Does not provide the expected details of the episode")
            self.assertIsNotNone(episode[0])
            self.assertIsNotNone(episode[1])
            self.assertIsNotNone(episode[2])
            noOfEpisodes += 1
        self.assertGreater(noOfEpisodes, 0, "Episodes for programme " + programme[1] + " not found")
        self.assertLess(noOfEpisodes, 21, "Too many episodes found. Could be an error")

    def testGetProgrammesForNewsCategory(self):
        self._checkProgrammes('News', minimum=2)

    def _checkProgrammes(self, category, minimum):
        # Shared programme-listing checks: every entry is a 2-tuple with
        # non-None fields and the count falls in a plausible range.
        programmes = self.channel.getProgrammes(category)
        self.assertIsNotNone(programmes)
        noOfProgrammes = 0
        for prg in programmes:
            self.assertIsInstance(prg, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(prg) == 2, "Does not provide the expected details of the programme")
            self.assertIsNotNone(prg[0])
            self.assertIsNotNone(prg[1])
            noOfProgrammes += 1
        self.assertGreater(noOfProgrammes, minimum, "Number of programmes found is too low. Could be an error")
        self.assertLess(noOfProgrammes, 40, "Number of programmes found is too high. Could be an error")
class TestDerana(unittest.TestCase):
    """Live-scrape tests for the Derana channel."""

    def setUp(self):
        self.channel = Derana()

    def testGetSource(self):
        source = self.channel.getSource("/")
        self.assertIsNotNone(source)
        # Py3 fix: a str cannot be ordered against an int; check its length.
        self.assertGreater(len(source), 0, "Derana Home page source not available")
        self.assertTrue(len(source) > 10000, "Derana home page source length is too short")

    def testGetCategories(self):
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(self.channel.getCategories(), ('Music','Magazine & Variety', 'Talk Shows', 'Reality Shows'))

    def testGetProgrammesForMusicCategory(self):
        self._checkProgrammes('Music', minimum=0)

    def testGetEpisodesForMusic(self):
        self._checkEpisodes('Music', maximum=21)

    def testVideo(self):
        # Bug fix: this method was defined twice with an identical body and
        # the second definition silently shadowed the first; one copy kept.
        self.channel.getVideo('/Dell-Studio-Dell-Studio-Studiyo-Songs&vid=13532&page=1')

    def testGetProgrammesForTalkShowCategory(self):
        self._checkProgrammes('Talk Shows', minimum=0)

    def testGetEpisodesForTalkShows(self):
        self._checkEpisodes('Talk Shows', maximum=21)

    def _checkProgrammes(self, category, minimum):
        # Shared programme-listing checks: every entry is a 2-tuple with
        # non-None fields and the count falls in a plausible range.
        programmes = self.channel.getProgrammes(category)
        self.assertIsNotNone(programmes)
        noOfProgrammes = 0
        for prg in programmes:
            self.assertIsInstance(prg, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(prg) == 2, "Does not provide the expected details of the programme")
            self.assertIsNotNone(prg[0])
            self.assertIsNotNone(prg[1])
            noOfProgrammes += 1
        self.assertGreater(noOfProgrammes, minimum, "Number of programmes found is too low. Could be an error")
        self.assertLess(noOfProgrammes, 40, "Number of programmes found is too high. Could be an error")

    def _checkEpisodes(self, category, maximum):
        # Shared episode checks for the first programme in *category*:
        # every entry is a 3-tuple with non-None fields.
        programmes = self.channel.getProgrammes(category)
        # Py3 fix: iterators expose __next__, not .next(); use next().
        programme = next(programmes)
        episodes = self.channel.getEpisodes(programme[1])
        noOfEpisodes = 0
        for episode in episodes:
            self.assertIsInstance(episode, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(episode) == 3, "Does not provide the expected details of the episode")
            self.assertIsNotNone(episode[0])
            self.assertIsNotNone(episode[1])
            self.assertIsNotNone(episode[2])
            noOfEpisodes += 1
        self.assertGreater(noOfEpisodes, 0, "Episodes for programme " + programme[1] + " not found")
        self.assertLess(noOfEpisodes, maximum, "Too many episodes found. Could be an error")
class TestSwarnavahini(unittest.TestCase):
    """Live-scrape tests for the Swarnavahini channel."""

    def setUp(self):
        self.channel = Swarnavahini()

    def testGetSource(self):
        source = self.channel.getSource("/")
        self.assertIsNotNone(source)
        # Py3 fix: a str cannot be ordered against an int; check its length.
        self.assertGreater(len(source), 0, "Swarnavahini Home page source not available")
        self.assertTrue(len(source) > 10000, "Swarnavahini home page source length is too short")

    def testGetCategories(self):
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(self.channel.getCategories(), ('News','Teledrama', 'Political', 'Entertainment'))

    def testGetProgrammesForNewsCategory(self):
        programmes = self.channel.getProgrammes('News')
        self.assertIsNotNone(programmes)
        noOfProgrammes = 0
        for prg in programmes:
            self.assertIsInstance(prg, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(prg) == 2, "Does not provide the expected details of the programme")
            self.assertIsNotNone(prg[0])
            self.assertIsNotNone(prg[1])
            noOfProgrammes += 1
        self.assertGreater(noOfProgrammes, 0, "Number of programmes found is too low. Could be an error")
        self.assertLess(noOfProgrammes, 40, "Number of programmes found is too high. Could be an error")

    def testGetEpisodesForNews(self):
        programmes = self.channel.getProgrammes('News')
        # Py3 fix: iterators expose __next__, not .next(); use next().
        programme = next(programmes)
        episodes = self.channel.getEpisodes(programme[1])
        noOfEpisodes = 0
        for episode in episodes:
            self.assertIsInstance(episode, tuple, "Does not return the expected data structure (tuple)")
            self.assertTrue(len(episode) == 3, "Does not provide the expected details of the episode")
            self.assertIsNotNone(episode[0])
            self.assertIsNotNone(episode[1])
            self.assertIsNotNone(episode[2])
            noOfEpisodes += 1
        self.assertGreater(noOfEpisodes, 0, "Episodes for programme " + programme[1] + " not found")
        self.assertLess(noOfEpisodes, 61, "Too many episodes found. Could be an error")
# Allow running this test module directly with the default unittest runner.
if __name__ == '__main__':
    unittest.main()
| 49.260504
| 118
| 0.669737
| 1,291
| 11,724
| 6.072812
| 0.08598
| 0.096939
| 0.021811
| 0.033929
| 0.920918
| 0.916071
| 0.899745
| 0.867857
| 0.858036
| 0.853316
| 0
| 0.01635
| 0.233112
| 11,724
| 237
| 119
| 49.468354
| 0.855633
| 0
| 0
| 0.81068
| 0
| 0
| 0.261111
| 0.009383
| 0
| 0
| 0
| 0
| 0.485437
| 1
| 0.126214
| false
| 0
| 0.024272
| 0
| 0.169903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
57831c7fcb326518fa2fa962d72128c13fc894ec
| 24,729
|
py
|
Python
|
tests/test_so_instantiation.py
|
PhilippeCharlot22/python-onapsdk
|
5f1398d9c344a985e830bba36286f95c46e4807c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_so_instantiation.py
|
PhilippeCharlot22/python-onapsdk
|
5f1398d9c344a985e830bba36286f95c46e4807c
|
[
"Apache-2.0"
] | 10
|
2021-09-20T15:42:47.000Z
|
2021-09-23T12:49:51.000Z
|
tests/test_so_instantiation.py
|
PhilippeCharlot22/python-onapsdk
|
5f1398d9c344a985e830bba36286f95c46e4807c
|
[
"Apache-2.0"
] | 2
|
2021-09-20T13:53:12.000Z
|
2021-09-21T08:05:58.000Z
|
import json
from collections import namedtuple
from unittest import mock
import pytest
from onapsdk.exceptions import APIError, InvalidResponse, ResourceNotFound, StatusError
from onapsdk.sdnc import NetworkPreload, VfModulePreload
from onapsdk.so.instantiation import (
NetworkInstantiation,
ServiceInstantiation,
VfModuleInstantiation,
VnfInstantiation
)
from onapsdk.vid import Vid
@mock.patch.object(ServiceInstantiation, "send_message_json")
def test_service_ala_carte_instantiation(mock_service_instantiation_send_message):
    """A-la-carte instantiation: refuses an undistributed service, honours
    an explicit instance name, generates one otherwise, and POSTs to the
    serviceInstances endpoint."""
    sdc_service_mock = mock.MagicMock()

    def request_instantiation(**extra):
        # Call the classmethod under test with fresh mock collaborators.
        return ServiceInstantiation.instantiate_so_ala_carte(
            sdc_service=sdc_service_mock,
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            customer=mock.MagicMock(),
            owning_entity=mock.MagicMock(),
            project=mock.MagicMock(),
            **extra)

    # An undistributed service must be refused outright.
    sdc_service_mock.distributed = False
    with pytest.raises(StatusError):
        request_instantiation(service_instance_name="test")
    sdc_service_mock.distributed = True
    # An explicit name is kept as-is...
    service_instance = request_instantiation(service_instance_name="test")
    assert service_instance.name == "test"
    # ...and a default name is generated when none is given.
    service_instance = request_instantiation()
    assert service_instance.name.startswith("Python_ONAP_SDK_service_instance_")
    mock_service_instantiation_send_message.assert_called()
    method, _, url = mock_service_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{ServiceInstantiation.base_url}/onap/so/infra/"
                   f"serviceInstantiation/{ServiceInstantiation.api_version}/serviceInstances")
@mock.patch.object(ServiceInstantiation, "send_message_json")
def test_service_macro_instantiation(mock_service_instantiation_send_message):
    """Macro service instantiation: a not-yet-distributed service is
    rejected, an explicit instance name is kept, a missing one is generated
    with the SDK prefix, and the request is POSTed to the SO endpoint."""
    mock_sdc_service = mock.MagicMock()

    def request_kwargs(**extra):
        # Fresh MagicMocks per call, mirroring the original per-call mocks.
        base = dict(sdc_service=mock_sdc_service,
                    cloud_region=mock.MagicMock(),
                    tenant=mock.MagicMock(),
                    customer=mock.MagicMock(),
                    owning_entity=mock.MagicMock(),
                    project=mock.MagicMock(),
                    line_of_business=mock.MagicMock(),
                    platform=mock.MagicMock())
        base.update(extra)
        return base

    # Instantiation of a service which was not distributed must fail.
    mock_sdc_service.distributed = False
    with pytest.raises(StatusError):
        ServiceInstantiation.instantiate_macro(
            **request_kwargs(service_instance_name="test"))

    mock_sdc_service.distributed = True
    # An explicitly provided instance name is preserved.
    service_instance = ServiceInstantiation.instantiate_macro(
        **request_kwargs(service_instance_name="test"))
    assert service_instance.name == "test"
    # Without a name a prefixed random one is generated.
    service_instance = ServiceInstantiation.instantiate_macro(**request_kwargs())
    assert service_instance.name.startswith("Python_ONAP_SDK_service_instance_")
    mock_service_instantiation_send_message.assert_called()
    method, _, url = mock_service_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{ServiceInstantiation.base_url}/onap/so/infra/"
                   f"serviceInstantiation/{ServiceInstantiation.api_version}/serviceInstances")
def test_service_instance_aai_service_instance():
    """`aai_service_instance` property behaviour.

    Raises StatusError while the instantiation request is still IN_PROGRESS,
    resolves via the customer's service subscription once COMPLETED, and
    propagates APIError from the AAI lookup unchanged.
    """
    customer_mock = mock.MagicMock()
    service_instantiation = ServiceInstantiation(name="test",
                                                 request_id="test_request_id",
                                                 instance_id="test_instance_id",
                                                 sdc_service=mock.MagicMock(),
                                                 cloud_region=mock.MagicMock(),
                                                 tenant=mock.MagicMock(),
                                                 customer=customer_mock,
                                                 owning_entity=mock.MagicMock(),
                                                 project=mock.MagicMock())
    status_mock = mock.PropertyMock(return_value=ServiceInstantiation.StatusEnum.IN_PROGRESS)
    type(service_instantiation).status = status_mock
    # Accessing the AAI instance of an unfinished request is an error.
    with pytest.raises(StatusError):
        service_instantiation.aai_service_instance
    # Fixed: the original line read
    #   status_mock.return_value = return_value=...
    # a stray chained assignment (leftover keyword-argument typo) which also
    # bound a useless local `return_value`.
    status_mock.return_value = ServiceInstantiation.StatusEnum.COMPLETED
    assert service_instantiation.aai_service_instance is not None
    # Errors raised by the AAI lookup must not be swallowed by the property.
    customer_mock.get_service_subscription_by_service_type.side_effect = APIError
    with pytest.raises(APIError) as err:
        service_instantiation.aai_service_instance
    assert err.type == APIError
@mock.patch.object(VnfInstantiation, "send_message_json")
def test_vnf_instantiation(mock_vnf_instantiation_send_message):
    """VNF a'la carte instantiation: generated vs. explicit instance name
    and the SO "vnfs" endpoint used for the POST request."""
    aai_service_instance_mock = mock.MagicMock()
    aai_service_instance_mock.instance_id = "test_instance_id"

    def instantiate(**extra):
        # Fresh MagicMocks per call, as in the original request construction.
        return VnfInstantiation.instantiate_ala_carte(
            aai_service_instance=aai_service_instance_mock,
            vnf_object=mock.MagicMock(),
            line_of_business_object=mock.MagicMock(),
            platform_object=mock.MagicMock(),
            **extra)

    vnf_instantiation = instantiate()
    assert vnf_instantiation.name.startswith("Python_ONAP_SDK_vnf_instance_")
    mock_vnf_instantiation_send_message.assert_called_once()
    method, _, url = mock_vnf_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{VnfInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{VnfInstantiation.api_version}/serviceInstances/"
                   f"{aai_service_instance_mock.instance_id}/vnfs")
    vnf_instantiation = instantiate(vnf_instance_name="test")
    assert vnf_instantiation.name == "test"
@mock.patch.object(VnfInstantiation, "send_message_json")
def test_vnf_instantiation_with_cr_and_tenant(mock_vnf_instantiation_send_message):
    """Same as test_vnf_instantiation, but with an explicit cloud region
    and tenant passed through to the request."""
    aai_service_instance_mock = mock.MagicMock()
    aai_service_instance_mock.instance_id = "test_instance_id"

    def instantiate(**extra):
        # Fresh MagicMocks per call, as in the original request construction.
        return VnfInstantiation.instantiate_ala_carte(
            aai_service_instance=aai_service_instance_mock,
            vnf_object=mock.MagicMock(),
            line_of_business_object=mock.MagicMock(),
            platform_object=mock.MagicMock(),
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            **extra)

    vnf_instantiation = instantiate()
    assert vnf_instantiation.name.startswith("Python_ONAP_SDK_vnf_instance_")
    mock_vnf_instantiation_send_message.assert_called_once()
    method, _, url = mock_vnf_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{VnfInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{VnfInstantiation.api_version}/serviceInstances/"
                   f"{aai_service_instance_mock.instance_id}/vnfs")
    vnf_instantiation = instantiate(vnf_instance_name="test")
    assert vnf_instantiation.name == "test"
@mock.patch.object(NetworkInstantiation, "send_message_json")
@mock.patch.object(NetworkPreload, "send_message_json")
def test_network_instantiation(mock_network_preload, mock_network_instantiation_send_message):
    """Network a'la carte instantiation: one preload upload per request,
    instance naming, and the SO "networks" endpoint."""
    aai_service_instance_mock = mock.MagicMock()
    aai_service_instance_mock.instance_id = "test_instance_id"

    def instantiate(**extra):
        # Fresh MagicMocks per call, as in the original request construction.
        return NetworkInstantiation.instantiate_ala_carte(
            aai_service_instance=aai_service_instance_mock,
            network_object=mock.MagicMock(),
            line_of_business_object=mock.MagicMock(),
            platform_object=mock.MagicMock(),
            **extra)

    network_instantiation = instantiate()
    mock_network_preload.assert_called_once()
    assert network_instantiation.name.startswith("Python_ONAP_SDK_network_instance_")
    mock_network_instantiation_send_message.assert_called_once()
    method, _, url = mock_network_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{NetworkInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{NetworkInstantiation.api_version}/serviceInstances/"
                   f"{aai_service_instance_mock.instance_id}/networks")
    network_instantiation = instantiate(network_instance_name="test")
    # A second instantiation triggers a second preload upload.
    assert mock_network_preload.call_count == 2
    assert network_instantiation.name == "test"
@mock.patch.object(NetworkInstantiation, "send_message_json")
@mock.patch.object(NetworkPreload, "send_message_json")
def test_network_instantiation_with_cr_and_tenant(mock_network_preload, mock_network_instantiation_send_message):
    """Same as test_network_instantiation, but with an explicit cloud region
    and tenant passed through to the request."""
    aai_service_instance_mock = mock.MagicMock()
    aai_service_instance_mock.instance_id = "test_instance_id"

    def instantiate(**extra):
        # Fresh MagicMocks per call, as in the original request construction.
        return NetworkInstantiation.instantiate_ala_carte(
            aai_service_instance=aai_service_instance_mock,
            network_object=mock.MagicMock(),
            line_of_business_object=mock.MagicMock(),
            platform_object=mock.MagicMock(),
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            **extra)

    network_instantiation = instantiate()
    mock_network_preload.assert_called_once()
    assert network_instantiation.name.startswith("Python_ONAP_SDK_network_instance_")
    mock_network_instantiation_send_message.assert_called_once()
    method, _, url = mock_network_instantiation_send_message.call_args[0]
    assert method == "POST"
    assert url == (f"{NetworkInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{NetworkInstantiation.api_version}/serviceInstances/"
                   f"{aai_service_instance_mock.instance_id}/networks")
    network_instantiation = instantiate(network_instance_name="test")
    # A second instantiation triggers a second preload upload.
    assert mock_network_preload.call_count == 2
    assert network_instantiation.name == "test"
@mock.patch.object(Vid, "send_message")
@mock.patch.object(VnfInstantiation, "send_message_json")
@mock.patch("onapsdk.so.instantiation.SdcService")
def test_vnf_instantiation_get_by_vnf_instance_name(mock_sdc_service, mock_send_message_json, mock_send):
    """Walk `get_by_vnf_instance_name` through every failure shape of the SO
    orchestration-requests response before the single success case.

    Scenarios, in order:
      1. empty response                               -> InvalidResponse
      2. request scope is not "vnf"                   -> InvalidResponse
      3. "vnf" scope but requestType != createInstance -> InvalidResponse
      4. createInstance without request details        -> ResourceNotFound
      5. related service model, but SDC service has no vnfs -> ResourceNotFound
      6. modelCustomizationName matches no SDC vnf     -> ResourceNotFound
      7. matching vnf name                             -> object is returned
    """
    # Scenario 1: no "requestList" at all.
    mock_sdc_service.return_value.vnfs = []
    mock_send_message_json.return_value = {}
    with pytest.raises(InvalidResponse):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 2: wrong request scope.
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "not_vnf"
                }
            }
        ]
    }
    with pytest.raises(InvalidResponse):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 3: vnf scope but not a creation request.
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "vnf",
                    "requestType": "updateInstance"
                }
            }
        ]
    }
    with pytest.raises(InvalidResponse):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 4: creation request without any request details.
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "vnf",
                    "requestType": "createInstance"
                }
            }
        ]
    }
    with pytest.raises(ResourceNotFound):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 5: related service model present, but the SDC service
    # (mocked above) exposes no vnfs to match against.
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "vnf",
                    "requestType": "createInstance",
                    "requestDetails": {
                        "relatedInstanceList": [
                            {
                                "relatedInstance": {
                                    "modelInfo": {
                                        "modelType": "service",
                                        "modelName": "test_service"
                                    }
                                }
                            }
                        ]
                    }
                }
            }
        ]
    }
    with pytest.raises(ResourceNotFound):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 6: the SDC service now has a vnf, but its name does not match
    # the request's modelCustomizationName.
    mock_vnf = mock.MagicMock()
    mock_vnf.name = "test_vnf_name"
    mock_sdc_service.return_value.vnfs = [mock_vnf]
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "vnf",
                    "requestType": "createInstance",
                    "requestDetails": {
                        "modelInfo": {
                            "modelCustomizationName": "test_fail_vnf_name"
                        },
                        "relatedInstanceList": [
                            {
                                "relatedInstance": {
                                    "modelInfo": {
                                        "modelType": "service",
                                        "modelName": "test_service",
                                    }
                                }
                            }
                        ]
                    }
                }
            }
        ]
    }
    with pytest.raises(ResourceNotFound):
        VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name")
    # Scenario 7: matching customization name -> lookup succeeds.
    mock_sdc_service.return_value.vnfs = [mock_vnf]
    mock_send_message_json.return_value = {
        "requestList": [
            {
                "request": {
                    "requestScope": "vnf",
                    "requestType": "createInstance",
                    "requestDetails": {
                        "modelInfo": {
                            "modelCustomizationName": "test_vnf_name"
                        },
                        "relatedInstanceList": [
                            {
                                "relatedInstance": {
                                    "modelInfo": {
                                        "modelType": "service",
                                        "modelName": "test_service"
                                    }
                                }
                            }
                        ]
                    }
                }
            }
        ]
    }
    assert VnfInstantiation.get_by_vnf_instance_name("test_vnf_instance_name") is not None
@mock.patch.object(VfModuleInstantiation, "send_message_json")
@mock.patch.object(VfModulePreload, "upload_vf_module_preload")
def test_vf_module_instantiation(mock_vf_module_preload, mock_send_message_json):
    """Vf-module a'la carte instantiation: instance naming and the SO
    "vfModules" URL built from the service-instance and VNF ids."""
    mock_service_instance = mock.MagicMock()
    mock_service_instance.instance_id = "1234"
    mock_vnf_instance = mock.MagicMock()
    mock_vnf_instance.service_instance = mock_service_instance
    mock_vnf_instance.vnf_id = "4321"

    def instantiate(**extra):
        # Fresh MagicMock per call, as in the original request construction.
        return VfModuleInstantiation.instantiate_ala_carte(
            vf_module=mock.MagicMock(),
            vnf_instance=mock_vnf_instance,
            **extra)

    instantiation = instantiate()
    assert instantiation.name.startswith("Python_ONAP_SDK_vf_module_instance_")
    mock_send_message_json.assert_called_once()
    method, _, url = mock_send_message_json.call_args[0]
    assert method == "POST"
    assert url == (f"{VfModuleInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{VfModuleInstantiation.api_version}/serviceInstances/1234/vnfs/"
                   f"4321/vfModules")
    instantiation = instantiate(vf_module_instance_name="test")
    assert instantiation.name == "test"
@mock.patch.object(VfModuleInstantiation, "send_message_json")
@mock.patch.object(VfModulePreload, "upload_vf_module_preload")
def test_vf_module_instantiation_with_cr_and_tenant(mock_vf_module_preload, mock_send_message_json):
    """Same as test_vf_module_instantiation, but with an explicit cloud
    region and tenant passed through to the request."""
    mock_service_instance = mock.MagicMock()
    mock_service_instance.instance_id = "1234"
    mock_vnf_instance = mock.MagicMock()
    mock_vnf_instance.service_instance = mock_service_instance
    mock_vnf_instance.vnf_id = "4321"

    def instantiate(**extra):
        # Fresh MagicMocks per call, as in the original request construction.
        return VfModuleInstantiation.instantiate_ala_carte(
            vf_module=mock.MagicMock(),
            vnf_instance=mock_vnf_instance,
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            **extra)

    instantiation = instantiate()
    assert instantiation.name.startswith("Python_ONAP_SDK_vf_module_instance_")
    mock_send_message_json.assert_called_once()
    method, _, url = mock_send_message_json.call_args[0]
    assert method == "POST"
    assert url == (f"{VfModuleInstantiation.base_url}/onap/so/infra/serviceInstantiation/"
                   f"{VfModuleInstantiation.api_version}/serviceInstances/1234/vnfs/"
                   f"4321/vfModules")
    instantiation = instantiate(vf_module_instance_name="test")
    assert instantiation.name == "test"
def test_instantiation_wait_for_finish():
    """`_wait_for_finish` polls the `finished` property until it turns True
    and then publishes the `completed` flag via the given holder object."""
    with mock.patch.object(ServiceInstantiation, "finished",
                           new_callable=mock.PropertyMock) as mock_finished, \
         mock.patch.object(ServiceInstantiation, "completed",
                           new_callable=mock.PropertyMock) as mock_completed:
        instantiation = ServiceInstantiation(
            name="test",
            request_id="test",
            instance_id="test",
            sdc_service=mock.MagicMock(),
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            customer=mock.MagicMock(),
            owning_entity=mock.MagicMock(),
            project=mock.MagicMock()
        )
        # Don't actually sleep between polls.
        instantiation.WAIT_FOR_SLEEP_TIME = 0
        # Two "still running" polls before the request finishes.
        mock_finished.side_effect = [False, False, True]
        mock_completed.return_value = True
        rv = namedtuple("Value", ["return_value"])
        instantiation._wait_for_finish(rv)
        assert rv.return_value
@mock.patch.object(ServiceInstantiation, "send_message_json")
def test_service_instantiation_multicloud(mock_send_message_json):
    """The `enable_multicloud` flag must inject the
    {"name": "orchestrator", "value": "multicloud"} user parameter into both
    the a'la carte and the macro request payloads — and only then."""
    multicloud_param = {"name": "orchestrator", "value": "multicloud"}
    mock_sdc_service = mock.MagicMock()
    mock_sdc_service.distributed = True

    def sent_user_params():
        # Decode the payload of the most recent SO request.
        _, kwargs = mock_send_message_json.call_args
        payload = json.loads(kwargs["data"])
        return payload["requestDetails"]["requestParameters"]["userParams"]

    def ala_carte(**extra):
        return ServiceInstantiation.instantiate_ala_carte(
            sdc_service=mock_sdc_service,
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            customer=mock.MagicMock(),
            owning_entity=mock.MagicMock(),
            project=mock.MagicMock(),
            **extra)

    def macro(**extra):
        return ServiceInstantiation.instantiate_macro(
            sdc_service=mock_sdc_service,
            cloud_region=mock.MagicMock(),
            tenant=mock.MagicMock(),
            customer=mock.MagicMock(),
            owning_entity=mock.MagicMock(),
            project=mock.MagicMock(),
            line_of_business=mock.MagicMock(),
            platform=mock.MagicMock(),
            service_instance_name="test",
            **extra)

    _ = ala_carte()
    assert sent_user_params() == []
    mock_send_message_json.reset_mock()
    _ = ala_carte(enable_multicloud=True)
    assert sent_user_params() == [multicloud_param]
    mock_send_message_json.reset_mock()
    _ = macro()
    assert multicloud_param not in sent_user_params()
    mock_send_message_json.reset_mock()
    _ = macro(enable_multicloud=True)
    assert multicloud_param in sent_user_params()
| 48.871542
| 149
| 0.584294
| 2,157
| 24,729
| 6.308299
| 0.06815
| 0.118468
| 0.037481
| 0.030719
| 0.912912
| 0.888734
| 0.876534
| 0.859484
| 0.859484
| 0.848166
| 0
| 0.002603
| 0.33208
| 24,729
| 505
| 150
| 48.968317
| 0.821215
| 0
| 0
| 0.743041
| 0
| 0
| 0.127138
| 0.068058
| 0
| 0
| 0
| 0
| 0.111349
| 1
| 0.025696
| false
| 0
| 0.017131
| 0
| 0.042827
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4f8b2e77078b71be19989d40a88463f0b0038d5
| 42,117
|
py
|
Python
|
tests/python/unittest/test_tir_schedule_compute_at.py
|
zhuochenKIDD/tvm
|
f6a404447406f91ddf6195ac044dc1f0b0e867e7
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 1
|
2021-09-29T20:19:21.000Z
|
2021-09-29T20:19:21.000Z
|
tests/python/unittest/test_tir_schedule_compute_at.py
|
zhuochenKIDD/tvm
|
f6a404447406f91ddf6195ac044dc1f0b0e867e7
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null |
tests/python/unittest/test_tir_schedule_compute_at.py
|
zhuochenKIDD/tvm
|
f6a404447406f91ddf6195ac044dc1f0b0e867e7
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 5
|
2020-11-13T19:26:25.000Z
|
2022-01-25T07:55:16.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-function-docstring,missing-module-docstring
import sys
import pytest
import tvm
from tvm import tir
from tvm.script import ty
from tvm.tir.schedule.testing import verify_trace_roundtrip
# fmt: off
# pylint: disable=no-member,invalid-name,unused-variable,line-too-long,redefined-outer-name,unexpected-keyword-arg,too-many-nested-blocks
# TVMScript fixture: B = A * 2 followed by C = B + 1 over 128x128 buffers.
@tvm.script.tir
def two_elementwise(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, (128, 128), "float32")
    B = tir.alloc_buffer((128, 128), "float32")  # intermediate buffer
    C = tir.match_buffer(c, (128, 128), "float32")
    with tir.block([128, 128], "B") as [vi, vj]:
        B[vi, vj] = A[vi, vj] * 2.0
    with tir.block([128, 128], "C") as [vi, vj]:
        C[vi, vj] = B[vi, vj] + 1.0
# Expected IR after computing block "B" at the i-loop of "C": each row of B
# is produced immediately before the row of C that consumes it.
@tvm.script.tir
def two_elementwise_after_compute_at(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, (128, 128), "float32")
    B = tir.alloc_buffer((128, 128), "float32")
    C = tir.match_buffer(c, (128, 128), "float32")
    for i in range(0, 128):
        for ax0, ax1 in tir.grid(1, 128):
            with tir.block([128, 128], "B") as [vi, vj]:
                tir.bind(vi, i + ax0)
                tir.bind(vj, ax1)
                B[vi, vj] = A[vi, vj] * 2.0
        for j in range(0, 128):
            # Fixed: this consumer block writes C and must be named "C";
            # it was mislabelled "B", clashing with the producer block above.
            with tir.block([128, 128], "C") as [vi, vj]:
                C[vi, vj] = B[vi, vj] + 1.0
# TVMScript fixture: elementwise B = A * 2, then C computed via a blockized
# (16x16-tiled) outer/inner block pair with explicit read/write regions.
@tvm.script.tir
def blockized_1(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    with tir.block([128, 128], "B") as [vi, vj]:
        B[vi, vj] = A[vi, vj] * 2.0
    with tir.block([8, 8], "C_outer") as [vi_o, vj_o]:
        # Each outer block touches one 16x16 tile of B and C.
        tir.reads([B[
            vi_o * 16 : vi_o * 16 + 16,
            vj_o * 16 : vj_o * 16 + 16,
        ]])
        tir.writes([C[
            vi_o * 16 : vi_o * 16 + 16,
            vj_o * 16 : vj_o * 16 + 16
        ]])
        for i_i, j_i in tir.grid(16, 16):
            with tir.block([128, 128], "C_inner") as [vi, vj]:
                tir.bind(vi, vi_o * 16 + i_i)
                tir.bind(vj, vj_o * 16 + j_i)
                C[vi, vj] = B[vi, vj] + 1.0
# Expected IR for blockized_1 after compute-at: the B producer is moved
# inside the 8x8 tile loops, producing each 16x16 tile just before C_outer
# consumes it.
@tvm.script.tir
def blockized_after_compute_at(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i0_0, i1_0 in tir.grid(8, 8):
        for ax0, ax1 in tir.grid(16, 16):
            with tir.block([128, 128], "B") as [vi, vj]:
                tir.bind(vi, i0_0 * 16 + ax0)
                tir.bind(vj, i1_0 * 16 + ax1)
                B[vi, vj] = A[vi, vj] * 2.0
        with tir.block([8, 8], "C_outer") as [vi_o, vj_o]:
            tir.bind(vi_o, i0_0)
            tir.bind(vj_o, i1_0)
            tir.reads([B[
                vi_o * 16 : vi_o * 16 + 16,
                vj_o * 16 : vj_o * 16 + 16,
            ]])
            tir.writes([C[
                vi_o * 16 : vi_o * 16 + 16,
                vj_o * 16 : vj_o * 16 + 16
            ]])
            for i0_1, i1_1 in tir.grid(16, 16):
                with tir.block([128, 128], "C_inner") as [vi, vj]:
                    tir.bind(vi, vi_o * 16 + i0_1)
                    tir.bind(vj, vj_o * 16 + i1_1)
                    C[vi, vj] = B[vi, vj] + 1.0
# TVMScript fixture: B produced via 16x16 blockized outer/inner pair, C
# consumed via 32x32 tiled loops — tile sizes deliberately mismatched.
@tvm.script.tir
def blockized_2(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i_o, j_o in tir.grid(8, 8):
        with tir.block([8, 8], "B_outer") as [vio, vjo]:
            tir.bind(vio, i_o)
            tir.bind(vjo, j_o)
            tir.reads([A[
                vio * 16 : vio * 16 + 16,
                vjo * 16 : vjo * 16 + 16,
            ]])
            tir.writes([B[
                vio * 16 : vio * 16 + 16,
                vjo * 16 : vjo * 16 + 16
            ]])
            for i_i, j_i in tir.grid(16, 16):
                with tir.block([128, 128], "B_inner") as [vi, vj]:
                    tir.bind(vi, vio * 16 + i_i)
                    tir.bind(vj, vjo * 16 + j_i)
                    B[vi, vj] = A[vi, vj] * 2.0
    # Consumer uses a different (32x32) tiling of the same iteration space.
    for i_o, j_o, i_i, j_i in tir.grid(4, 4, 32, 32):
        with tir.block([128, 128], "C") as [vi, vj]:
            tir.bind(vi, i_o * 32 + i_i)
            tir.bind(vj, j_o * 32 + j_i)
            C[vi, vj] = B[vi, vj] + 1.0
# Expected IR for blockized_2 after reverse-compute-at: the C consumer is
# moved under the producer's 8x8 tile loops as a 16x16 loop nest.
@tvm.script.tir
def blockized_2_after_reverse_compute_at(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i_o, j_o in tir.grid(8, 8):
        with tir.block([8, 8], "B_outer") as [vio, vjo]:
            tir.bind(vio, i_o)
            tir.bind(vjo, j_o)
            tir.reads([A[
                vio * 16 : vio * 16 + 16,
                vjo * 16 : vjo * 16 + 16,
            ]])
            tir.writes([B[
                vio * 16 : vio * 16 + 16,
                vjo * 16 : vjo * 16 + 16
            ]])
            for i_i, j_i in tir.grid(16, 16):
                with tir.block([128, 128], "B_inner") as [vi, vj]:
                    tir.bind(vi, vio * 16 + i_i)
                    tir.bind(vj, vjo * 16 + j_i)
                    B[vi, vj] = A[vi, vj] * 2.0
        for ax0, ax1 in tir.grid(16, 16):
            with tir.block([128, 128], "C") as [vi, vj]:
                tir.bind(vi, i_o * 16 + ax0)
                tir.bind(vj, j_o * 16 + ax1)
                tir.reads([B[vi, vj]])
                tir.writes([C[vi, vj]])
                C[vi, vj] = B[vi, vj] + 1.0
# Expected IR for blockized_2 after compute-at: the blockized B producer is
# moved under C's 4x4 (32x32-tile) loops; 2x2 outer B tiles cover each
# 32x32 C tile.
@tvm.script.tir
def blockized_2_after_compute_at(a: ty.handle, c: ty.handle) -> None:
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i_o, j_o in tir.grid(4, 4):
        for ax0, ax1 in tir.grid(2, 2):
            with tir.block([8, 8], "blockized_B") as [vio, vjo]:
                tir.bind(vio, i_o * 2 + ax0)
                tir.bind(vjo, j_o * 2 + ax1)
                tir.reads([A[
                    vio * 16 : vio * 16 + 16,
                    vjo * 16 : vjo * 16 + 16,
                ]])
                tir.writes([B[
                    vio * 16 : vio * 16 + 16,
                    vjo * 16 : vjo * 16 + 16,
                ]])
                for i_i, j_i in tir.grid(16, 16):
                    with tir.block([128, 128], "B") as [vi, vj]:
                        tir.bind(vi, vio * 16 + i_i)
                        tir.bind(vj, vjo * 16 + j_i)
                        B[vi, vj] = A[vi, vj] * 2.0
        for i_i, j_i in tir.grid(32, 32):
            with tir.block([128, 128], "C") as [vi, vj]:
                tir.bind(vi, i_o * 32 + i_i)
                tir.bind(vj, j_o * 32 + j_i)
                C[vi, vj] = B[vi, vj] + 1.0
# TVMScript fixture: 2048x2048 matmul with shared/local staging buffers.
# Only the final C_local -> C copy is nested inside the GPU thread bindings;
# the staging and compute blocks are still at function scope.
@tvm.script.tir
def cuda_matmul_0(a: ty.handle, b: ty.handle, c: ty.handle) -> None:  # pylint: disable=undefined-loop-variable
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
        A_shared[v0, v1] = A[v0, v1]
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
        A_shared_local[v0, v1] = A_shared[v0, v1]
    with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
        B_shared_local[v0, v1] = B_shared[v0, v1]
    # Note: the reduction reads A_shared_local[vk, vi] — A is used transposed.
    with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
        with tir.init():
            C_local[vi, vj] = 0.0
        C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [v0_4, v1_4]:
                                    tir.bind(v0_4, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(v1_4, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[v0_4, v1_4] = C_local[v0_4, v1_4]
# Expected IR for cuda_matmul_0 after computing block "C" at the tx loop:
# the reduction now runs per-thread inside the thread bindings, followed by
# the C_local -> C write-back.
@tvm.script.tir
def cuda_matmul_0_after_compute_at(a: ty.handle, b: ty.handle, c: ty.handle) -> None:  # pylint: disable=undefined-loop-variable
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
        A_shared[v0, v1] = A[v0, v1]
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
        A_shared_local[v0, v1] = A_shared[v0, v1]
    with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
        B_shared_local[v0, v1] = B_shared[v0, v1]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for i, j, k in tir.grid(4, 4, 2048):
                                with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                    tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                    tir.bind(vk, k)
                                    with tir.init():
                                        C_local[vi, vj] = 0.0
                                    C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [vi, vj]:
                                    tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[vi, vj] = C_local[vi, vj]
# Expected IR after additionally splitting the 2048 reduction into
# k_0 (serial, 256) x k_1 (unrolled, 8) inside the thread bindings.
@tvm.script.tir
def cuda_matmul_1(a: ty.handle, b: ty.handle, c: ty.handle) -> None:  # pylint: disable=undefined-loop-variable
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
        A_shared[v0, v1] = A[v0, v1]
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
        A_shared_local[v0, v1] = A_shared[v0, v1]
    with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
        B_shared_local[v0, v1] = B_shared[v0, v1]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for k_0 in tir.serial(0, 256):
                                for k_1 in tir.unroll(0, 8):
                                    for _, i, j in tir.grid(1, 4, 4):
                                        with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                            tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                            tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                            tir.bind(vk, k_0 * 8 + k_1)
                                            with tir.init():
                                                C_local[vi, vj] = 0.0
                                            C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [vi, vj]:
                                    tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[vi, vj] = C_local[vi, vj]
# Expected IR after computing A_shared_local at the unrolled k_1 loop: the
# per-thread A fetch now happens just before each reduction step, while
# B_shared_local remains at function scope.
@tvm.script.tir
def cuda_matmul_2(a: ty.handle, b: ty.handle, c: ty.handle) -> None:  # pylint: disable=undefined-loop-variable
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
        A_shared[v0, v1] = A[v0, v1]
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
        B_shared_local[v0, v1] = B_shared[v0, v1]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for k_0 in tir.serial(0, 256):
                                for k_1 in tir.unroll(0, 8):
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
                                            tir.bind(v0, k_0 * 8 + k_1 + i)
                                            tir.bind(v1, by * 64 + vy * 32 + ty * 4 + j)
                                            A_shared_local[v0, v1] = A_shared[v0, v1]
                                    for _, i, j in tir.grid(1, 4, 4):
                                        with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                            tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                            tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                            tir.bind(vk, k_0 * 8 + k_1)
                                            with tir.init():
                                                C_local[vi, vj] = tir.float32(0)
                                            C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [v0, v1]:
                                    tir.bind(v0, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[v0, v1] = C_local[v0, v1]
@tvm.script.tir
def cuda_matmul_3(a: ty.handle, b: ty.handle, c: ty.handle) -> None: # pylint: disable=undefined-loop-variable
    # GEMM fixture: both A_shared_local and B_shared_local staged under the k1
    # loop of block "C" (expected IR of test_compute_at_cuda_matmul_2 and input
    # of test_compute_at_cuda_matmul_3).
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    # Shared-memory loads still at function scope.
    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
        A_shared[v0, v1] = A[v0, v1]
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for k0 in tir.serial(0, 256):
                                for k1 in tir.unroll(0, 8):
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, by * 64 + vy * 32 + ty * 4 + j)
                                            A_shared_local[v0, v1] = A_shared[v0, v1]
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                            B_shared_local[v0, v1] = B_shared[v0, v1]
                                    for _, i, j in tir.grid(1, 4, 4):
                                        with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                            tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                            tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                            tir.bind(vk, k0 * 8 + k1)
                                            with tir.init():
                                                C_local[vi, vj] = tir.float32(0)
                                            C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            # Write the 4x4 per-thread accumulator tile back to C.
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [v0, v1]:
                                    tir.bind(v0, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[v0, v1] = C_local[v0, v1]
@tvm.script.tir
def cuda_matmul_4(a: ty.handle, b: ty.handle, c: ty.handle) -> None: # pylint: disable=undefined-loop-variable
    # GEMM fixture: the A_shared load is additionally moved under the k0 loop
    # (expected IR of test_compute_at_cuda_matmul_3 and input of
    # test_compute_at_cuda_matmul_4); only B_shared remains at function scope.
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
        B_shared[v0, v1] = B[v0, v1]
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for k0 in tir.serial(0, 256):
                                # Load one 8x64 tile of A into shared memory per k0 step.
                                for i, j in tir.grid(8, 64):
                                    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
                                        tir.bind(v0, k0 * 8 + i)
                                        tir.bind(v1, by * 64 + j)
                                        A_shared[v0, v1] = A[v0, v1]
                                for k1 in tir.unroll(0, 8):
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, by * 64 + vy * 32 + ty * 4 + j)
                                            A_shared_local[v0, v1] = A_shared[v0, v1]
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                            B_shared_local[v0, v1] = B_shared[v0, v1]
                                    for _, i, j in tir.grid(1, 4, 4):
                                        with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                            tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                            tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                            tir.bind(vk, k0 * 8 + k1)
                                            with tir.init():
                                                C_local[vi, vj] = 0.0
                                            C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [v0, v1]:
                                    tir.bind(v0, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[v0, v1] = C_local[v0, v1]
@tvm.script.tir
def cuda_matmul_5(a: ty.handle, b: ty.handle, c: ty.handle) -> None: # pylint: disable=undefined-loop-variable
    # Fully staged GEMM fixture: both A_shared and B_shared loads live under
    # the k0 loop (expected IR of test_compute_at_cuda_matmul_4).
    A = tir.match_buffer(a, [2048, 2048], "float32")
    B = tir.match_buffer(b, [2048, 2048], "float32")
    C = tir.match_buffer(c, [2048, 2048], "float32")
    A_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    B_shared = tir.alloc_buffer([2048, 2048], "float32", scope="shared")
    A_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    B_shared_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    C_local = tir.alloc_buffer([2048, 2048], "float32", scope="local")
    for by in tir.thread_binding(0, 32, thread = "blockIdx.y"):
        for bx in tir.thread_binding(0, 32, thread = "blockIdx.x"):
            for vy in tir.thread_binding(0, 2, thread = "vthread.y"):
                for vx in tir.thread_binding(0, 2, thread = "vthread.x"):
                    for ty in tir.thread_binding(0, 8, thread = "threadIdx.y"):
                        for tx in tir.thread_binding(0, 8, thread = "threadIdx.x"):
                            for k0 in tir.serial(0, 256):
                                # Per-k0 8x64 shared-memory tiles of A and B.
                                for i, j in tir.grid(8, 64):
                                    with tir.block([2048, 2048], "A_shared") as [v0, v1]:
                                        tir.bind(v0, k0 * 8 + i)
                                        tir.bind(v1, by * 64 + j)
                                        A_shared[v0, v1] = A[v0, v1]
                                for i, j in tir.grid(8, 64):
                                    with tir.block([2048, 2048], "B_shared") as [v0, v1]:
                                        tir.bind(v0, k0 * 8 + i)
                                        tir.bind(v1, bx * 64 + j)
                                        B_shared[v0, v1] = B[v0, v1]
                                for k1 in tir.unroll(0, 8):
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "A_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, by * 64 + vy * 32 + ty * 4 + j)
                                            A_shared_local[v0, v1] = A_shared[v0, v1]
                                    for i, j in tir.grid(1, 4):
                                        with tir.block([2048, 2048], "B_shared_local") as [v0, v1]:
                                            tir.bind(v0, k0 * 8 + k1 + i)
                                            tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                            B_shared_local[v0, v1] = B_shared[v0, v1]
                                    for _, i, j in tir.grid(1, 4, 4):
                                        with tir.block([2048, 2048, tir.reduce_axis(0, 2048)], "C") as [vi, vj, vk]:
                                            tir.bind(vi, by * 64 + vy * 32 + ty * 4 + i)
                                            tir.bind(vj, bx * 64 + vx * 32 + tx * 4 + j)
                                            tir.bind(vk, k0 * 8 + k1)
                                            with tir.init():
                                                C_local[vi, vj] = 0.0
                                            C_local[vi, vj] = C_local[vi, vj] + A_shared_local[vk, vi] * B_shared_local[vk, vj]
                            for i, j in tir.grid(4, 4):
                                with tir.block([2048, 2048], "C_local") as [v0, v1]:
                                    tir.bind(v0, by * 64 + vy * 32 + ty * 4 + i)
                                    tir.bind(v1, bx * 64 + vx * 32 + tx * 4 + j)
                                    C[v0, v1] = C_local[v0, v1]
@tvm.script.tir
def tiled(a: ty.handle, c: ty.handle) -> None:
    # Fixture: "B" is computed under a 16x16 tiling; "C" still uses the
    # implicit-loop block form. Input of test_reverse_compute_at_tiled and
    # test_fail_output_block.
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i_0, j_0, i_1, j_1 in tir.grid(8, 8, 16, 16):
        with tir.block([128, 128], "B") as [vi, vj]:
            tir.bind(vi, i_0 * 16 + i_1)
            tir.bind(vj, j_0 * 16 + j_1)
            B[vi, vj] = A[vi, vj] * 2.0
    with tir.block([128, 128], "C") as [vi, vj]:
        C[vi, vj] = B[vi, vj] + 1.0
@tvm.script.tir
def tiled_after_reverse_compute_at(a: ty.handle, c: ty.handle) -> None:
    # Expected IR of test_reverse_compute_at_tiled: consumer "C" moved under
    # loop i_1 of "B", so each nest gets its own j_1 loop.
    A = tir.match_buffer(a, [128, 128], "float32")
    B = tir.alloc_buffer([128, 128], "float32")
    C = tir.match_buffer(c, [128, 128], "float32")
    for i_0, j_0, i_1 in tir.grid(8, 8, 16):
        for j_1 in tir.serial(0, 16):
            with tir.block([128, 128], "B") as [vi, vj]:
                tir.bind(vi, i_0 * 16 + i_1)
                tir.bind(vj, j_0 * 16 + j_1)
                B[vi, vj] = A[vi, vj] * 2.0
        for j_1 in tir.serial(0, 16):
            with tir.block([128, 128], "C") as [vi, vj]:
                tir.bind(vi, i_0 * 16 + i_1)
                tir.bind(vj, j_0 * 16 + j_1)
                C[vi, vj] = B[vi, vj] + 1.0
@tvm.script.tir
def factorized(a: ty.handle, b: ty.handle) -> None:
    # Fixture: rfactor-style reduction. "B_rf" accumulates partial sums into
    # B_rf_local per (i, j); "B" then reduces across the factored axis.
    # Input of test_reverse_compute_at_factorized.
    A = tir.match_buffer(a, [16, 16, 16], "float32")
    B = tir.match_buffer(b, [16], "float32")
    B_rf_local = tir.alloc_buffer([16, 16], "float32", scope="local")
    for j in tir.thread_binding(0, 16, thread = "blockIdx.x"):
        for i_o in tir.thread_binding(0, 4, thread = "threadIdx.x"):
            for i_i, k in tir.grid(4, 16):
                with tir.block([16, 16, tir.reduce_axis(0, 16)], "B_rf") as [vi, vj, vk]:
                    tir.bind(vi, i_o * 4 + i_i)
                    tir.bind(vj, j)
                    tir.bind(vk, k)
                    with tir.init():
                        B_rf_local[vi, vj] = 0.0
                    B_rf_local[vi, vj] = B_rf_local[vi, vj] + A[vj, vi, vk]
    # Final cross-factor reduction, still in its own loop nest here.
    for i, k in tir.grid(16, 16):
        with tir.block([16, tir.reduce_axis(0, 16)], "B") as [vi, vk]:
            tir.bind(vi, i)
            tir.bind(vk, k)
            with tir.init():
                B[vi] = 0.0
            B[vi] = B[vi] + B_rf_local[vk, vi]
@tvm.script.tir
def factorized_after_reverse_compute_at(a: ty.handle, b: ty.handle) -> None:
    # Expected IR of test_reverse_compute_at_factorized: the final reduction
    # "B" is moved under loop i_o of "B_rf", iterating only the 4 local
    # partials owned by that thread.
    A = tir.match_buffer(a, [16, 16, 16], "float32")
    B = tir.match_buffer(b, [16], "float32")
    B_rf_local = tir.alloc_buffer([16, 16], "float32", scope="local")
    for j in tir.thread_binding(0, 16, thread = "blockIdx.x"):
        for i_o in tir.thread_binding(0, 4, thread = "threadIdx.x"):
            for i_i, k in tir.grid(4, 16):
                with tir.block([16, 16, tir.reduce_axis(0, 16)], "B_rf") as [vi, vj, vk]:
                    tir.bind(vi, i_o * 4 + i_i)
                    tir.bind(vj, j)
                    tir.bind(vk, k)
                    with tir.init():
                        B_rf_local[vi, vj] = 0.0
                    B_rf_local[vi, vj] = B_rf_local[vi, vj] + A[vj, vi, vk]
            for k in tir.serial(0, 4):
                with tir.block([16, tir.reduce_axis(0, 16)], "B") as [vi, vk]:
                    tir.bind(vi, j)
                    tir.bind(vk, i_o * 4 + k)
                    with tir.init():
                        B[vi] = 0.0
                    B[vi] = B[vi] + B_rf_local[vk, vi]
@tvm.script.tir
def fail_subtree_compact_dataflow(a: ty.handle, c: ty.handle) -> None:
    # Negative fixture: B is written by two half-range blocks ("B_0" for
    # columns [0, 64), "B_1" for [64, 128)) inside the same i loop, so moving
    # "B_0" alone breaks compact dataflow (see
    # test_fail_subtree_compact_dataflow).
    A = tir.match_buffer(a, (128, 128), "float32")
    B = tir.alloc_buffer((128, 128), "float32")
    C = tir.match_buffer(c, (128, 128), "float32")
    for i in range(0, 128):
        for j in range(0, 64):
            with tir.block([128, 128], "B_0") as [vi, vj]:
                tir.bind(vi, i)
                tir.bind(vj, j)
                B[vi, vj] = A[vi, vj] * 2.0
        for j in range(0, 64):
            with tir.block([128, 128], "B_1") as [vi, vj]:
                tir.bind(vi, i)
                tir.bind(vj, j + 64)
                B[vi, vj] = A[vi, vj] * 2.0
    with tir.block([128, 128], "C") as [vi, vj]:
        C[vi, vj] = B[vi, vj] + 1.0
@tvm.script.tir
def fail_all_consumers_under_loop(a: ty.handle, c: ty.handle, d: ty.handle) -> None:
    # Negative fixture: B feeds two consumers ("C" and "D") in separate loop
    # nests, so compute_at of "B" under the "C" nest cannot cover all
    # consumers (see test_fail_all_consumers_under_loop).
    A = tir.match_buffer(a, (128, 128), "float32")
    B = tir.alloc_buffer((128, 128), "float32")
    C = tir.match_buffer(c, (128, 128), "float32")
    D = tir.match_buffer(d, (128, 128), "float32")
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "B") as [vi, vj]:
            B[vi, vj] = A[vi, vj] * 2.0
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "C") as [vi, vj]:
            C[vi, vj] = B[vi, vj] + 1.0
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "D") as [vi, vj]:
            D[vi, vj] = B[vi, vj] + 1.0
@tvm.script.tir
def fail_all_producers_under_loop(a: ty.handle, d: ty.handle) -> None:
    # Negative fixture: "D" reads both B and C, which are produced in separate
    # loop nests, so reverse_compute_at of "D" under the "C" nest cannot cover
    # all producers (see test_fail_all_producers_under_loop).
    A = tir.match_buffer(a, (128, 128), "float32")
    B = tir.alloc_buffer((128, 128), "float32")
    C = tir.alloc_buffer((128, 128), "float32")
    D = tir.match_buffer(d, (128, 128), "float32")
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "B") as [vi, vj]:
            B[vi, vj] = A[vi, vj] * 2.0
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "C") as [vi, vj]:
            C[vi, vj] = A[vi, vj] + 1.0
    for i, j in tir.grid(128, 128):
        with tir.block([128, 128], "D") as [vi, vj]:
            D[vi, vj] = B[vi, vj] + C[vi, vj]
@tvm.script.tir
def read_out_of_bound(a: ty.handle, c:ty.handle) -> None:
    # Fixture: "C" declares a 2-element read window B[v : v + 2] whose upper
    # edge goes past the buffer at v == 15; the if_then_else guards the actual
    # access. Input of test_read_out_of_bound.
    A = tir.match_buffer(a, [16], "float32")
    B = tir.alloc_buffer([16], "float32")
    C = tir.match_buffer(c, [16], "float32")
    for i in tir.serial(0, 16):
        with tir.block([16], "B") as [v]:
            B[v] = A[v]
    for j in tir.serial(0, 16):
        with tir.block([16], "C") as [v]:
            tir.reads(B[v : v + 2])
            C[v] = tir.if_then_else(v < 15, tir.max(B[v], B[v + 1]), B[v], dtype="float32")
@tvm.script.tir
def read_out_of_bound_after_compute_at(a: ty.handle, c: ty.handle) -> None:
    # Expected IR of test_read_out_of_bound: "B" is produced under the j loop;
    # the inner extent min(1, 15 - j) + 1 clamps the 2-element window to the
    # buffer edge at j == 15.
    A = tir.match_buffer(a, [16], "float32")
    B = tir.alloc_buffer([16], "float32")
    C = tir.match_buffer(c, [16], "float32")
    for j in tir.serial(0, 16):
        for i in tir.serial(0, tir.min(1, 15 - j) + 1):
            with tir.block([16], "B") as [v]:
                tir.bind(v, j + i)
                B[v] = A[v]
        with tir.block([16], "C") as [v]:
            tir.bind(v, j)
            tir.reads([B[v : v + 2]])
            C[v] = tir.if_then_else(v < 15, tir.max(B[v], B[v + 1]), B[v], dtype="float32")
# pylint: enable=no-member,invalid-name,unused-variable,line-too-long,redefined-outer-name,unexpected-keyword-arg,too-many-nested-blocks
# fmt: on
def test_compute_at_two_elementwise():
    """Computing "B" at the outer loop of "C" yields the expected IR."""
    schedule = tir.Schedule(two_elementwise, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("C"))[0]
    schedule.compute_at(producer, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(two_elementwise_after_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=two_elementwise)
def test_compute_at_blockized_1():
    """Computing "B" at the second loop of "C_outer" yields the expected IR."""
    schedule = tir.Schedule(blockized_1, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("C_outer"))[1]
    schedule.compute_at(producer, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(blockized_after_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=blockized_1)
def test_compute_at_blockized_2():
    """Computing "B_outer" at the second loop of "C" yields the expected IR."""
    schedule = tir.Schedule(blockized_2, debug_mask="all")
    producer = schedule.get_block("B_outer")
    target_loop = schedule.get_loops(schedule.get_block("C"))[1]
    schedule.compute_at(producer, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(blockized_2_after_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=blockized_2)
def test_compute_at_cuda_matmul_0():
    """Computing "C" at loop 5 of "C_local" yields the expected GEMM IR."""
    schedule = tir.Schedule(cuda_matmul_0, debug_mask="all")
    producer = schedule.get_block("C")
    target_loop = schedule.get_loops(schedule.get_block("C_local"))[5]
    schedule.compute_at(producer, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(cuda_matmul_0_after_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=cuda_matmul_0)
def test_compute_at_cuda_matmul_1():
    """Staging "A_shared_local" at loop 7 of "C" yields cuda_matmul_2."""
    schedule = tir.Schedule(cuda_matmul_1, debug_mask="all")
    cache_block = schedule.get_block("A_shared_local")
    target_loop = schedule.get_loops(schedule.get_block("C"))[7]
    schedule.compute_at(cache_block, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(cuda_matmul_2, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=cuda_matmul_1)
def test_compute_at_cuda_matmul_2():
    """Staging "B_shared_local" at loop 7 of "C" yields cuda_matmul_3."""
    schedule = tir.Schedule(cuda_matmul_2, debug_mask="all")
    cache_block = schedule.get_block("B_shared_local")
    target_loop = schedule.get_loops(schedule.get_block("C"))[7]
    schedule.compute_at(cache_block, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(cuda_matmul_3, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=cuda_matmul_2)
def test_compute_at_cuda_matmul_3():
    """Staging "A_shared" at loop 6 of "C" yields cuda_matmul_4."""
    schedule = tir.Schedule(cuda_matmul_3, debug_mask="all")
    cache_block = schedule.get_block("A_shared")
    target_loop = schedule.get_loops(schedule.get_block("C"))[6]
    schedule.compute_at(cache_block, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(cuda_matmul_4, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=cuda_matmul_3)
def test_compute_at_cuda_matmul_4():
    """Staging "B_shared" at loop 6 of "C" yields cuda_matmul_5."""
    schedule = tir.Schedule(cuda_matmul_4, debug_mask="all")
    cache_block = schedule.get_block("B_shared")
    target_loop = schedule.get_loops(schedule.get_block("C"))[6]
    schedule.compute_at(cache_block, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(cuda_matmul_5, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=cuda_matmul_4)
def test_reverse_compute_at_tiled():
    """Reverse-computing "C" at loop i_1 of "B" yields the tiled expected IR."""
    schedule = tir.Schedule(tiled, debug_mask="all")
    consumer = schedule.get_block("C")
    target_loop = schedule.get_loops(schedule.get_block("B"))[2]
    schedule.reverse_compute_at(consumer, target_loop, preserve_unit_loops=False)
    tvm.ir.assert_structural_equal(tiled_after_reverse_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=tiled)
def test_reverse_compute_at_blockized_2():
    """Reverse-computing "C" at the second loop of "B_outer" yields the expected IR."""
    schedule = tir.Schedule(blockized_2, debug_mask="all")
    consumer = schedule.get_block("C")
    target_loop = schedule.get_loops(schedule.get_block("B_outer"))[1]
    schedule.reverse_compute_at(consumer, target_loop, preserve_unit_loops=True)
    tvm.ir.assert_structural_equal(blockized_2_after_reverse_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=blockized_2)
def test_reverse_compute_at_factorized():
    """Reverse-computing "B" at loop i_o of "B_rf" yields the expected IR."""
    schedule = tir.Schedule(factorized, debug_mask="all")
    consumer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("B_rf"))[1]
    schedule.reverse_compute_at(consumer, target_loop, preserve_unit_loops=False)
    tvm.ir.assert_structural_equal(factorized_after_reverse_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=factorized)
def test_read_out_of_bound():
    """compute_at clamps the producer extent when the consumer over-reads."""
    schedule = tir.Schedule(read_out_of_bound, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("C"))[0]
    schedule.compute_at(producer, target_loop)
    tvm.ir.assert_structural_equal(read_out_of_bound_after_compute_at, schedule.mod["main"])
    verify_trace_roundtrip(sch=schedule, mod=read_out_of_bound)
def test_fail_subtree_compact_dataflow():
    """compute_at rejects a producer whose move breaks compact dataflow."""
    schedule = tir.Schedule(fail_subtree_compact_dataflow, debug_mask="all")
    producer = schedule.get_block("B_0")
    target_loop = schedule.get_loops(schedule.get_block("C"))[0]
    with pytest.raises(tvm.tir.ScheduleError, match="compact dataflow"):
        schedule.compute_at(producer, target_loop)
def test_fail_not_in_same_scope():
    """compute_at rejects a target loop in a different block scope."""
    schedule = tir.Schedule(blockized_1, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("C_inner"))[0]
    with pytest.raises(tvm.tir.ScheduleError, match="same block scope"):
        schedule.compute_at(producer, target_loop)
def test_fail_loop_is_ancestor_of_block():
    """compute_at rejects a target loop that already encloses the block."""
    schedule = tir.Schedule(two_elementwise, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(producer)[0]
    with pytest.raises(tvm.tir.ScheduleError, match="ancestor of block"):
        schedule.compute_at(producer, target_loop)
def test_fail_output_block():
    """compute_at rejects moving an output block."""
    schedule = tir.Schedule(tiled, debug_mask="all")
    output_block = schedule.get_block("C")
    target_loop = schedule.get_loops(schedule.get_block("B"))[0]
    with pytest.raises(tvm.tir.ScheduleError, match="output block"):
        schedule.compute_at(output_block, target_loop)
def test_fail_all_consumers_under_loop():
    """compute_at rejects a move that does not cover every consumer."""
    schedule = tir.Schedule(fail_all_consumers_under_loop, debug_mask="all")
    producer = schedule.get_block("B")
    target_loop = schedule.get_loops(schedule.get_block("C"))[0]
    with pytest.raises(tvm.tir.ScheduleError, match="requires all the consumer"):
        schedule.compute_at(producer, target_loop)
def test_fail_all_producers_under_loop():
    """reverse_compute_at rejects a move that does not cover every producer."""
    schedule = tir.Schedule(fail_all_producers_under_loop, debug_mask="all")
    consumer = schedule.get_block("D")
    target_loop = schedule.get_loops(schedule.get_block("C"))[0]
    with pytest.raises(tvm.tir.ScheduleError, match="requires all the producer"):
        schedule.reverse_compute_at(consumer, target_loop)
if __name__ == "__main__":
    # Allow running this test file directly; extra CLI args are forwarded to
    # pytest and its status code becomes the process exit code.
    sys.exit(pytest.main([__file__] + sys.argv[1:]))
| 50.560624
| 137
| 0.506423
| 6,216
| 42,117
| 3.264479
| 0.037323
| 0.024837
| 0.048492
| 0.040804
| 0.92805
| 0.904692
| 0.890893
| 0.8767
| 0.863789
| 0.8459
| 0
| 0.096687
| 0.34359
| 42,117
| 832
| 138
| 50.621394
| 0.637307
| 0.032956
| 0
| 0.783149
| 0
| 0
| 0.05457
| 0
| 0
| 0
| 0
| 0
| 0.016575
| 1
| 0.05663
| false
| 0
| 0.008287
| 0
| 0.064917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
354394834df60f76bcfaa6aaf269e91b2aaa047f
| 16,377
|
py
|
Python
|
gitee/api/search_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
gitee/api/search_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
gitee/api/search_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from gitee.api_client import ApiClient
class SearchApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_v5_search_issues(self, q, **kwargs): # noqa: E501
"""搜索 Issues # noqa: E501
搜索 Issues # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_search_issues(q, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str q: 搜索关键字 (required)
:param str access_token: 用户授权码
:param int page: 当前的页码
:param int per_page: 每页的数量,最大为 100
:param str repo: 筛选指定仓库 (path, e.g. oschina/git-osc) 的 issues
:param str language: 筛选指定语言的 issues
:param str label: 筛选指定标签的 issues
:param str state: 筛选指定状态的 issues, open(开启)、closed(完成)、rejected(拒绝)
:param str author: 筛选指定创建者 (username/login) 的 issues
:param str assignee: 筛选指定负责人 (username/login) 的 issues
:param str sort: 排序字段,created_at(创建时间)、last_push_at(更新时间)、notes_count(评论数),默认为最佳匹配
:param str order: 排序顺序: desc(default)、asc
:return: list[Issue]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_search_issues_with_http_info(q, **kwargs) # noqa: E501
else:
(data) = self.get_v5_search_issues_with_http_info(q, **kwargs) # noqa: E501
return data
def get_v5_search_issues_with_http_info(self, q, **kwargs): # noqa: E501
"""搜索 Issues # noqa: E501
搜索 Issues # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_search_issues_with_http_info(q, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str q: 搜索关键字 (required)
:param str access_token: 用户授权码
:param int page: 当前的页码
:param int per_page: 每页的数量,最大为 100
:param str repo: 筛选指定仓库 (path, e.g. oschina/git-osc) 的 issues
:param str language: 筛选指定语言的 issues
:param str label: 筛选指定标签的 issues
:param str state: 筛选指定状态的 issues, open(开启)、closed(完成)、rejected(拒绝)
:param str author: 筛选指定创建者 (username/login) 的 issues
:param str assignee: 筛选指定负责人 (username/login) 的 issues
:param str sort: 排序字段,created_at(创建时间)、last_push_at(更新时间)、notes_count(评论数),默认为最佳匹配
:param str order: 排序顺序: desc(default)、asc
:return: list[Issue]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['q', 'access_token', 'page', 'per_page', 'repo', 'language', 'label', 'state', 'author',
'assignee', 'sort', 'order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_search_issues" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'q' is set
if ('q' not in params or
params['q'] is None):
raise ValueError("Missing the required parameter `q` when calling `get_v5_search_issues`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'q' in params:
query_params.append(('q', params['q'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'repo' in params:
query_params.append(('repo', params['repo'])) # noqa: E501
if 'language' in params:
query_params.append(('language', params['language'])) # noqa: E501
if 'label' in params:
query_params.append(('label', params['label'])) # noqa: E501
if 'state' in params:
query_params.append(('state', params['state'])) # noqa: E501
if 'author' in params:
query_params.append(('author', params['author'])) # noqa: E501
if 'assignee' in params:
query_params.append(('assignee', params['assignee'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'order' in params:
query_params.append(('order', params['order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/search/issues', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Issue]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_search_repositories(self, q, **kwargs): # noqa: E501
"""搜索仓库 # noqa: E501
搜索仓库 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_search_repositories(q, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str q: 搜索关键字 (required)
:param str access_token: 用户授权码
:param int page: 当前的页码
:param int per_page: 每页的数量,最大为 100
:param str owner: 筛选指定空间地址(企业、组织或个人的地址 path) 的仓库
:param bool fork: 是否搜索含 fork 的仓库,默认:否
:param str language: 筛选指定语言的仓库
:param str sort: 排序字段,last_push_at(更新时间)、stars_count(收藏数)、forks_count(Fork 数)、watches_count(关注数),默认为最佳匹配
:param str order: 排序顺序: desc(default)、asc
:return: list[Project]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_search_repositories_with_http_info(q, **kwargs) # noqa: E501
else:
(data) = self.get_v5_search_repositories_with_http_info(q, **kwargs) # noqa: E501
return data
def get_v5_search_repositories_with_http_info(self, q, **kwargs): # noqa: E501
"""搜索仓库 # noqa: E501
搜索仓库 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_search_repositories_with_http_info(q, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str q: 搜索关键字 (required)
:param str access_token: 用户授权码
:param int page: 当前的页码
:param int per_page: 每页的数量,最大为 100
:param str owner: 筛选指定空间地址(企业、组织或个人的地址 path) 的仓库
:param bool fork: 是否搜索含 fork 的仓库,默认:否
:param str language: 筛选指定语言的仓库
:param str sort: 排序字段,last_push_at(更新时间)、stars_count(收藏数)、forks_count(Fork 数)、watches_count(关注数),默认为最佳匹配
:param str order: 排序顺序: desc(default)、asc
:return: list[Project]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['q', 'access_token', 'page', 'per_page', 'owner', 'fork', 'language', 'sort',
'order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_v5_search_repositories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'q' is set
if ('q' not in params or
params['q'] is None):
raise ValueError(
"Missing the required parameter `q` when calling `get_v5_search_repositories`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'access_token' in params:
query_params.append(('access_token', params['access_token'])) # noqa: E501
if 'q' in params:
query_params.append(('q', params['q'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'owner' in params:
query_params.append(('owner', params['owner'])) # noqa: E501
if 'fork' in params:
query_params.append(('fork', params['fork'])) # noqa: E501
if 'language' in params:
query_params.append(('language', params['language'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'order' in params:
query_params.append(('order', params['order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v5/search/repositories', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Project]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_v5_search_users(self, q, **kwargs): # noqa: E501
"""搜索用户 # noqa: E501
搜索用户 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_v5_search_users(q, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str q: 搜索关键字 (required)
:param str access_token: 用户授权码
:param int page: 当前的页码
:param int per_page: 每页的数量,最大为 100
:param str sort: 排序字段,joined_at(注册时间),默认为最佳匹配
:param str order: 排序顺序: desc(default)、asc
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_v5_search_users_with_http_info(q, **kwargs) # noqa: E501
else:
(data) = self.get_v5_search_users_with_http_info(q, **kwargs) # noqa: E501
return data
def get_v5_search_users_with_http_info(self, q, **kwargs):  # noqa: E501
    """Search users.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_v5_search_users_with_http_info(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: search keyword (required)
    :param str access_token: user authorization token
    :param int page: current page number
    :param int per_page: items per page, at most 100
    :param str sort: sort field, joined_at (registration time); defaults to best match
    :param str order: sort order: desc (default) or asc
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied.
    :raises ValueError: if ``q`` is None.
    """
    # Endpoint query parameters plus the framework控制 keywords accepted
    # by every generated method.
    all_params = {'q', 'access_token', 'page', 'per_page', 'sort', 'order',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout'}
    # dict.items() behaves identically to six.iteritems() for this loop on
    # both Python 2 and 3, so the six dependency is unnecessary here.
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_v5_search_users" % key
            )
    # verify the required parameter 'q' is set
    if q is None:
        raise ValueError("Missing the required parameter `q` when calling `get_v5_search_users`")  # noqa: E501

    params = dict(kwargs, q=q)
    # Preserve the original emission order of the query parameters.
    query_params = [(name, params[name])
                    for name in ('access_token', 'q', 'page', 'per_page',
                                 'sort', 'order')
                    if name in params]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/v5/search/users', 'GET',
        {},   # path_params: this endpoint has no path placeholders
        query_params,
        header_params,
        body=None,
        post_params=[],
        files=[] if False else {},  # no multipart uploads for a GET search
        response_type='list[User]',  # noqa: E501
        auth_settings=[],  # no authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 40.041565
| 116
| 0.593882
| 1,987
| 16,377
| 4.682436
| 0.10921
| 0.05675
| 0.060297
| 0.055138
| 0.925516
| 0.90273
| 0.90273
| 0.900258
| 0.896389
| 0.896389
| 0
| 0.021705
| 0.299505
| 16,377
| 408
| 117
| 40.139706
| 0.789313
| 0.347194
| 0
| 0.740741
| 0
| 0
| 0.178901
| 0.032923
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032407
| false
| 0
| 0.018519
| 0
| 0.097222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
102bb6b84b789506d2db5aff5d39e87ef3d9997d
| 656
|
py
|
Python
|
python/labs/pluralize-it/pluralizer.py
|
TheOtakuNinja/cssi-labs
|
2875a1c7cc86a182e367733c1bf9bb79b8e92a94
|
[
"Apache-2.0"
] | null | null | null |
python/labs/pluralize-it/pluralizer.py
|
TheOtakuNinja/cssi-labs
|
2875a1c7cc86a182e367733c1bf9bb79b8e92a94
|
[
"Apache-2.0"
] | null | null | null |
python/labs/pluralize-it/pluralizer.py
|
TheOtakuNinja/cssi-labs
|
2875a1c7cc86a182e367733c1bf9bb79b8e92a94
|
[
"Apache-2.0"
] | null | null | null |
"""Pluralize-it lab: report how many of an animal are alive/dead/remaining.

Reconstructed from a broken original: the suffix test indexed with an
undefined variable (``word[-x]``), two branches had empty comparisons
(``==  :`` — syntax errors), py2 ``raw_input`` was mixed with py3-style
``print()`` calls, and one message had a stray leading space.  The
evident intent — nouns ending in "ife" pluralize to "ives", everything
else just appends "s" — is implemented below.
"""


def pluralize_message(num, word):
    """Return the status sentence for *num* occurrences of *word*.

    :param int num: signed count of animals.
    :param str word: singular animal name.
    :return str: formatted sentence using the lab's message templates.
    """
    # "knife" -> "knives"; anything else -> word + "s".
    if word.endswith("ife"):
        plural = word[:-3] + "ives"
    else:
        plural = word + "s"
    if num == 0:
        # Original template kept as-is (singular form for a zero count).
        return "there is %d %s alive" % (num, word)
    if num >= 2:
        return "there are %d %s alive" % (num, plural)
    if num <= -1:
        return "there are %d %s dead" % (num, plural)
    # Only num == 1 reaches here: the original catch-all message.
    return "there are %d %s remaining" % (num, word)


if __name__ == "__main__":
    num = int(input("Please enter a number: "))
    word = input("Please enter a animal: ")
    print(pluralize_message(num, word))
| 38.588235
| 52
| 0.559451
| 112
| 656
| 3.258929
| 0.258929
| 0.191781
| 0.131507
| 0.191781
| 0.827397
| 0.717808
| 0.717808
| 0.717808
| 0.613699
| 0.553425
| 0
| 0.011765
| 0.222561
| 656
| 16
| 53
| 41
| 0.703922
| 0
| 0
| 0.375
| 0
| 0
| 0.321646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.4375
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
10911bf2521e1a29c19bca62c91e6765eeb398c4
| 2,434
|
py
|
Python
|
test/test_sinkhorn_2node.py
|
zqsh/cbp
|
4456a93a83f717395bd2445e1ba37f1dd17c317c
|
[
"MIT"
] | 3
|
2020-09-10T16:06:45.000Z
|
2021-08-04T20:08:43.000Z
|
test/test_sinkhorn_2node.py
|
zqsh/cbp
|
4456a93a83f717395bd2445e1ba37f1dd17c317c
|
[
"MIT"
] | null | null | null |
test/test_sinkhorn_2node.py
|
zqsh/cbp
|
4456a93a83f717395bd2445e1ba37f1dd17c317c
|
[
"MIT"
] | 2
|
2022-02-07T22:28:44.000Z
|
2022-02-09T12:10:13.000Z
|
import unittest
import numpy as np
from .utils import two_node_tree
class TestGraph(unittest.TestCase):
    """Sinkhorn marginal checks on a two-node tree fixture."""

    def setUp(self):
        self.graph = two_node_tree()

    def _constrain(self, name, marginal):
        # Pin one variable node to a fixed marginal and register it as
        # constrained before running sinkhorn.
        node = self.graph.varnode_recorder[name]
        node.constrained_marginal = marginal
        node.isconstrained = True
        self.graph.constrained_names.append(name)

    def _assert_marginal(self, name, expected):
        # Element-wise closeness of the node's sinkhorn marginal.
        actual = self.graph.varnode_recorder[name].sinkhorn
        self.assertTrue(all(np.isclose(actual, expected)))

    def test_sinkhorn_single_0(self):
        self._constrain('VarNode_000', np.array([0.5, 0.5]))
        self.graph.sinkhorn()
        self._assert_marginal('VarNode_000', np.array([0.5, 0.5]))
        self._assert_marginal('VarNode_001', np.array([0.375, 0.625]))

    def test_sinkhorn_single_1(self):
        self._constrain('VarNode_001', np.array([0.5, 0.5]))
        self.graph.sinkhorn()
        self._assert_marginal('VarNode_001', np.array([0.5, 0.5]))
        self._assert_marginal('VarNode_000', np.array([23.0 / 76, 53.0 / 76]))

    def test_sinkhorn_2(self):
        self._constrain('VarNode_000', np.array([0.5, 0.5]))
        self._constrain('VarNode_001', np.array([0.5, 0.5]))
        self.graph.sinkhorn()
        self._assert_marginal('VarNode_000', np.array([0.5, 0.5]))
        self._assert_marginal('VarNode_001', np.array([0.5, 0.5]))
| 36.878788
| 73
| 0.624486
| 300
| 2,434
| 4.853333
| 0.146667
| 0.135989
| 0.153846
| 0.230769
| 0.887363
| 0.887363
| 0.884615
| 0.884615
| 0.828297
| 0.828297
| 0
| 0.058374
| 0.246919
| 2,434
| 65
| 74
| 37.446154
| 0.735952
| 0
| 0
| 0.706897
| 0
| 0
| 0.081348
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 1
| 0.068966
| false
| 0
| 0.051724
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
10a4b80a089454eb409814e65adb326091eead27
| 156
|
py
|
Python
|
tournament/__init__.py
|
zedoax/Hubris
|
0f57fb72afbfbed22699628af63fa324c29ed3d2
|
[
"MIT"
] | 1
|
2018-02-24T22:45:03.000Z
|
2018-02-24T22:45:03.000Z
|
tournament/__init__.py
|
zedoax/Hubris
|
0f57fb72afbfbed22699628af63fa324c29ed3d2
|
[
"MIT"
] | 5
|
2018-02-24T22:49:46.000Z
|
2018-03-02T00:27:14.000Z
|
tournament/__init__.py
|
zedoax/Hubris
|
0f57fb72afbfbed22699628af63fa324c29ed3d2
|
[
"MIT"
] | 2
|
2018-02-24T22:47:17.000Z
|
2019-01-21T07:32:02.000Z
|
# Package bootstrap for the ``tournament`` Flask blueprint.
from flask import Blueprint

# Single blueprint instance shared by the package; templates resolve
# against the package-local ``templates`` directory.
tournament_blueprint = Blueprint('tournament_blueprint', __name__, template_folder='templates')

# Imported last so that ``routes`` can reference the blueprint created
# above (presumably its view decorators use ``tournament_blueprint`` —
# TODO confirm against tournament/routes.py).
from tournament import routes
| 26
| 95
| 0.839744
| 17
| 156
| 7.294118
| 0.588235
| 0.306452
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096154
| 156
| 5
| 96
| 31.2
| 0.879433
| 0
| 0
| 0
| 0
| 0
| 0.185897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
52b0c835397583a9b02576d7785da37112863110
| 14,807
|
py
|
Python
|
largefile.py
|
deltaruby-ps/Wars-of-Palatia-Bot
|
3ecf06d46476c0be03873ba12c07becb5806ce46
|
[
"MIT"
] | null | null | null |
largefile.py
|
deltaruby-ps/Wars-of-Palatia-Bot
|
3ecf06d46476c0be03873ba12c07becb5806ce46
|
[
"MIT"
] | null | null | null |
largefile.py
|
deltaruby-ps/Wars-of-Palatia-Bot
|
3ecf06d46476c0be03873ba12c07becb5806ce46
|
[
"MIT"
] | null | null | null |
def cmap(squadDict):
    """Render the 7x13 battle grid as an HTML table fragment.

    Replaces a single ~14KB hand-duplicated string (112 near-identical
    cells) with a loop over rows/columns.  Also normalizes the one
    inconsistent cell in the original (F1 carried a stray trailing
    space inside its <b> tag).

    :param squadDict: mapping of row letter ('A'..'G') to a mapping of
        the column number *as a string* ('1'..'13') to that cell's
        display text.
    :return: HTML string.
    """
    # Every data/header cell in the original used exactly this markup.
    cell_fmt = ('<td style=background-color:#FFFFFF; width="20px" '
                'height="20px"; align="center">'
                '<b style="color:black">%s</b></td>')
    # The corner cell has no color:black style — just a blank bold space.
    corner = ('<td style=background-color:#FFFFFF; width="20px" '
              'height="20px"; align="center"><b> </b></td>')
    rows = []
    # Header row: blank corner plus column numbers 1..13.
    rows.append('<tr>%s</tr>' %
                (corner + ''.join(cell_fmt % col for col in range(1, 14))))
    # Data rows A..G: row letter, then the 13 squad cells for that row.
    for letter in 'ABCDEFG':
        cells = [cell_fmt % letter]
        cells.extend(cell_fmt % squadDict[letter][str(col)]
                     for col in range(1, 14))
        rows.append('<tr>%s</tr>' % ''.join(cells))
    # NOTE: the original markup never closes <body>/<html>; preserved
    # as-is so downstream consumers see the same output shape.
    return ('<div class="infobox"><html><body>'
            '<div style="width:auto;height:auto;overflow:auto">'
            '<table align="center" border="2">'
            + ''.join(rows)
            + '</table></div></div>')
| 4,935.666667
| 14,785
| 0.662862
| 2,236
| 14,807
| 4.389535
| 0.020572
| 0.126643
| 0.193989
| 0.251044
| 0.986347
| 0.986347
| 0.986347
| 0.986347
| 0.986347
| 0.986347
| 0
| 0.041099
| 0.038698
| 14,807
| 2
| 14,786
| 7,403.5
| 0.648447
| 0
| 0
| 0
| 0
| 45.5
| 0.868238
| 0.296346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 16
|
5e2897aa5ebe7d527e37bb55807a85755f6d13cf
| 36,455
|
py
|
Python
|
sdk/python/pulumi_rancher2/multi_cluster_app.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-03-23T15:59:11.000Z
|
2021-01-29T00:37:32.000Z
|
sdk/python/pulumi_rancher2/multi_cluster_app.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 76
|
2020-01-16T20:00:25.000Z
|
2022-03-31T20:30:08.000Z
|
sdk/python/pulumi_rancher2/multi_cluster_app.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-03-27T17:39:59.000Z
|
2020-11-24T23:09:24.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['MultiClusterAppArgs', 'MultiClusterApp']
@pulumi.input_type
class MultiClusterAppArgs:
    """Typed argument bundle for constructing a ``MultiClusterApp`` resource.

    Auto-generated by the Pulumi Terraform Bridge (tfgen): each field is
    stored via ``pulumi.set`` and surfaced through a matching
    getter/setter property pair below, so changes normally belong in the
    upstream schema, not in this file.
    """

    def __init__(__self__, *,
                 catalog_name: pulumi.Input[str],
                 roles: pulumi.Input[Sequence[pulumi.Input[str]]],
                 targets: pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]],
                 template_name: pulumi.Input[str],
                 annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 answers: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 revision_history_limit: Optional[pulumi.Input[int]] = None,
                 revision_id: Optional[pulumi.Input[str]] = None,
                 template_version: Optional[pulumi.Input[str]] = None,
                 upgrade_strategy: Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']] = None,
                 wait: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a MultiClusterApp resource.
        :param pulumi.Input[str] catalog_name: The multi cluster app catalog name (string)
        :param pulumi.Input[Sequence[pulumi.Input[str]]] roles: The multi cluster app roles (list)
        :param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]] targets: The multi cluster app target projects (list)
        :param pulumi.Input[str] template_name: The multi cluster app template name (string)
        :param pulumi.Input[Mapping[str, Any]] annotations: Annotations for multi cluster app object (map)
        :param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]] answers: The multi cluster app answers (list)
        :param pulumi.Input[Mapping[str, Any]] labels: Labels for multi cluster app object (map)
        :param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]] members: The multi cluster app members (list)
        :param pulumi.Input[str] name: The multi cluster app name (string)
        :param pulumi.Input[int] revision_history_limit: The multi cluster app revision history limit. Default `10` (int)
        :param pulumi.Input[str] revision_id: Current revision id for the multi cluster app (string)
        :param pulumi.Input[str] template_version: The multi cluster app template version. Default: `latest` (string)
        :param pulumi.Input['MultiClusterAppUpgradeStrategyArgs'] upgrade_strategy: The multi cluster app upgrade strategy (list MaxItems:1)
        :param pulumi.Input[bool] wait: Wait until the multi cluster app is active. Default `true` (bool)
        """
        # Required inputs are always stored; optional ones only when
        # provided, so unset fields stay absent rather than being
        # recorded as explicit None values.
        pulumi.set(__self__, "catalog_name", catalog_name)
        pulumi.set(__self__, "roles", roles)
        pulumi.set(__self__, "targets", targets)
        pulumi.set(__self__, "template_name", template_name)
        if annotations is not None:
            pulumi.set(__self__, "annotations", annotations)
        if answers is not None:
            pulumi.set(__self__, "answers", answers)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if members is not None:
            pulumi.set(__self__, "members", members)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if revision_history_limit is not None:
            pulumi.set(__self__, "revision_history_limit", revision_history_limit)
        if revision_id is not None:
            pulumi.set(__self__, "revision_id", revision_id)
        if template_version is not None:
            pulumi.set(__self__, "template_version", template_version)
        if upgrade_strategy is not None:
            pulumi.set(__self__, "upgrade_strategy", upgrade_strategy)
        if wait is not None:
            pulumi.set(__self__, "wait", wait)

    # --- generated property pairs: snake_case attribute backed by the
    # --- camelCase wire name where the two differ.

    @property
    @pulumi.getter(name="catalogName")
    def catalog_name(self) -> pulumi.Input[str]:
        """
        The multi cluster app catalog name (string)
        """
        return pulumi.get(self, "catalog_name")

    @catalog_name.setter
    def catalog_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "catalog_name", value)

    @property
    @pulumi.getter
    def roles(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        The multi cluster app roles (list)
        """
        return pulumi.get(self, "roles")

    @roles.setter
    def roles(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "roles", value)

    @property
    @pulumi.getter
    def targets(self) -> pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]]:
        """
        The multi cluster app target projects (list)
        """
        return pulumi.get(self, "targets")

    @targets.setter
    def targets(self, value: pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]]):
        pulumi.set(self, "targets", value)

    @property
    @pulumi.getter(name="templateName")
    def template_name(self) -> pulumi.Input[str]:
        """
        The multi cluster app template name (string)
        """
        return pulumi.get(self, "template_name")

    @template_name.setter
    def template_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "template_name", value)

    @property
    @pulumi.getter
    def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Annotations for multi cluster app object (map)
        """
        return pulumi.get(self, "annotations")

    @annotations.setter
    def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "annotations", value)

    @property
    @pulumi.getter
    def answers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]]:
        """
        The multi cluster app answers (list)
        """
        return pulumi.get(self, "answers")

    @answers.setter
    def answers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]]):
        pulumi.set(self, "answers", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Labels for multi cluster app object (map)
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter
    def members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]]:
        """
        The multi cluster app members (list)
        """
        return pulumi.get(self, "members")

    @members.setter
    def members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]]):
        pulumi.set(self, "members", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The multi cluster app name (string)
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="revisionHistoryLimit")
    def revision_history_limit(self) -> Optional[pulumi.Input[int]]:
        """
        The multi cluster app revision history limit. Default `10` (int)
        """
        return pulumi.get(self, "revision_history_limit")

    @revision_history_limit.setter
    def revision_history_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "revision_history_limit", value)

    @property
    @pulumi.getter(name="revisionId")
    def revision_id(self) -> Optional[pulumi.Input[str]]:
        """
        Current revision id for the multi cluster app (string)
        """
        return pulumi.get(self, "revision_id")

    @revision_id.setter
    def revision_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "revision_id", value)

    @property
    @pulumi.getter(name="templateVersion")
    def template_version(self) -> Optional[pulumi.Input[str]]:
        """
        The multi cluster app template version. Default: `latest` (string)
        """
        return pulumi.get(self, "template_version")

    @template_version.setter
    def template_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template_version", value)

    @property
    @pulumi.getter(name="upgradeStrategy")
    def upgrade_strategy(self) -> Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']]:
        """
        The multi cluster app upgrade strategy (list MaxItems:1)
        """
        return pulumi.get(self, "upgrade_strategy")

    @upgrade_strategy.setter
    def upgrade_strategy(self, value: Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']]):
        pulumi.set(self, "upgrade_strategy", value)

    @property
    @pulumi.getter
    def wait(self) -> Optional[pulumi.Input[bool]]:
        """
        Wait until the multi cluster app is active. Default `true` (bool)
        """
        return pulumi.get(self, "wait")

    @wait.setter
    def wait(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait", value)
@pulumi.input_type
class _MultiClusterAppState:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
answers: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]] = None,
catalog_name: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
revision_history_limit: Optional[pulumi.Input[int]] = None,
revision_id: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
targets: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]]] = None,
template_name: Optional[pulumi.Input[str]] = None,
template_version: Optional[pulumi.Input[str]] = None,
template_version_id: Optional[pulumi.Input[str]] = None,
upgrade_strategy: Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']] = None,
wait: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering MultiClusterApp resources.
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]] answers: The multi cluster app answers (list)
:param pulumi.Input[str] catalog_name: The multi cluster app catalog name (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]] members: The multi cluster app answers (list)
:param pulumi.Input[str] name: The multi cluster app name (string)
:param pulumi.Input[int] revision_history_limit: The multi cluster app revision history limit. Default `10` (int)
:param pulumi.Input[str] revision_id: Current revision id for the multi cluster app (string)
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: The multi cluster app roles (list)
:param pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]] targets: The multi cluster app target projects (list)
:param pulumi.Input[str] template_name: The multi cluster app template name (string)
:param pulumi.Input[str] template_version: The multi cluster app template version. Default: `latest` (string)
:param pulumi.Input[str] template_version_id: (Computed) The multi cluster app template version ID (string)
:param pulumi.Input['MultiClusterAppUpgradeStrategyArgs'] upgrade_strategy: The multi cluster app upgrade strategy (list MaxItems:1)
:param pulumi.Input[bool] wait: Wait until the multi cluster app is active. Default `true` (bool)
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if answers is not None:
pulumi.set(__self__, "answers", answers)
if catalog_name is not None:
pulumi.set(__self__, "catalog_name", catalog_name)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if members is not None:
pulumi.set(__self__, "members", members)
if name is not None:
pulumi.set(__self__, "name", name)
if revision_history_limit is not None:
pulumi.set(__self__, "revision_history_limit", revision_history_limit)
if revision_id is not None:
pulumi.set(__self__, "revision_id", revision_id)
if roles is not None:
pulumi.set(__self__, "roles", roles)
if targets is not None:
pulumi.set(__self__, "targets", targets)
if template_name is not None:
pulumi.set(__self__, "template_name", template_name)
if template_version is not None:
pulumi.set(__self__, "template_version", template_version)
if template_version_id is not None:
pulumi.set(__self__, "template_version_id", template_version_id)
if upgrade_strategy is not None:
pulumi.set(__self__, "upgrade_strategy", upgrade_strategy)
if wait is not None:
pulumi.set(__self__, "wait", wait)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Annotations for multi cluster app object (map)
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter
def answers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]]:
"""
The multi cluster app answers (list)
"""
return pulumi.get(self, "answers")
@answers.setter
def answers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppAnswerArgs']]]]):
pulumi.set(self, "answers", value)
@property
@pulumi.getter(name="catalogName")
def catalog_name(self) -> Optional[pulumi.Input[str]]:
"""
The multi cluster app catalog name (string)
"""
return pulumi.get(self, "catalog_name")
@catalog_name.setter
def catalog_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "catalog_name", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Labels for multi cluster app object (map)
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]]:
"""
The multi cluster app answers (list)
"""
return pulumi.get(self, "members")
@members.setter
def members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppMemberArgs']]]]):
pulumi.set(self, "members", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The multi cluster app name (string)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="revisionHistoryLimit")
def revision_history_limit(self) -> Optional[pulumi.Input[int]]:
"""
The multi cluster app revision history limit. Default `10` (int)
"""
return pulumi.get(self, "revision_history_limit")
@revision_history_limit.setter
def revision_history_limit(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "revision_history_limit", value)
@property
@pulumi.getter(name="revisionId")
def revision_id(self) -> Optional[pulumi.Input[str]]:
"""
Current revision id for the multi cluster app (string)
"""
return pulumi.get(self, "revision_id")
@revision_id.setter
def revision_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "revision_id", value)
@property
@pulumi.getter
def roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The multi cluster app roles (list)
"""
return pulumi.get(self, "roles")
@roles.setter
def roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "roles", value)
@property
@pulumi.getter
def targets(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]]]:
"""
The multi cluster app target projects (list)
"""
return pulumi.get(self, "targets")
@targets.setter
def targets(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MultiClusterAppTargetArgs']]]]):
pulumi.set(self, "targets", value)
@property
@pulumi.getter(name="templateName")
def template_name(self) -> Optional[pulumi.Input[str]]:
"""
The multi cluster app template name (string)
"""
return pulumi.get(self, "template_name")
@template_name.setter
def template_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_name", value)
@property
@pulumi.getter(name="templateVersion")
def template_version(self) -> Optional[pulumi.Input[str]]:
"""
The multi cluster app template version. Default: `latest` (string)
"""
return pulumi.get(self, "template_version")
@template_version.setter
def template_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_version", value)
@property
@pulumi.getter(name="templateVersionId")
def template_version_id(self) -> Optional[pulumi.Input[str]]:
"""
(Computed) The multi cluster app template version ID (string)
"""
return pulumi.get(self, "template_version_id")
@template_version_id.setter
def template_version_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "template_version_id", value)
@property
@pulumi.getter(name="upgradeStrategy")
def upgrade_strategy(self) -> Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']]:
"""
The multi cluster app upgrade strategy (list MaxItems:1)
"""
return pulumi.get(self, "upgrade_strategy")
@upgrade_strategy.setter
def upgrade_strategy(self, value: Optional[pulumi.Input['MultiClusterAppUpgradeStrategyArgs']]):
pulumi.set(self, "upgrade_strategy", value)
@property
@pulumi.getter
def wait(self) -> Optional[pulumi.Input[bool]]:
"""
Wait until the multi cluster app is active. Default `true` (bool)
"""
return pulumi.get(self, "wait")
@wait.setter
def wait(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "wait", value)
class MultiClusterApp(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
answers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppAnswerArgs']]]]] = None,
catalog_name: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppMemberArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
revision_history_limit: Optional[pulumi.Input[int]] = None,
revision_id: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
targets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppTargetArgs']]]]] = None,
template_name: Optional[pulumi.Input[str]] = None,
template_version: Optional[pulumi.Input[str]] = None,
upgrade_strategy: Optional[pulumi.Input[pulumi.InputType['MultiClusterAppUpgradeStrategyArgs']]] = None,
wait: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
## Import
Multi cluster app can be imported using the multi cluster app ID in the format `<multi_cluster_app_name>`
```sh
$ pulumi import rancher2:index/multiClusterApp:MultiClusterApp foo <MULTI_CLUSTER_APP_ID>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppAnswerArgs']]]] answers: The multi cluster app answers (list)
:param pulumi.Input[str] catalog_name: The multi cluster app catalog name (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppMemberArgs']]]] members: The multi cluster app answers (list)
:param pulumi.Input[str] name: The multi cluster app name (string)
:param pulumi.Input[int] revision_history_limit: The multi cluster app revision history limit. Default `10` (int)
:param pulumi.Input[str] revision_id: Current revision id for the multi cluster app (string)
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: The multi cluster app roles (list)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppTargetArgs']]]] targets: The multi cluster app target projects (list)
:param pulumi.Input[str] template_name: The multi cluster app template name (string)
:param pulumi.Input[str] template_version: The multi cluster app template version. Default: `latest` (string)
:param pulumi.Input[pulumi.InputType['MultiClusterAppUpgradeStrategyArgs']] upgrade_strategy: The multi cluster app upgrade strategy (list MaxItems:1)
:param pulumi.Input[bool] wait: Wait until the multi cluster app is active. Default `true` (bool)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: MultiClusterAppArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
Multi cluster app can be imported using the multi cluster app ID in the format `<multi_cluster_app_name>`
```sh
$ pulumi import rancher2:index/multiClusterApp:MultiClusterApp foo <MULTI_CLUSTER_APP_ID>
```
:param str resource_name: The name of the resource.
:param MultiClusterAppArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(MultiClusterAppArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
answers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppAnswerArgs']]]]] = None,
catalog_name: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppMemberArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
revision_history_limit: Optional[pulumi.Input[int]] = None,
revision_id: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
targets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppTargetArgs']]]]] = None,
template_name: Optional[pulumi.Input[str]] = None,
template_version: Optional[pulumi.Input[str]] = None,
upgrade_strategy: Optional[pulumi.Input[pulumi.InputType['MultiClusterAppUpgradeStrategyArgs']]] = None,
wait: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = MultiClusterAppArgs.__new__(MultiClusterAppArgs)
__props__.__dict__["annotations"] = annotations
__props__.__dict__["answers"] = answers
if catalog_name is None and not opts.urn:
raise TypeError("Missing required property 'catalog_name'")
__props__.__dict__["catalog_name"] = catalog_name
__props__.__dict__["labels"] = labels
__props__.__dict__["members"] = members
__props__.__dict__["name"] = name
__props__.__dict__["revision_history_limit"] = revision_history_limit
__props__.__dict__["revision_id"] = revision_id
if roles is None and not opts.urn:
raise TypeError("Missing required property 'roles'")
__props__.__dict__["roles"] = roles
if targets is None and not opts.urn:
raise TypeError("Missing required property 'targets'")
__props__.__dict__["targets"] = targets
if template_name is None and not opts.urn:
raise TypeError("Missing required property 'template_name'")
__props__.__dict__["template_name"] = template_name
__props__.__dict__["template_version"] = template_version
__props__.__dict__["upgrade_strategy"] = upgrade_strategy
__props__.__dict__["wait"] = wait
__props__.__dict__["template_version_id"] = None
super(MultiClusterApp, __self__).__init__(
'rancher2:index/multiClusterApp:MultiClusterApp',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
answers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppAnswerArgs']]]]] = None,
catalog_name: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppMemberArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
revision_history_limit: Optional[pulumi.Input[int]] = None,
revision_id: Optional[pulumi.Input[str]] = None,
roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
targets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppTargetArgs']]]]] = None,
template_name: Optional[pulumi.Input[str]] = None,
template_version: Optional[pulumi.Input[str]] = None,
template_version_id: Optional[pulumi.Input[str]] = None,
upgrade_strategy: Optional[pulumi.Input[pulumi.InputType['MultiClusterAppUpgradeStrategyArgs']]] = None,
wait: Optional[pulumi.Input[bool]] = None) -> 'MultiClusterApp':
"""
Get an existing MultiClusterApp resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppAnswerArgs']]]] answers: The multi cluster app answers (list)
:param pulumi.Input[str] catalog_name: The multi cluster app catalog name (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels for multi cluster app object (map)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppMemberArgs']]]] members: The multi cluster app answers (list)
:param pulumi.Input[str] name: The multi cluster app name (string)
:param pulumi.Input[int] revision_history_limit: The multi cluster app revision history limit. Default `10` (int)
:param pulumi.Input[str] revision_id: Current revision id for the multi cluster app (string)
:param pulumi.Input[Sequence[pulumi.Input[str]]] roles: The multi cluster app roles (list)
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MultiClusterAppTargetArgs']]]] targets: The multi cluster app target projects (list)
:param pulumi.Input[str] template_name: The multi cluster app template name (string)
:param pulumi.Input[str] template_version: The multi cluster app template version. Default: `latest` (string)
:param pulumi.Input[str] template_version_id: (Computed) The multi cluster app template version ID (string)
:param pulumi.Input[pulumi.InputType['MultiClusterAppUpgradeStrategyArgs']] upgrade_strategy: The multi cluster app upgrade strategy (list MaxItems:1)
:param pulumi.Input[bool] wait: Wait until the multi cluster app is active. Default `true` (bool)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _MultiClusterAppState.__new__(_MultiClusterAppState)
__props__.__dict__["annotations"] = annotations
__props__.__dict__["answers"] = answers
__props__.__dict__["catalog_name"] = catalog_name
__props__.__dict__["labels"] = labels
__props__.__dict__["members"] = members
__props__.__dict__["name"] = name
__props__.__dict__["revision_history_limit"] = revision_history_limit
__props__.__dict__["revision_id"] = revision_id
__props__.__dict__["roles"] = roles
__props__.__dict__["targets"] = targets
__props__.__dict__["template_name"] = template_name
__props__.__dict__["template_version"] = template_version
__props__.__dict__["template_version_id"] = template_version_id
__props__.__dict__["upgrade_strategy"] = upgrade_strategy
__props__.__dict__["wait"] = wait
return MultiClusterApp(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def annotations(self) -> pulumi.Output[Mapping[str, Any]]:
"""
Annotations for multi cluster app object (map)
"""
return pulumi.get(self, "annotations")
@property
@pulumi.getter
def answers(self) -> pulumi.Output[Sequence['outputs.MultiClusterAppAnswer']]:
"""
The multi cluster app answers (list)
"""
return pulumi.get(self, "answers")
@property
@pulumi.getter(name="catalogName")
def catalog_name(self) -> pulumi.Output[str]:
"""
The multi cluster app catalog name (string)
"""
return pulumi.get(self, "catalog_name")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, Any]]:
"""
Labels for multi cluster app object (map)
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def members(self) -> pulumi.Output[Optional[Sequence['outputs.MultiClusterAppMember']]]:
"""
The multi cluster app answers (list)
"""
return pulumi.get(self, "members")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The multi cluster app name (string)
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="revisionHistoryLimit")
def revision_history_limit(self) -> pulumi.Output[Optional[int]]:
"""
The multi cluster app revision history limit. Default `10` (int)
"""
return pulumi.get(self, "revision_history_limit")
@property
@pulumi.getter(name="revisionId")
def revision_id(self) -> pulumi.Output[str]:
"""
Current revision id for the multi cluster app (string)
"""
return pulumi.get(self, "revision_id")
@property
@pulumi.getter
def roles(self) -> pulumi.Output[Sequence[str]]:
"""
The multi cluster app roles (list)
"""
return pulumi.get(self, "roles")
@property
@pulumi.getter
def targets(self) -> pulumi.Output[Sequence['outputs.MultiClusterAppTarget']]:
"""
The multi cluster app target projects (list)
"""
return pulumi.get(self, "targets")
@property
@pulumi.getter(name="templateName")
def template_name(self) -> pulumi.Output[str]:
"""
The multi cluster app template name (string)
"""
return pulumi.get(self, "template_name")
@property
@pulumi.getter(name="templateVersion")
def template_version(self) -> pulumi.Output[str]:
"""
The multi cluster app template version. Default: `latest` (string)
"""
return pulumi.get(self, "template_version")
@property
@pulumi.getter(name="templateVersionId")
def template_version_id(self) -> pulumi.Output[str]:
"""
(Computed) The multi cluster app template version ID (string)
"""
return pulumi.get(self, "template_version_id")
@property
@pulumi.getter(name="upgradeStrategy")
def upgrade_strategy(self) -> pulumi.Output['outputs.MultiClusterAppUpgradeStrategy']:
"""
The multi cluster app upgrade strategy (list MaxItems:1)
"""
return pulumi.get(self, "upgrade_strategy")
@property
@pulumi.getter
def wait(self) -> pulumi.Output[Optional[bool]]:
"""
Wait until the multi cluster app is active. Default `true` (bool)
"""
return pulumi.get(self, "wait")
| 45.285714
| 158
| 0.652695
| 4,041
| 36,455
| 5.698342
| 0.045781
| 0.116559
| 0.097364
| 0.070352
| 0.911061
| 0.895297
| 0.87506
| 0.860208
| 0.847787
| 0.837278
| 0
| 0.000894
| 0.233082
| 36,455
| 804
| 159
| 45.34204
| 0.822734
| 0.265121
| 0
| 0.798354
| 1
| 0
| 0.126621
| 0.053873
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164609
| false
| 0.002058
| 0.014403
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eab24fdd95ab786efd228688e40dee61b71e805b
| 4,966
|
py
|
Python
|
app/core/migrations/0005_auto_20200103_2337.py
|
Raysultan/roscosmos-stats
|
8931ee824c4e4cd67ae4f86ce221515b00d9e872
|
[
"MIT"
] | 5
|
2020-11-24T09:57:36.000Z
|
2021-11-17T08:02:29.000Z
|
app/core/migrations/0005_auto_20200103_2337.py
|
raisultan/roscosmos-api
|
8931ee824c4e4cd67ae4f86ce221515b00d9e872
|
[
"MIT"
] | null | null | null |
app/core/migrations/0005_auto_20200103_2337.py
|
raisultan/roscosmos-api
|
8931ee824c4e4cd67ae4f86ce221515b00d9e872
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.2 on 2020-01-03 23:37
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0004_auto_20200102_1801'),
]
operations = [
migrations.CreateModel(
name='ParseUrl',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('url', models.URLField()),
('description', models.TextField(blank=True)),
],
),
migrations.AlterField(
model_name='launchpad',
name='use_period',
field=models.CharField(blank=True, default='', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='launchpad',
name='used_by',
field=models.CharField(blank=True, default='', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='launchvehicle',
name='max_distance',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='accuracy',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='coverage',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='description',
field=models.TextField(blank=True, default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='orbit_height',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='orbital_inclination',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='orbitalgrouping',
name='orbital_period',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='accuracy',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='coverage_diameter',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='description',
field=models.TextField(blank=True, default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='manufacturer',
field=models.CharField(blank=True, default='', max_length=255),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='orbital_inclination',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacecraft',
name='orbital_period',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacetug',
name='autonomous_flight_time',
field=models.CharField(blank=True, default='', max_length=64),
preserve_default=False,
),
migrations.AlterField(
model_name='spacetug',
name='description',
field=models.TextField(blank=True, default=''),
preserve_default=False,
),
migrations.CreateModel(
name='ParserLaunch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('parse_date', models.DateField(auto_now_add=True)),
('no_launches', models.IntegerField()),
('last_saved_launch_no', models.IntegerField()),
('url', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.ParseUrl')),
],
),
]
| 36.514706
| 114
| 0.564841
| 442
| 4,966
| 6.171946
| 0.205882
| 0.059384
| 0.155792
| 0.180718
| 0.782625
| 0.782625
| 0.762463
| 0.762463
| 0.762463
| 0.748167
| 0
| 0.019084
| 0.314136
| 4,966
| 135
| 115
| 36.785185
| 0.781856
| 0.009062
| 0
| 0.79845
| 1
| 0
| 0.110185
| 0.009148
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015504
| 0
| 0.03876
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eacdd95582f84bc8320cab826fbc4cbd04c8df10
| 382,261
|
py
|
Python
|
tools/bm_runtime/standard/Standard.py
|
clickp4/bmv2
|
aca47e23c968353a9ffd27e9b6305f41cefadf6b
|
[
"Apache-2.0"
] | 2
|
2018-08-29T22:58:07.000Z
|
2018-08-30T01:44:30.000Z
|
tools/bm_runtime/standard/Standard.py
|
clickp4/bmv2
|
aca47e23c968353a9ffd27e9b6305f41cefadf6b
|
[
"Apache-2.0"
] | null | null | null |
tools/bm_runtime/standard/Standard.py
|
clickp4/bmv2
|
aca47e23c968353a9ffd27e9b6305f41cefadf6b
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
  """Thrift-generated service interface.

  One abstract stub per bm_* RPC: match-action table manipulation
  (direct, indirect, and indirect with selector groups, "ws"),
  counters, meters, registers, learning, configuration loading/swap,
  and device/port management. Each stub lists its parameters only;
  concrete behavior is provided by implementations such as the
  generated Client (RPC caller) and Processor (server dispatcher).
  """
  def bm_mt_add_entry(self, cxt_id, table_name, match_key, action_name, action_data, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - action_name
     - action_data
     - options
    """
    pass
  def bm_mt_set_default_action(self, cxt_id, table_name, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - action_name
     - action_data
    """
    pass
  def bm_mt_delete_entry(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    pass
  def bm_mt_modify_entry(self, cxt_id, table_name, entry_handle, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - action_name
     - action_data
    """
    pass
  def bm_mt_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - timeout_ms
    """
    pass
  def bm_mt_indirect_add_member(self, cxt_id, table_name, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - action_name
     - action_data
    """
    pass
  def bm_mt_indirect_delete_member(self, cxt_id, table_name, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
    """
    pass
  def bm_mt_indirect_modify_member(self, cxt_id, table_name, mbr_handle, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - action_name
     - action_data
    """
    pass
  def bm_mt_indirect_add_entry(self, cxt_id, table_name, match_key, mbr_handle, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - mbr_handle
     - options
    """
    pass
  def bm_mt_indirect_modify_entry(self, cxt_id, table_name, entry_handle, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - mbr_handle
    """
    pass
  def bm_mt_indirect_delete_entry(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    pass
  def bm_mt_indirect_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - timeout_ms
    """
    pass
  def bm_mt_indirect_set_default_member(self, cxt_id, table_name, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
    """
    pass
  def bm_mt_indirect_ws_create_group(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_mt_indirect_ws_delete_group(self, cxt_id, table_name, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - grp_handle
    """
    pass
  def bm_mt_indirect_ws_add_member_to_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - grp_handle
    """
    pass
  def bm_mt_indirect_ws_remove_member_from_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - grp_handle
    """
    pass
  def bm_mt_indirect_ws_add_entry(self, cxt_id, table_name, match_key, grp_handle, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - grp_handle
     - options
    """
    pass
  def bm_mt_indirect_ws_modify_entry(self, cxt_id, table_name, entry_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - grp_handle
    """
    pass
  def bm_mt_indirect_ws_set_default_group(self, cxt_id, table_name, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - grp_handle
    """
    pass
  def bm_mt_read_counter(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    pass
  def bm_mt_reset_counters(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_mt_write_counter(self, cxt_id, table_name, entry_handle, value):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - value
    """
    pass
  def bm_mt_set_meter_rates(self, cxt_id, table_name, entry_handle, rates):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - rates
    """
    pass
  def bm_mt_get_entries(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_mt_get_default_entry(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_mt_indirect_get_members(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_mt_indirect_ws_get_groups(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    pass
  def bm_counter_read(self, cxt_id, counter_name, index):
    """
    Parameters:
     - cxt_id
     - counter_name
     - index
    """
    pass
  def bm_counter_reset_all(self, cxt_id, counter_name):
    """
    Parameters:
     - cxt_id
     - counter_name
    """
    pass
  def bm_counter_write(self, cxt_id, counter_name, index, value):
    """
    Parameters:
     - cxt_id
     - counter_name
     - index
     - value
    """
    pass
  def bm_learning_ack(self, cxt_id, list_id, buffer_id, sample_ids):
    """
    Parameters:
     - cxt_id
     - list_id
     - buffer_id
     - sample_ids
    """
    pass
  def bm_learning_ack_buffer(self, cxt_id, list_id, buffer_id):
    """
    Parameters:
     - cxt_id
     - list_id
     - buffer_id
    """
    pass
  def bm_learning_set_timeout(self, cxt_id, list_id, timeout_ms):
    """
    Parameters:
     - cxt_id
     - list_id
     - timeout_ms
    """
    pass
  def bm_learning_set_buffer_size(self, cxt_id, list_id, nb_samples):
    """
    Parameters:
     - cxt_id
     - list_id
     - nb_samples
    """
    pass
  def bm_load_new_config(self, config_str):
    """
    Parameters:
     - config_str
    """
    pass
  def bm_swap_configs(self):
    pass
  def bm_meter_array_set_rates(self, cxt_id, meter_array_name, rates):
    """
    Parameters:
     - cxt_id
     - meter_array_name
     - rates
    """
    pass
  def bm_meter_set_rates(self, cxt_id, meter_array_name, index, rates):
    """
    Parameters:
     - cxt_id
     - meter_array_name
     - index
     - rates
    """
    pass
  def bm_register_read(self, cxt_id, register_array_name, idx):
    """
    Parameters:
     - cxt_id
     - register_array_name
     - idx
    """
    pass
  def bm_register_write(self, cxt_id, register_array_name, index, value):
    """
    Parameters:
     - cxt_id
     - register_array_name
     - index
     - value
    """
    pass
  def bm_register_write_range(self, cxt_id, register_array_name, start_index, end_index, value):
    """
    Parameters:
     - cxt_id
     - register_array_name
     - start_index
     - end_index
     - value
    """
    pass
  def bm_register_reset(self, cxt_id, register_array_name):
    """
    Parameters:
     - cxt_id
     - register_array_name
    """
    pass
  def bm_dev_mgr_add_port(self, iface_name, port_num, pcap_path):
    """
    Parameters:
     - iface_name
     - port_num
     - pcap_path
    """
    pass
  def bm_dev_mgr_remove_port(self, port_num):
    """
    Parameters:
     - port_num
    """
    pass
  def bm_dev_mgr_show_ports(self):
    pass
  def bm_mgmt_get_info(self):
    pass
  def bm_set_crc16_custom_parameters(self, cxt_id, calc_name, crc16_config):
    """
    Parameters:
     - cxt_id
     - calc_name
     - crc16_config
    """
    pass
  def bm_set_crc32_custom_parameters(self, cxt_id, calc_name, crc32_config):
    """
    Parameters:
     - cxt_id
     - calc_name
     - crc32_config
    """
    pass
  def bm_reset_state(self):
    pass
  def bm_get_config(self):
    pass
  def bm_get_config_md5(self):
    pass
  def bm_serialize_state(self):
    pass
class Client(Iface):
  def __init__(self, iprot, oprot=None):
    """Create a client over the given Thrift protocol(s).

    By default the same protocol object is used for both input and
    output; pass a distinct `oprot` for duplex transports.
    """
    self._iprot = self._oprot = iprot
    if oprot is not None:
      self._oprot = oprot
    # Sequence id attached to every outgoing message (never incremented
    # by the generated code in this file).
    self._seqid = 0
  # Direct match-action table RPCs. Each call is synchronous: the public
  # method writes a CALL message (send_*) and then blocks reading the
  # reply (recv_*). A server-reported error arrives in result.ouch and is
  # re-raised locally; a transport-level fault arrives as a
  # TApplicationException.
  def bm_mt_add_entry(self, cxt_id, table_name, match_key, action_name, action_data, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - action_name
     - action_data
     - options
    """
    self.send_bm_mt_add_entry(cxt_id, table_name, match_key, action_name, action_data, options)
    return self.recv_bm_mt_add_entry()
  def send_bm_mt_add_entry(self, cxt_id, table_name, match_key, action_name, action_data, options):
    self._oprot.writeMessageBegin('bm_mt_add_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_add_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.match_key = match_key
    args.action_name = action_name
    args.action_data = action_data
    args.options = options
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_add_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_add_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      # Entry handle assigned by the server.
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_add_entry failed: unknown result")
  def bm_mt_set_default_action(self, cxt_id, table_name, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - action_name
     - action_data
    """
    self.send_bm_mt_set_default_action(cxt_id, table_name, action_name, action_data)
    self.recv_bm_mt_set_default_action()
  def send_bm_mt_set_default_action(self, cxt_id, table_name, action_name, action_data):
    self._oprot.writeMessageBegin('bm_mt_set_default_action', TMessageType.CALL, self._seqid)
    args = bm_mt_set_default_action_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.action_name = action_name
    args.action_data = action_data
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_set_default_action(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_set_default_action_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    # void RPC: nothing to return on success.
    return
  def bm_mt_delete_entry(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    self.send_bm_mt_delete_entry(cxt_id, table_name, entry_handle)
    self.recv_bm_mt_delete_entry()
  def send_bm_mt_delete_entry(self, cxt_id, table_name, entry_handle):
    self._oprot.writeMessageBegin('bm_mt_delete_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_delete_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_delete_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_delete_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_modify_entry(self, cxt_id, table_name, entry_handle, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - action_name
     - action_data
    """
    self.send_bm_mt_modify_entry(cxt_id, table_name, entry_handle, action_name, action_data)
    self.recv_bm_mt_modify_entry()
  def send_bm_mt_modify_entry(self, cxt_id, table_name, entry_handle, action_name, action_data):
    self._oprot.writeMessageBegin('bm_mt_modify_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_modify_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.action_name = action_name
    args.action_data = action_data
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_modify_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_modify_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - timeout_ms
    """
    self.send_bm_mt_set_entry_ttl(cxt_id, table_name, entry_handle, timeout_ms)
    self.recv_bm_mt_set_entry_ttl()
  def send_bm_mt_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    self._oprot.writeMessageBegin('bm_mt_set_entry_ttl', TMessageType.CALL, self._seqid)
    args = bm_mt_set_entry_ttl_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.timeout_ms = timeout_ms
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_set_entry_ttl(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_set_entry_ttl_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  # Indirect match-action table RPCs (entries point at action members via
  # mbr_handle). Same synchronous send_*/recv_* pattern as the direct
  # table calls above: result.ouch is re-raised; protocol-level failures
  # surface as TApplicationException.
  def bm_mt_indirect_add_member(self, cxt_id, table_name, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - action_name
     - action_data
    """
    self.send_bm_mt_indirect_add_member(cxt_id, table_name, action_name, action_data)
    return self.recv_bm_mt_indirect_add_member()
  def send_bm_mt_indirect_add_member(self, cxt_id, table_name, action_name, action_data):
    self._oprot.writeMessageBegin('bm_mt_indirect_add_member', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_add_member_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.action_name = action_name
    args.action_data = action_data
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_add_member(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_add_member_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      # Member handle assigned by the server.
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_add_member failed: unknown result")
  def bm_mt_indirect_delete_member(self, cxt_id, table_name, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
    """
    self.send_bm_mt_indirect_delete_member(cxt_id, table_name, mbr_handle)
    self.recv_bm_mt_indirect_delete_member()
  def send_bm_mt_indirect_delete_member(self, cxt_id, table_name, mbr_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_delete_member', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_delete_member_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.mbr_handle = mbr_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_delete_member(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_delete_member_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_modify_member(self, cxt_id, table_name, mbr_handle, action_name, action_data):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - action_name
     - action_data
    """
    self.send_bm_mt_indirect_modify_member(cxt_id, table_name, mbr_handle, action_name, action_data)
    self.recv_bm_mt_indirect_modify_member()
  def send_bm_mt_indirect_modify_member(self, cxt_id, table_name, mbr_handle, action_name, action_data):
    self._oprot.writeMessageBegin('bm_mt_indirect_modify_member', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_modify_member_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.mbr_handle = mbr_handle
    args.action_name = action_name
    args.action_data = action_data
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_modify_member(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_modify_member_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_add_entry(self, cxt_id, table_name, match_key, mbr_handle, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - mbr_handle
     - options
    """
    self.send_bm_mt_indirect_add_entry(cxt_id, table_name, match_key, mbr_handle, options)
    return self.recv_bm_mt_indirect_add_entry()
  def send_bm_mt_indirect_add_entry(self, cxt_id, table_name, match_key, mbr_handle, options):
    self._oprot.writeMessageBegin('bm_mt_indirect_add_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_add_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.match_key = match_key
    args.mbr_handle = mbr_handle
    args.options = options
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_add_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_add_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_add_entry failed: unknown result")
  def bm_mt_indirect_modify_entry(self, cxt_id, table_name, entry_handle, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - mbr_handle
    """
    self.send_bm_mt_indirect_modify_entry(cxt_id, table_name, entry_handle, mbr_handle)
    self.recv_bm_mt_indirect_modify_entry()
  def send_bm_mt_indirect_modify_entry(self, cxt_id, table_name, entry_handle, mbr_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_modify_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_modify_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.mbr_handle = mbr_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_modify_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_modify_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_delete_entry(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    self.send_bm_mt_indirect_delete_entry(cxt_id, table_name, entry_handle)
    self.recv_bm_mt_indirect_delete_entry()
  def send_bm_mt_indirect_delete_entry(self, cxt_id, table_name, entry_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_delete_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_delete_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_delete_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_delete_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - timeout_ms
    """
    self.send_bm_mt_indirect_set_entry_ttl(cxt_id, table_name, entry_handle, timeout_ms)
    self.recv_bm_mt_indirect_set_entry_ttl()
  def send_bm_mt_indirect_set_entry_ttl(self, cxt_id, table_name, entry_handle, timeout_ms):
    self._oprot.writeMessageBegin('bm_mt_indirect_set_entry_ttl', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_set_entry_ttl_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.timeout_ms = timeout_ms
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_set_entry_ttl(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_set_entry_ttl_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_set_default_member(self, cxt_id, table_name, mbr_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
    """
    self.send_bm_mt_indirect_set_default_member(cxt_id, table_name, mbr_handle)
    self.recv_bm_mt_indirect_set_default_member()
  def send_bm_mt_indirect_set_default_member(self, cxt_id, table_name, mbr_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_set_default_member', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_set_default_member_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.mbr_handle = mbr_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_set_default_member(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_set_default_member_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  # Indirect "ws" table RPCs: manage selector groups (grp_handle) of
  # action members and entries that point at a group instead of a single
  # member. Same synchronous send_*/recv_* pattern as the other table
  # calls: result.ouch is re-raised, protocol-level failures surface as
  # TApplicationException.
  def bm_mt_indirect_ws_create_group(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    self.send_bm_mt_indirect_ws_create_group(cxt_id, table_name)
    return self.recv_bm_mt_indirect_ws_create_group()
  def send_bm_mt_indirect_ws_create_group(self, cxt_id, table_name):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_create_group', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_create_group_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_create_group(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_create_group_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      # Group handle assigned by the server.
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_ws_create_group failed: unknown result")
  def bm_mt_indirect_ws_delete_group(self, cxt_id, table_name, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - grp_handle
    """
    self.send_bm_mt_indirect_ws_delete_group(cxt_id, table_name, grp_handle)
    self.recv_bm_mt_indirect_ws_delete_group()
  def send_bm_mt_indirect_ws_delete_group(self, cxt_id, table_name, grp_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_delete_group', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_delete_group_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.grp_handle = grp_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_delete_group(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_delete_group_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_ws_add_member_to_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - grp_handle
    """
    self.send_bm_mt_indirect_ws_add_member_to_group(cxt_id, table_name, mbr_handle, grp_handle)
    self.recv_bm_mt_indirect_ws_add_member_to_group()
  def send_bm_mt_indirect_ws_add_member_to_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_add_member_to_group', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_add_member_to_group_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.mbr_handle = mbr_handle
    args.grp_handle = grp_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_add_member_to_group(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_add_member_to_group_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_ws_remove_member_from_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - mbr_handle
     - grp_handle
    """
    self.send_bm_mt_indirect_ws_remove_member_from_group(cxt_id, table_name, mbr_handle, grp_handle)
    self.recv_bm_mt_indirect_ws_remove_member_from_group()
  def send_bm_mt_indirect_ws_remove_member_from_group(self, cxt_id, table_name, mbr_handle, grp_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_remove_member_from_group', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_remove_member_from_group_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.mbr_handle = mbr_handle
    args.grp_handle = grp_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_remove_member_from_group(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_remove_member_from_group_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_ws_add_entry(self, cxt_id, table_name, match_key, grp_handle, options):
    """
    Parameters:
     - cxt_id
     - table_name
     - match_key
     - grp_handle
     - options
    """
    self.send_bm_mt_indirect_ws_add_entry(cxt_id, table_name, match_key, grp_handle, options)
    return self.recv_bm_mt_indirect_ws_add_entry()
  def send_bm_mt_indirect_ws_add_entry(self, cxt_id, table_name, match_key, grp_handle, options):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_add_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_add_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.match_key = match_key
    args.grp_handle = grp_handle
    args.options = options
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_add_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_add_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_ws_add_entry failed: unknown result")
  def bm_mt_indirect_ws_modify_entry(self, cxt_id, table_name, entry_handle, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - grp_handle
    """
    self.send_bm_mt_indirect_ws_modify_entry(cxt_id, table_name, entry_handle, grp_handle)
    self.recv_bm_mt_indirect_ws_modify_entry()
  def send_bm_mt_indirect_ws_modify_entry(self, cxt_id, table_name, entry_handle, grp_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_modify_entry', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_modify_entry_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.grp_handle = grp_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_modify_entry(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_modify_entry_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_indirect_ws_set_default_group(self, cxt_id, table_name, grp_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - grp_handle
    """
    self.send_bm_mt_indirect_ws_set_default_group(cxt_id, table_name, grp_handle)
    self.recv_bm_mt_indirect_ws_set_default_group()
  def send_bm_mt_indirect_ws_set_default_group(self, cxt_id, table_name, grp_handle):
    self._oprot.writeMessageBegin('bm_mt_indirect_ws_set_default_group', TMessageType.CALL, self._seqid)
    args = bm_mt_indirect_ws_set_default_group_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.grp_handle = grp_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_indirect_ws_set_default_group(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_indirect_ws_set_default_group_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  # Per-table-entry counter and meter RPCs. Same synchronous
  # send_*/recv_* pattern: result.ouch is re-raised, protocol-level
  # failures surface as TApplicationException.
  def bm_mt_read_counter(self, cxt_id, table_name, entry_handle):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
    """
    self.send_bm_mt_read_counter(cxt_id, table_name, entry_handle)
    return self.recv_bm_mt_read_counter()
  def send_bm_mt_read_counter(self, cxt_id, table_name, entry_handle):
    self._oprot.writeMessageBegin('bm_mt_read_counter', TMessageType.CALL, self._seqid)
    args = bm_mt_read_counter_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_read_counter(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_read_counter_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      # Counter value for the entry (type declared in ttypes).
      return result.success
    if result.ouch is not None:
      raise result.ouch
    raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_read_counter failed: unknown result")
  def bm_mt_reset_counters(self, cxt_id, table_name):
    """
    Parameters:
     - cxt_id
     - table_name
    """
    self.send_bm_mt_reset_counters(cxt_id, table_name)
    self.recv_bm_mt_reset_counters()
  def send_bm_mt_reset_counters(self, cxt_id, table_name):
    self._oprot.writeMessageBegin('bm_mt_reset_counters', TMessageType.CALL, self._seqid)
    args = bm_mt_reset_counters_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_reset_counters(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_reset_counters_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_write_counter(self, cxt_id, table_name, entry_handle, value):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - value
    """
    self.send_bm_mt_write_counter(cxt_id, table_name, entry_handle, value)
    self.recv_bm_mt_write_counter()
  def send_bm_mt_write_counter(self, cxt_id, table_name, entry_handle, value):
    self._oprot.writeMessageBegin('bm_mt_write_counter', TMessageType.CALL, self._seqid)
    args = bm_mt_write_counter_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.value = value
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_write_counter(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_write_counter_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
  def bm_mt_set_meter_rates(self, cxt_id, table_name, entry_handle, rates):
    """
    Parameters:
     - cxt_id
     - table_name
     - entry_handle
     - rates
    """
    self.send_bm_mt_set_meter_rates(cxt_id, table_name, entry_handle, rates)
    self.recv_bm_mt_set_meter_rates()
  def send_bm_mt_set_meter_rates(self, cxt_id, table_name, entry_handle, rates):
    self._oprot.writeMessageBegin('bm_mt_set_meter_rates', TMessageType.CALL, self._seqid)
    args = bm_mt_set_meter_rates_args()
    args.cxt_id = cxt_id
    args.table_name = table_name
    args.entry_handle = entry_handle
    args.rates = rates
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_bm_mt_set_meter_rates(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = bm_mt_set_meter_rates_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.ouch is not None:
      raise result.ouch
    return
def bm_mt_get_entries(self, cxt_id, table_name):
"""
Parameters:
- cxt_id
- table_name
"""
self.send_bm_mt_get_entries(cxt_id, table_name)
return self.recv_bm_mt_get_entries()
def send_bm_mt_get_entries(self, cxt_id, table_name):
self._oprot.writeMessageBegin('bm_mt_get_entries', TMessageType.CALL, self._seqid)
args = bm_mt_get_entries_args()
args.cxt_id = cxt_id
args.table_name = table_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_bm_mt_get_entries(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = bm_mt_get_entries_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.ouch is not None:
raise result.ouch
raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_get_entries failed: unknown result")
def bm_mt_get_default_entry(self, cxt_id, table_name):
  """
  Blocking RPC wrapper: send bm_mt_get_default_entry and wait for the reply.

  Parameters:
   - cxt_id
   - table_name
  """
  self.send_bm_mt_get_default_entry(cxt_id, table_name)
  return self.recv_bm_mt_get_default_entry()
def send_bm_mt_get_default_entry(self, cxt_id, table_name):
  """Serialize the bm_mt_get_default_entry arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_mt_get_default_entry', TMessageType.CALL, self._seqid)
  args = bm_mt_get_default_entry_args()
  args.cxt_id = cxt_id
  args.table_name = table_name
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_mt_get_default_entry(self):
  """Read the bm_mt_get_default_entry reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_mt_get_default_entry_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_get_default_entry failed: unknown result")
def bm_mt_indirect_get_members(self, cxt_id, table_name):
  """
  Blocking RPC wrapper: send bm_mt_indirect_get_members and wait for the reply.

  Parameters:
   - cxt_id
   - table_name
  """
  self.send_bm_mt_indirect_get_members(cxt_id, table_name)
  return self.recv_bm_mt_indirect_get_members()
def send_bm_mt_indirect_get_members(self, cxt_id, table_name):
  """Serialize the bm_mt_indirect_get_members arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_mt_indirect_get_members', TMessageType.CALL, self._seqid)
  args = bm_mt_indirect_get_members_args()
  args.cxt_id = cxt_id
  args.table_name = table_name
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_mt_indirect_get_members(self):
  """Read the bm_mt_indirect_get_members reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_mt_indirect_get_members_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_get_members failed: unknown result")
def bm_mt_indirect_ws_get_groups(self, cxt_id, table_name):
  """
  Blocking RPC wrapper: send bm_mt_indirect_ws_get_groups and wait for the reply.

  Parameters:
   - cxt_id
   - table_name
  """
  self.send_bm_mt_indirect_ws_get_groups(cxt_id, table_name)
  return self.recv_bm_mt_indirect_ws_get_groups()
def send_bm_mt_indirect_ws_get_groups(self, cxt_id, table_name):
  """Serialize the bm_mt_indirect_ws_get_groups arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_mt_indirect_ws_get_groups', TMessageType.CALL, self._seqid)
  args = bm_mt_indirect_ws_get_groups_args()
  args.cxt_id = cxt_id
  args.table_name = table_name
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_mt_indirect_ws_get_groups(self):
  """Read the bm_mt_indirect_ws_get_groups reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_mt_indirect_ws_get_groups_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mt_indirect_ws_get_groups failed: unknown result")
def bm_counter_read(self, cxt_id, counter_name, index):
  """
  Blocking RPC wrapper: send bm_counter_read and wait for the reply.

  Parameters:
   - cxt_id
   - counter_name
   - index
  """
  self.send_bm_counter_read(cxt_id, counter_name, index)
  return self.recv_bm_counter_read()
def send_bm_counter_read(self, cxt_id, counter_name, index):
  """Serialize the bm_counter_read arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_counter_read', TMessageType.CALL, self._seqid)
  args = bm_counter_read_args()
  args.cxt_id = cxt_id
  args.counter_name = counter_name
  args.index = index
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_counter_read(self):
  """Read the bm_counter_read reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_counter_read_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_counter_read failed: unknown result")
def bm_counter_reset_all(self, cxt_id, counter_name):
  """
  Blocking RPC wrapper: send bm_counter_reset_all and wait for the server's ack.

  Parameters:
   - cxt_id
   - counter_name
  """
  self.send_bm_counter_reset_all(cxt_id, counter_name)
  self.recv_bm_counter_reset_all()
def send_bm_counter_reset_all(self, cxt_id, counter_name):
  """Serialize the bm_counter_reset_all arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_counter_reset_all', TMessageType.CALL, self._seqid)
  args = bm_counter_reset_all_args()
  args.cxt_id = cxt_id
  args.counter_name = counter_name
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_counter_reset_all(self):
  """Read the bm_counter_reset_all ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_counter_reset_all_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_counter_write(self, cxt_id, counter_name, index, value):
  """
  Blocking RPC wrapper: send bm_counter_write and wait for the server's ack.

  Parameters:
   - cxt_id
   - counter_name
   - index
   - value
  """
  self.send_bm_counter_write(cxt_id, counter_name, index, value)
  self.recv_bm_counter_write()
def send_bm_counter_write(self, cxt_id, counter_name, index, value):
  """Serialize the bm_counter_write arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_counter_write', TMessageType.CALL, self._seqid)
  args = bm_counter_write_args()
  args.cxt_id = cxt_id
  args.counter_name = counter_name
  args.index = index
  args.value = value
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_counter_write(self):
  """Read the bm_counter_write ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_counter_write_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_learning_ack(self, cxt_id, list_id, buffer_id, sample_ids):
  """
  Blocking RPC wrapper: send bm_learning_ack and wait for the server's ack.

  Parameters:
   - cxt_id
   - list_id
   - buffer_id
   - sample_ids
  """
  self.send_bm_learning_ack(cxt_id, list_id, buffer_id, sample_ids)
  self.recv_bm_learning_ack()
def send_bm_learning_ack(self, cxt_id, list_id, buffer_id, sample_ids):
  """Serialize the bm_learning_ack arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_learning_ack', TMessageType.CALL, self._seqid)
  args = bm_learning_ack_args()
  args.cxt_id = cxt_id
  args.list_id = list_id
  args.buffer_id = buffer_id
  args.sample_ids = sample_ids
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_learning_ack(self):
  """Read the bm_learning_ack ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_learning_ack_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_learning_ack_buffer(self, cxt_id, list_id, buffer_id):
  """
  Blocking RPC wrapper: send bm_learning_ack_buffer and wait for the server's ack.

  Parameters:
   - cxt_id
   - list_id
   - buffer_id
  """
  self.send_bm_learning_ack_buffer(cxt_id, list_id, buffer_id)
  self.recv_bm_learning_ack_buffer()
def send_bm_learning_ack_buffer(self, cxt_id, list_id, buffer_id):
  """Serialize the bm_learning_ack_buffer arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_learning_ack_buffer', TMessageType.CALL, self._seqid)
  args = bm_learning_ack_buffer_args()
  args.cxt_id = cxt_id
  args.list_id = list_id
  args.buffer_id = buffer_id
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_learning_ack_buffer(self):
  """Read the bm_learning_ack_buffer ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_learning_ack_buffer_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_learning_set_timeout(self, cxt_id, list_id, timeout_ms):
  """
  Blocking RPC wrapper: send bm_learning_set_timeout and wait for the server's ack.

  Parameters:
   - cxt_id
   - list_id
   - timeout_ms
  """
  self.send_bm_learning_set_timeout(cxt_id, list_id, timeout_ms)
  self.recv_bm_learning_set_timeout()
def send_bm_learning_set_timeout(self, cxt_id, list_id, timeout_ms):
  """Serialize the bm_learning_set_timeout arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_learning_set_timeout', TMessageType.CALL, self._seqid)
  args = bm_learning_set_timeout_args()
  args.cxt_id = cxt_id
  args.list_id = list_id
  args.timeout_ms = timeout_ms
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_learning_set_timeout(self):
  """Read the bm_learning_set_timeout ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_learning_set_timeout_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_learning_set_buffer_size(self, cxt_id, list_id, nb_samples):
  """
  Blocking RPC wrapper: send bm_learning_set_buffer_size and wait for the server's ack.

  Parameters:
   - cxt_id
   - list_id
   - nb_samples
  """
  self.send_bm_learning_set_buffer_size(cxt_id, list_id, nb_samples)
  self.recv_bm_learning_set_buffer_size()
def send_bm_learning_set_buffer_size(self, cxt_id, list_id, nb_samples):
  """Serialize the bm_learning_set_buffer_size arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_learning_set_buffer_size', TMessageType.CALL, self._seqid)
  args = bm_learning_set_buffer_size_args()
  args.cxt_id = cxt_id
  args.list_id = list_id
  args.nb_samples = nb_samples
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_learning_set_buffer_size(self):
  """Read the bm_learning_set_buffer_size ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_learning_set_buffer_size_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_load_new_config(self, config_str):
  """
  Blocking RPC wrapper: send bm_load_new_config and wait for the server's ack.

  Parameters:
   - config_str
  """
  self.send_bm_load_new_config(config_str)
  self.recv_bm_load_new_config()
def send_bm_load_new_config(self, config_str):
  """Serialize the bm_load_new_config arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_load_new_config', TMessageType.CALL, self._seqid)
  args = bm_load_new_config_args()
  args.config_str = config_str
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_load_new_config(self):
  """Read the bm_load_new_config ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_load_new_config_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_swap_configs(self):
  """Blocking RPC wrapper: send bm_swap_configs (no arguments) and wait for the server's ack."""
  self.send_bm_swap_configs()
  self.recv_bm_swap_configs()
def send_bm_swap_configs(self):
  """Write the (empty) bm_swap_configs CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_swap_configs', TMessageType.CALL, self._seqid)
  args = bm_swap_configs_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_swap_configs(self):
  """Read the bm_swap_configs ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_swap_configs_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_meter_array_set_rates(self, cxt_id, meter_array_name, rates):
  """
  Blocking RPC wrapper: send bm_meter_array_set_rates and wait for the server's ack.

  Parameters:
   - cxt_id
   - meter_array_name
   - rates
  """
  self.send_bm_meter_array_set_rates(cxt_id, meter_array_name, rates)
  self.recv_bm_meter_array_set_rates()
def send_bm_meter_array_set_rates(self, cxt_id, meter_array_name, rates):
  """Serialize the bm_meter_array_set_rates arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_meter_array_set_rates', TMessageType.CALL, self._seqid)
  args = bm_meter_array_set_rates_args()
  args.cxt_id = cxt_id
  args.meter_array_name = meter_array_name
  args.rates = rates
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_meter_array_set_rates(self):
  """Read the bm_meter_array_set_rates ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_meter_array_set_rates_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_meter_set_rates(self, cxt_id, meter_array_name, index, rates):
  """
  Blocking RPC wrapper: send bm_meter_set_rates and wait for the server's ack.

  Parameters:
   - cxt_id
   - meter_array_name
   - index
   - rates
  """
  self.send_bm_meter_set_rates(cxt_id, meter_array_name, index, rates)
  self.recv_bm_meter_set_rates()
def send_bm_meter_set_rates(self, cxt_id, meter_array_name, index, rates):
  """Serialize the bm_meter_set_rates arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_meter_set_rates', TMessageType.CALL, self._seqid)
  args = bm_meter_set_rates_args()
  args.cxt_id = cxt_id
  args.meter_array_name = meter_array_name
  args.index = index
  args.rates = rates
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_meter_set_rates(self):
  """Read the bm_meter_set_rates ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_meter_set_rates_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_register_read(self, cxt_id, register_array_name, idx):
  """
  Blocking RPC wrapper: send bm_register_read and wait for the reply.

  Parameters:
   - cxt_id
   - register_array_name
   - idx
  """
  self.send_bm_register_read(cxt_id, register_array_name, idx)
  return self.recv_bm_register_read()
def send_bm_register_read(self, cxt_id, register_array_name, idx):
  """Serialize the bm_register_read arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_register_read', TMessageType.CALL, self._seqid)
  args = bm_register_read_args()
  args.cxt_id = cxt_id
  args.register_array_name = register_array_name
  args.idx = idx
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_register_read(self):
  """Read the bm_register_read reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_register_read_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_register_read failed: unknown result")
def bm_register_write(self, cxt_id, register_array_name, index, value):
  """
  Blocking RPC wrapper: send bm_register_write and wait for the server's ack.

  Parameters:
   - cxt_id
   - register_array_name
   - index
   - value
  """
  self.send_bm_register_write(cxt_id, register_array_name, index, value)
  self.recv_bm_register_write()
def send_bm_register_write(self, cxt_id, register_array_name, index, value):
  """Serialize the bm_register_write arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_register_write', TMessageType.CALL, self._seqid)
  args = bm_register_write_args()
  args.cxt_id = cxt_id
  args.register_array_name = register_array_name
  args.index = index
  args.value = value
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_register_write(self):
  """Read the bm_register_write ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_register_write_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_register_write_range(self, cxt_id, register_array_name, start_index, end_index, value):
  """
  Blocking RPC wrapper: send bm_register_write_range and wait for the server's ack.

  Parameters:
   - cxt_id
   - register_array_name
   - start_index
   - end_index
   - value
  """
  self.send_bm_register_write_range(cxt_id, register_array_name, start_index, end_index, value)
  self.recv_bm_register_write_range()
def send_bm_register_write_range(self, cxt_id, register_array_name, start_index, end_index, value):
  """Serialize the bm_register_write_range arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_register_write_range', TMessageType.CALL, self._seqid)
  args = bm_register_write_range_args()
  args.cxt_id = cxt_id
  args.register_array_name = register_array_name
  args.start_index = start_index
  args.end_index = end_index
  args.value = value
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_register_write_range(self):
  """Read the bm_register_write_range ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_register_write_range_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_register_reset(self, cxt_id, register_array_name):
  """
  Blocking RPC wrapper: send bm_register_reset and wait for the server's ack.

  Parameters:
   - cxt_id
   - register_array_name
  """
  self.send_bm_register_reset(cxt_id, register_array_name)
  self.recv_bm_register_reset()
def send_bm_register_reset(self, cxt_id, register_array_name):
  """Serialize the bm_register_reset arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_register_reset', TMessageType.CALL, self._seqid)
  args = bm_register_reset_args()
  args.cxt_id = cxt_id
  args.register_array_name = register_array_name
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_register_reset(self):
  """Read the bm_register_reset ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_register_reset_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_dev_mgr_add_port(self, iface_name, port_num, pcap_path):
  """
  Blocking RPC wrapper: send bm_dev_mgr_add_port and wait for the server's ack.

  Parameters:
   - iface_name
   - port_num
   - pcap_path
  """
  self.send_bm_dev_mgr_add_port(iface_name, port_num, pcap_path)
  self.recv_bm_dev_mgr_add_port()
def send_bm_dev_mgr_add_port(self, iface_name, port_num, pcap_path):
  """Serialize the bm_dev_mgr_add_port arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_dev_mgr_add_port', TMessageType.CALL, self._seqid)
  args = bm_dev_mgr_add_port_args()
  args.iface_name = iface_name
  args.port_num = port_num
  args.pcap_path = pcap_path
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_dev_mgr_add_port(self):
  """Read the bm_dev_mgr_add_port ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_dev_mgr_add_port_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_dev_mgr_remove_port(self, port_num):
  """
  Blocking RPC wrapper: send bm_dev_mgr_remove_port and wait for the server's ack.

  Parameters:
   - port_num
  """
  self.send_bm_dev_mgr_remove_port(port_num)
  self.recv_bm_dev_mgr_remove_port()
def send_bm_dev_mgr_remove_port(self, port_num):
  """Serialize the bm_dev_mgr_remove_port arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_dev_mgr_remove_port', TMessageType.CALL, self._seqid)
  args = bm_dev_mgr_remove_port_args()
  args.port_num = port_num
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_dev_mgr_remove_port(self):
  """Read the bm_dev_mgr_remove_port ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_dev_mgr_remove_port_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_dev_mgr_show_ports(self):
  """Blocking RPC wrapper: send bm_dev_mgr_show_ports (no arguments) and return the reply."""
  self.send_bm_dev_mgr_show_ports()
  return self.recv_bm_dev_mgr_show_ports()
def send_bm_dev_mgr_show_ports(self):
  """Write the (empty) bm_dev_mgr_show_ports CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_dev_mgr_show_ports', TMessageType.CALL, self._seqid)
  args = bm_dev_mgr_show_ports_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_dev_mgr_show_ports(self):
  """Read the bm_dev_mgr_show_ports reply; return result.success or raise the remote error."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_dev_mgr_show_ports_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  if result.ouch is not None:
    raise result.ouch
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_dev_mgr_show_ports failed: unknown result")
def bm_mgmt_get_info(self):
  """Blocking RPC wrapper: send bm_mgmt_get_info (no arguments) and return the reply."""
  self.send_bm_mgmt_get_info()
  return self.recv_bm_mgmt_get_info()
def send_bm_mgmt_get_info(self):
  """Write the (empty) bm_mgmt_get_info CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_mgmt_get_info', TMessageType.CALL, self._seqid)
  args = bm_mgmt_get_info_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_mgmt_get_info(self):
  """Read the bm_mgmt_get_info reply; return result.success (no declared service exception)."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_mgmt_get_info_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_mgmt_get_info failed: unknown result")
def bm_set_crc16_custom_parameters(self, cxt_id, calc_name, crc16_config):
  """
  Blocking RPC wrapper: send bm_set_crc16_custom_parameters and wait for the server's ack.

  Parameters:
   - cxt_id
   - calc_name
   - crc16_config
  """
  self.send_bm_set_crc16_custom_parameters(cxt_id, calc_name, crc16_config)
  self.recv_bm_set_crc16_custom_parameters()
def send_bm_set_crc16_custom_parameters(self, cxt_id, calc_name, crc16_config):
  """Serialize the bm_set_crc16_custom_parameters arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_set_crc16_custom_parameters', TMessageType.CALL, self._seqid)
  args = bm_set_crc16_custom_parameters_args()
  args.cxt_id = cxt_id
  args.calc_name = calc_name
  args.crc16_config = crc16_config
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_set_crc16_custom_parameters(self):
  """Read the bm_set_crc16_custom_parameters ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_set_crc16_custom_parameters_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_set_crc32_custom_parameters(self, cxt_id, calc_name, crc32_config):
  """
  Blocking RPC wrapper: send bm_set_crc32_custom_parameters and wait for the server's ack.

  Parameters:
   - cxt_id
   - calc_name
   - crc32_config
  """
  self.send_bm_set_crc32_custom_parameters(cxt_id, calc_name, crc32_config)
  self.recv_bm_set_crc32_custom_parameters()
def send_bm_set_crc32_custom_parameters(self, cxt_id, calc_name, crc32_config):
  """Serialize the bm_set_crc32_custom_parameters arguments and write the CALL message; flush the transport."""
  self._oprot.writeMessageBegin('bm_set_crc32_custom_parameters', TMessageType.CALL, self._seqid)
  args = bm_set_crc32_custom_parameters_args()
  args.cxt_id = cxt_id
  args.calc_name = calc_name
  args.crc32_config = crc32_config
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_set_crc32_custom_parameters(self):
  """Read the bm_set_crc32_custom_parameters ack; raise the remote error (result.ouch) if the server set one."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_set_crc32_custom_parameters_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.ouch is not None:
    raise result.ouch
  return
def bm_reset_state(self):
  """Blocking RPC wrapper: send bm_reset_state (no arguments) and wait for the server's ack."""
  self.send_bm_reset_state()
  self.recv_bm_reset_state()
def send_bm_reset_state(self):
  """Write the (empty) bm_reset_state CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_reset_state', TMessageType.CALL, self._seqid)
  args = bm_reset_state_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_reset_state(self):
  """Read the bm_reset_state ack (no result fields to check)."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_reset_state_result()
  result.read(iprot)
  iprot.readMessageEnd()
  return
def bm_get_config(self):
  """Blocking RPC wrapper: send bm_get_config (no arguments) and return the reply."""
  self.send_bm_get_config()
  return self.recv_bm_get_config()
def send_bm_get_config(self):
  """Write the (empty) bm_get_config CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_get_config', TMessageType.CALL, self._seqid)
  args = bm_get_config_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_get_config(self):
  """Read the bm_get_config reply; return result.success (no declared service exception)."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_get_config_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_get_config failed: unknown result")
def bm_get_config_md5(self):
  """Blocking RPC wrapper: send bm_get_config_md5 (no arguments) and return the reply."""
  self.send_bm_get_config_md5()
  return self.recv_bm_get_config_md5()
def send_bm_get_config_md5(self):
  """Write the (empty) bm_get_config_md5 CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_get_config_md5', TMessageType.CALL, self._seqid)
  args = bm_get_config_md5_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_get_config_md5(self):
  """Read the bm_get_config_md5 reply; return result.success (no declared service exception)."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_get_config_md5_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_get_config_md5 failed: unknown result")
def bm_serialize_state(self):
  """Blocking RPC wrapper: send bm_serialize_state (no arguments) and return the reply."""
  self.send_bm_serialize_state()
  return self.recv_bm_serialize_state()
def send_bm_serialize_state(self):
  """Write the (empty) bm_serialize_state CALL message and flush the transport."""
  self._oprot.writeMessageBegin('bm_serialize_state', TMessageType.CALL, self._seqid)
  args = bm_serialize_state_args()
  args.write(self._oprot)
  self._oprot.writeMessageEnd()
  self._oprot.trans.flush()
def recv_bm_serialize_state(self):
  """Read the bm_serialize_state reply; return result.success (no declared service exception)."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    x = TApplicationException()
    x.read(iprot)
    iprot.readMessageEnd()
    raise x
  result = bm_serialize_state_result()
  result.read(iprot)
  iprot.readMessageEnd()
  if result.success is not None:
    return result.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "bm_serialize_state failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
  """Wrap a service handler and build the name -> process_* dispatch table used by process()."""
  self._handler = handler
  # Keys are the wire-level method names; values are unbound process_* methods
  # invoked as self._processMap[name](self, seqid, iprot, oprot).
  self._processMap = {}
  self._processMap["bm_mt_add_entry"] = Processor.process_bm_mt_add_entry
  self._processMap["bm_mt_set_default_action"] = Processor.process_bm_mt_set_default_action
  self._processMap["bm_mt_delete_entry"] = Processor.process_bm_mt_delete_entry
  self._processMap["bm_mt_modify_entry"] = Processor.process_bm_mt_modify_entry
  self._processMap["bm_mt_set_entry_ttl"] = Processor.process_bm_mt_set_entry_ttl
  self._processMap["bm_mt_indirect_add_member"] = Processor.process_bm_mt_indirect_add_member
  self._processMap["bm_mt_indirect_delete_member"] = Processor.process_bm_mt_indirect_delete_member
  self._processMap["bm_mt_indirect_modify_member"] = Processor.process_bm_mt_indirect_modify_member
  self._processMap["bm_mt_indirect_add_entry"] = Processor.process_bm_mt_indirect_add_entry
  self._processMap["bm_mt_indirect_modify_entry"] = Processor.process_bm_mt_indirect_modify_entry
  self._processMap["bm_mt_indirect_delete_entry"] = Processor.process_bm_mt_indirect_delete_entry
  self._processMap["bm_mt_indirect_set_entry_ttl"] = Processor.process_bm_mt_indirect_set_entry_ttl
  self._processMap["bm_mt_indirect_set_default_member"] = Processor.process_bm_mt_indirect_set_default_member
  self._processMap["bm_mt_indirect_ws_create_group"] = Processor.process_bm_mt_indirect_ws_create_group
  self._processMap["bm_mt_indirect_ws_delete_group"] = Processor.process_bm_mt_indirect_ws_delete_group
  self._processMap["bm_mt_indirect_ws_add_member_to_group"] = Processor.process_bm_mt_indirect_ws_add_member_to_group
  self._processMap["bm_mt_indirect_ws_remove_member_from_group"] = Processor.process_bm_mt_indirect_ws_remove_member_from_group
  self._processMap["bm_mt_indirect_ws_add_entry"] = Processor.process_bm_mt_indirect_ws_add_entry
  self._processMap["bm_mt_indirect_ws_modify_entry"] = Processor.process_bm_mt_indirect_ws_modify_entry
  self._processMap["bm_mt_indirect_ws_set_default_group"] = Processor.process_bm_mt_indirect_ws_set_default_group
  self._processMap["bm_mt_read_counter"] = Processor.process_bm_mt_read_counter
  self._processMap["bm_mt_reset_counters"] = Processor.process_bm_mt_reset_counters
  self._processMap["bm_mt_write_counter"] = Processor.process_bm_mt_write_counter
  self._processMap["bm_mt_set_meter_rates"] = Processor.process_bm_mt_set_meter_rates
  self._processMap["bm_mt_get_entries"] = Processor.process_bm_mt_get_entries
  self._processMap["bm_mt_get_default_entry"] = Processor.process_bm_mt_get_default_entry
  self._processMap["bm_mt_indirect_get_members"] = Processor.process_bm_mt_indirect_get_members
  self._processMap["bm_mt_indirect_ws_get_groups"] = Processor.process_bm_mt_indirect_ws_get_groups
  self._processMap["bm_counter_read"] = Processor.process_bm_counter_read
  self._processMap["bm_counter_reset_all"] = Processor.process_bm_counter_reset_all
  self._processMap["bm_counter_write"] = Processor.process_bm_counter_write
  self._processMap["bm_learning_ack"] = Processor.process_bm_learning_ack
  self._processMap["bm_learning_ack_buffer"] = Processor.process_bm_learning_ack_buffer
  self._processMap["bm_learning_set_timeout"] = Processor.process_bm_learning_set_timeout
  self._processMap["bm_learning_set_buffer_size"] = Processor.process_bm_learning_set_buffer_size
  self._processMap["bm_load_new_config"] = Processor.process_bm_load_new_config
  self._processMap["bm_swap_configs"] = Processor.process_bm_swap_configs
  self._processMap["bm_meter_array_set_rates"] = Processor.process_bm_meter_array_set_rates
  self._processMap["bm_meter_set_rates"] = Processor.process_bm_meter_set_rates
  self._processMap["bm_register_read"] = Processor.process_bm_register_read
  self._processMap["bm_register_write"] = Processor.process_bm_register_write
  self._processMap["bm_register_write_range"] = Processor.process_bm_register_write_range
  self._processMap["bm_register_reset"] = Processor.process_bm_register_reset
  self._processMap["bm_dev_mgr_add_port"] = Processor.process_bm_dev_mgr_add_port
  self._processMap["bm_dev_mgr_remove_port"] = Processor.process_bm_dev_mgr_remove_port
  self._processMap["bm_dev_mgr_show_ports"] = Processor.process_bm_dev_mgr_show_ports
  self._processMap["bm_mgmt_get_info"] = Processor.process_bm_mgmt_get_info
  self._processMap["bm_set_crc16_custom_parameters"] = Processor.process_bm_set_crc16_custom_parameters
  self._processMap["bm_set_crc32_custom_parameters"] = Processor.process_bm_set_crc32_custom_parameters
  self._processMap["bm_reset_state"] = Processor.process_bm_reset_state
  self._processMap["bm_get_config"] = Processor.process_bm_get_config
  self._processMap["bm_get_config_md5"] = Processor.process_bm_get_config_md5
  self._processMap["bm_serialize_state"] = Processor.process_bm_serialize_state
def process(self, iprot, oprot):
  """Read one incoming message and dispatch it to the matching process_* handler via _processMap."""
  (name, type, seqid) = iprot.readMessageBegin()
  if name not in self._processMap:
    # Unknown method: consume the request payload, then reply with UNKNOWN_METHOD.
    iprot.skip(TType.STRUCT)
    iprot.readMessageEnd()
    x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
    oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
    x.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
    return
  else:
    # Handlers are stored unbound, so pass self explicitly.
    self._processMap[name](self, seqid, iprot, oprot)
  return True
def process_bm_mt_add_entry(self, seqid, iprot, oprot):
  """Server side of bm_mt_add_entry: decode args, invoke the handler, write back the REPLY/EXCEPTION."""
  args = bm_mt_add_entry_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = bm_mt_add_entry_result()
  try:
    result.success = self._handler.bm_mt_add_entry(args.cxt_id, args.table_name, args.match_key, args.action_name, args.action_data, args.options)
    msg_type = TMessageType.REPLY
  except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
    # Transport failures and interpreter shutdown must propagate, not be reported to the peer.
    raise
  except InvalidTableOperation as ouch:
    # Declared service exception: travels back to the client inside a normal REPLY.
    msg_type = TMessageType.REPLY
    result.ouch = ouch
  except Exception as ex:
    # Undeclared failure: log it and report a generic INTERNAL_ERROR to the client.
    msg_type = TMessageType.EXCEPTION
    logging.exception(ex)
    result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
  oprot.writeMessageBegin("bm_mt_add_entry", msg_type, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_bm_mt_set_default_action(self, seqid, iprot, oprot):
  """Server side of bm_mt_set_default_action: decode args, invoke the handler, write back the REPLY/EXCEPTION."""
  args = bm_mt_set_default_action_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = bm_mt_set_default_action_result()
  try:
    self._handler.bm_mt_set_default_action(args.cxt_id, args.table_name, args.action_name, args.action_data)
    msg_type = TMessageType.REPLY
  except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
    # Transport failures and interpreter shutdown must propagate, not be reported to the peer.
    raise
  except InvalidTableOperation as ouch:
    # Declared service exception: travels back to the client inside a normal REPLY.
    msg_type = TMessageType.REPLY
    result.ouch = ouch
  except Exception as ex:
    # Undeclared failure: log it and report a generic INTERNAL_ERROR to the client.
    msg_type = TMessageType.EXCEPTION
    logging.exception(ex)
    result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
  oprot.writeMessageBegin("bm_mt_set_default_action", msg_type, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_bm_mt_delete_entry(self, seqid, iprot, oprot):
args = bm_mt_delete_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_delete_entry_result()
try:
self._handler.bm_mt_delete_entry(args.cxt_id, args.table_name, args.entry_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_delete_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_modify_entry(self, seqid, iprot, oprot):
args = bm_mt_modify_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_modify_entry_result()
try:
self._handler.bm_mt_modify_entry(args.cxt_id, args.table_name, args.entry_handle, args.action_name, args.action_data)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_modify_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_set_entry_ttl(self, seqid, iprot, oprot):
args = bm_mt_set_entry_ttl_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_set_entry_ttl_result()
try:
self._handler.bm_mt_set_entry_ttl(args.cxt_id, args.table_name, args.entry_handle, args.timeout_ms)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_set_entry_ttl", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_add_member(self, seqid, iprot, oprot):
args = bm_mt_indirect_add_member_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_add_member_result()
try:
result.success = self._handler.bm_mt_indirect_add_member(args.cxt_id, args.table_name, args.action_name, args.action_data)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_add_member", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_delete_member(self, seqid, iprot, oprot):
args = bm_mt_indirect_delete_member_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_delete_member_result()
try:
self._handler.bm_mt_indirect_delete_member(args.cxt_id, args.table_name, args.mbr_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_delete_member", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_modify_member(self, seqid, iprot, oprot):
args = bm_mt_indirect_modify_member_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_modify_member_result()
try:
self._handler.bm_mt_indirect_modify_member(args.cxt_id, args.table_name, args.mbr_handle, args.action_name, args.action_data)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_modify_member", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_add_entry(self, seqid, iprot, oprot):
args = bm_mt_indirect_add_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_add_entry_result()
try:
result.success = self._handler.bm_mt_indirect_add_entry(args.cxt_id, args.table_name, args.match_key, args.mbr_handle, args.options)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_add_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_modify_entry(self, seqid, iprot, oprot):
args = bm_mt_indirect_modify_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_modify_entry_result()
try:
self._handler.bm_mt_indirect_modify_entry(args.cxt_id, args.table_name, args.entry_handle, args.mbr_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_modify_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_delete_entry(self, seqid, iprot, oprot):
args = bm_mt_indirect_delete_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_delete_entry_result()
try:
self._handler.bm_mt_indirect_delete_entry(args.cxt_id, args.table_name, args.entry_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_delete_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_set_entry_ttl(self, seqid, iprot, oprot):
args = bm_mt_indirect_set_entry_ttl_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_set_entry_ttl_result()
try:
self._handler.bm_mt_indirect_set_entry_ttl(args.cxt_id, args.table_name, args.entry_handle, args.timeout_ms)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_set_entry_ttl", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_set_default_member(self, seqid, iprot, oprot):
args = bm_mt_indirect_set_default_member_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_set_default_member_result()
try:
self._handler.bm_mt_indirect_set_default_member(args.cxt_id, args.table_name, args.mbr_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_set_default_member", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_create_group(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_create_group_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_create_group_result()
try:
result.success = self._handler.bm_mt_indirect_ws_create_group(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_create_group", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_delete_group(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_delete_group_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_delete_group_result()
try:
self._handler.bm_mt_indirect_ws_delete_group(args.cxt_id, args.table_name, args.grp_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_delete_group", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_add_member_to_group(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_add_member_to_group_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_add_member_to_group_result()
try:
self._handler.bm_mt_indirect_ws_add_member_to_group(args.cxt_id, args.table_name, args.mbr_handle, args.grp_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_add_member_to_group", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_remove_member_from_group(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_remove_member_from_group_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_remove_member_from_group_result()
try:
self._handler.bm_mt_indirect_ws_remove_member_from_group(args.cxt_id, args.table_name, args.mbr_handle, args.grp_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_remove_member_from_group", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_add_entry(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_add_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_add_entry_result()
try:
result.success = self._handler.bm_mt_indirect_ws_add_entry(args.cxt_id, args.table_name, args.match_key, args.grp_handle, args.options)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_add_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_modify_entry(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_modify_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_modify_entry_result()
try:
self._handler.bm_mt_indirect_ws_modify_entry(args.cxt_id, args.table_name, args.entry_handle, args.grp_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_modify_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_set_default_group(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_set_default_group_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_set_default_group_result()
try:
self._handler.bm_mt_indirect_ws_set_default_group(args.cxt_id, args.table_name, args.grp_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_set_default_group", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_read_counter(self, seqid, iprot, oprot):
args = bm_mt_read_counter_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_read_counter_result()
try:
result.success = self._handler.bm_mt_read_counter(args.cxt_id, args.table_name, args.entry_handle)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_read_counter", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_reset_counters(self, seqid, iprot, oprot):
args = bm_mt_reset_counters_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_reset_counters_result()
try:
self._handler.bm_mt_reset_counters(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_reset_counters", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_write_counter(self, seqid, iprot, oprot):
args = bm_mt_write_counter_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_write_counter_result()
try:
self._handler.bm_mt_write_counter(args.cxt_id, args.table_name, args.entry_handle, args.value)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_write_counter", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_set_meter_rates(self, seqid, iprot, oprot):
args = bm_mt_set_meter_rates_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_set_meter_rates_result()
try:
self._handler.bm_mt_set_meter_rates(args.cxt_id, args.table_name, args.entry_handle, args.rates)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_set_meter_rates", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_get_entries(self, seqid, iprot, oprot):
args = bm_mt_get_entries_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_get_entries_result()
try:
result.success = self._handler.bm_mt_get_entries(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_get_entries", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_get_default_entry(self, seqid, iprot, oprot):
args = bm_mt_get_default_entry_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_get_default_entry_result()
try:
result.success = self._handler.bm_mt_get_default_entry(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_get_default_entry", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_get_members(self, seqid, iprot, oprot):
args = bm_mt_indirect_get_members_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_get_members_result()
try:
result.success = self._handler.bm_mt_indirect_get_members(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_get_members", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mt_indirect_ws_get_groups(self, seqid, iprot, oprot):
args = bm_mt_indirect_ws_get_groups_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mt_indirect_ws_get_groups_result()
try:
result.success = self._handler.bm_mt_indirect_ws_get_groups(args.cxt_id, args.table_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidTableOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mt_indirect_ws_get_groups", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_counter_read(self, seqid, iprot, oprot):
args = bm_counter_read_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_counter_read_result()
try:
result.success = self._handler.bm_counter_read(args.cxt_id, args.counter_name, args.index)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidCounterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_counter_read", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_counter_reset_all(self, seqid, iprot, oprot):
args = bm_counter_reset_all_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_counter_reset_all_result()
try:
self._handler.bm_counter_reset_all(args.cxt_id, args.counter_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidCounterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_counter_reset_all", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_counter_write(self, seqid, iprot, oprot):
args = bm_counter_write_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_counter_write_result()
try:
self._handler.bm_counter_write(args.cxt_id, args.counter_name, args.index, args.value)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidCounterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_counter_write", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_learning_ack(self, seqid, iprot, oprot):
args = bm_learning_ack_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_learning_ack_result()
try:
self._handler.bm_learning_ack(args.cxt_id, args.list_id, args.buffer_id, args.sample_ids)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidLearnOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_learning_ack", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_learning_ack_buffer(self, seqid, iprot, oprot):
args = bm_learning_ack_buffer_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_learning_ack_buffer_result()
try:
self._handler.bm_learning_ack_buffer(args.cxt_id, args.list_id, args.buffer_id)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidLearnOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_learning_ack_buffer", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_learning_set_timeout(self, seqid, iprot, oprot):
args = bm_learning_set_timeout_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_learning_set_timeout_result()
try:
self._handler.bm_learning_set_timeout(args.cxt_id, args.list_id, args.timeout_ms)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidLearnOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_learning_set_timeout", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_learning_set_buffer_size(self, seqid, iprot, oprot):
args = bm_learning_set_buffer_size_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_learning_set_buffer_size_result()
try:
self._handler.bm_learning_set_buffer_size(args.cxt_id, args.list_id, args.nb_samples)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidLearnOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_learning_set_buffer_size", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_load_new_config(self, seqid, iprot, oprot):
args = bm_load_new_config_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_load_new_config_result()
try:
self._handler.bm_load_new_config(args.config_str)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidSwapOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_load_new_config", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_swap_configs(self, seqid, iprot, oprot):
args = bm_swap_configs_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_swap_configs_result()
try:
self._handler.bm_swap_configs()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidSwapOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_swap_configs", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_meter_array_set_rates(self, seqid, iprot, oprot):
args = bm_meter_array_set_rates_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_meter_array_set_rates_result()
try:
self._handler.bm_meter_array_set_rates(args.cxt_id, args.meter_array_name, args.rates)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidMeterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_meter_array_set_rates", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_meter_set_rates(self, seqid, iprot, oprot):
args = bm_meter_set_rates_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_meter_set_rates_result()
try:
self._handler.bm_meter_set_rates(args.cxt_id, args.meter_array_name, args.index, args.rates)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidMeterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_meter_set_rates", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_register_read(self, seqid, iprot, oprot):
args = bm_register_read_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_register_read_result()
try:
result.success = self._handler.bm_register_read(args.cxt_id, args.register_array_name, args.idx)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidRegisterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_register_read", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_register_write(self, seqid, iprot, oprot):
args = bm_register_write_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_register_write_result()
try:
self._handler.bm_register_write(args.cxt_id, args.register_array_name, args.index, args.value)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidRegisterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_register_write", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_register_write_range(self, seqid, iprot, oprot):
args = bm_register_write_range_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_register_write_range_result()
try:
self._handler.bm_register_write_range(args.cxt_id, args.register_array_name, args.start_index, args.end_index, args.value)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidRegisterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_register_write_range", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_register_reset(self, seqid, iprot, oprot):
args = bm_register_reset_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_register_reset_result()
try:
self._handler.bm_register_reset(args.cxt_id, args.register_array_name)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidRegisterOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_register_reset", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_dev_mgr_add_port(self, seqid, iprot, oprot):
args = bm_dev_mgr_add_port_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_dev_mgr_add_port_result()
try:
self._handler.bm_dev_mgr_add_port(args.iface_name, args.port_num, args.pcap_path)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidDevMgrOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_dev_mgr_add_port", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_dev_mgr_remove_port(self, seqid, iprot, oprot):
args = bm_dev_mgr_remove_port_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_dev_mgr_remove_port_result()
try:
self._handler.bm_dev_mgr_remove_port(args.port_num)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidDevMgrOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_dev_mgr_remove_port", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_dev_mgr_show_ports(self, seqid, iprot, oprot):
args = bm_dev_mgr_show_ports_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_dev_mgr_show_ports_result()
try:
result.success = self._handler.bm_dev_mgr_show_ports()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidDevMgrOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_dev_mgr_show_ports", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_mgmt_get_info(self, seqid, iprot, oprot):
args = bm_mgmt_get_info_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_mgmt_get_info_result()
try:
result.success = self._handler.bm_mgmt_get_info()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_mgmt_get_info", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_set_crc16_custom_parameters(self, seqid, iprot, oprot):
args = bm_set_crc16_custom_parameters_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_set_crc16_custom_parameters_result()
try:
self._handler.bm_set_crc16_custom_parameters(args.cxt_id, args.calc_name, args.crc16_config)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidCrcOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_set_crc16_custom_parameters", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_set_crc32_custom_parameters(self, seqid, iprot, oprot):
args = bm_set_crc32_custom_parameters_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_set_crc32_custom_parameters_result()
try:
self._handler.bm_set_crc32_custom_parameters(args.cxt_id, args.calc_name, args.crc32_config)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except InvalidCrcOperation as ouch:
msg_type = TMessageType.REPLY
result.ouch = ouch
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_set_crc32_custom_parameters", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_reset_state(self, seqid, iprot, oprot):
args = bm_reset_state_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_reset_state_result()
try:
self._handler.bm_reset_state()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_reset_state", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_get_config(self, seqid, iprot, oprot):
args = bm_get_config_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_get_config_result()
try:
result.success = self._handler.bm_get_config()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_get_config", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_get_config_md5(self, seqid, iprot, oprot):
args = bm_get_config_md5_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_get_config_md5_result()
try:
result.success = self._handler.bm_get_config_md5()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_get_config_md5", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_bm_serialize_state(self, seqid, iprot, oprot):
args = bm_serialize_state_args()
args.read(iprot)
iprot.readMessageEnd()
result = bm_serialize_state_result()
try:
result.success = self._handler.bm_serialize_state()
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("bm_serialize_state", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class bm_mt_add_entry_args:
    """
    Thrift argument struct for bm_mt_add_entry.

    Attributes:
     - cxt_id (i32)
     - table_name (string)
     - match_key (list of BmMatchParam)
     - action_name (string)
     - action_data (list of string)
     - options (BmAddEntryOptions)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'cxt_id', None, None, ),  # 1
        (2, TType.STRING, 'table_name', None, None, ),  # 2
        (3, TType.LIST, 'match_key', (TType.STRUCT, (BmMatchParam, BmMatchParam.thrift_spec)), None, ),  # 3
        (4, TType.STRING, 'action_name', None, None, ),  # 4
        (5, TType.LIST, 'action_data', (TType.STRING, None), None, ),  # 5
        (6, TType.STRUCT, 'options', (BmAddEntryOptions, BmAddEntryOptions.thrift_spec), None, ),  # 6
    )

    def __init__(self, cxt_id=None, table_name=None, match_key=None, action_name=None, action_data=None, options=None,):
        self.cxt_id = cxt_id
        self.table_name = table_name
        self.match_key = match_key
        self.action_name = action_name
        self.action_data = action_data
        self.options = options

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.cxt_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    self.match_key = []
                    (_etype40, _size37) = iprot.readListBegin()
                    # range() instead of Py2-only xrange() so this works on Python 2 and 3.
                    for _i41 in range(_size37):
                        _elem42 = BmMatchParam()
                        _elem42.read(iprot)
                        self.match_key.append(_elem42)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.action_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    self.action_data = []
                    (_etype46, _size43) = iprot.readListBegin()
                    for _i47 in range(_size43):
                        _elem48 = iprot.readString()
                        self.action_data.append(_elem48)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRUCT:
                    self.options = BmAddEntryOptions()
                    self.options.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_add_entry_args')
        if self.cxt_id is not None:
            oprot.writeFieldBegin('cxt_id', TType.I32, 1)
            oprot.writeI32(self.cxt_id)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name)
            oprot.writeFieldEnd()
        if self.match_key is not None:
            oprot.writeFieldBegin('match_key', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.match_key))
            for iter49 in self.match_key:
                iter49.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.action_name is not None:
            oprot.writeFieldBegin('action_name', TType.STRING, 4)
            oprot.writeString(self.action_name)
            oprot.writeFieldEnd()
        if self.action_data is not None:
            oprot.writeFieldBegin('action_data', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.action_data))
            for iter50 in self.action_data:
                oprot.writeString(iter50)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.options is not None:
            oprot.writeFieldBegin('options', TType.STRUCT, 6)
            self.options.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        # NOTE(review): hashing list-valued fields (match_key, action_data) raises
        # TypeError; this mirrors the generator's output and is left unchanged.
        value = 17
        value = (value * 31) ^ hash(self.cxt_id)
        value = (value * 31) ^ hash(self.table_name)
        value = (value * 31) ^ hash(self.match_key)
        value = (value * 31) ^ hash(self.action_name)
        value = (value * 31) ^ hash(self.action_data)
        value = (value * 31) ^ hash(self.options)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_add_entry_result:
    """
    Thrift result struct for bm_mt_add_entry.

    Attributes:
     - success (i64 return value)
     - ouch (InvalidTableOperation raised by the handler, if any)
    """

    thrift_spec = (
        (0, TType.I64, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, ouch=None,):
        self.success = success
        self.ouch = ouch

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch = InvalidTableOperation()
                    self.ouch.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_add_entry_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        if self.ouch is not None:
            oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
            self.ouch.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.ouch)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_set_default_action_args:
    """
    Thrift argument struct for bm_mt_set_default_action.

    Attributes:
     - cxt_id (i32)
     - table_name (string)
     - action_name (string)
     - action_data (list of string)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'cxt_id', None, None, ),  # 1
        (2, TType.STRING, 'table_name', None, None, ),  # 2
        (3, TType.STRING, 'action_name', None, None, ),  # 3
        (4, TType.LIST, 'action_data', (TType.STRING, None), None, ),  # 4
    )

    def __init__(self, cxt_id=None, table_name=None, action_name=None, action_data=None,):
        self.cxt_id = cxt_id
        self.table_name = table_name
        self.action_name = action_name
        self.action_data = action_data

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.cxt_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.action_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.LIST:
                    self.action_data = []
                    (_etype54, _size51) = iprot.readListBegin()
                    # range() instead of Py2-only xrange() so this works on Python 2 and 3.
                    for _i55 in range(_size51):
                        _elem56 = iprot.readString()
                        self.action_data.append(_elem56)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_set_default_action_args')
        if self.cxt_id is not None:
            oprot.writeFieldBegin('cxt_id', TType.I32, 1)
            oprot.writeI32(self.cxt_id)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name)
            oprot.writeFieldEnd()
        if self.action_name is not None:
            oprot.writeFieldBegin('action_name', TType.STRING, 3)
            oprot.writeString(self.action_name)
            oprot.writeFieldEnd()
        if self.action_data is not None:
            oprot.writeFieldBegin('action_data', TType.LIST, 4)
            oprot.writeListBegin(TType.STRING, len(self.action_data))
            for iter57 in self.action_data:
                oprot.writeString(iter57)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.cxt_id)
        value = (value * 31) ^ hash(self.table_name)
        value = (value * 31) ^ hash(self.action_name)
        value = (value * 31) ^ hash(self.action_data)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_set_default_action_result:
    """
    Thrift result struct for bm_mt_set_default_action (void return).

    Attributes:
     - ouch (InvalidTableOperation raised by the handler, if any)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ),  # 1
    )

    def __init__(self, ouch=None,):
        self.ouch = ouch

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch = InvalidTableOperation()
                    self.ouch.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_set_default_action_result')
        if self.ouch is not None:
            oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
            self.ouch.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.ouch)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_delete_entry_args:
    """
    Thrift argument struct for bm_mt_delete_entry.

    Attributes:
     - cxt_id (i32)
     - table_name (string)
     - entry_handle (i64)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'cxt_id', None, None, ),  # 1
        (2, TType.STRING, 'table_name', None, None, ),  # 2
        (3, TType.I64, 'entry_handle', None, None, ),  # 3
    )

    def __init__(self, cxt_id=None, table_name=None, entry_handle=None,):
        self.cxt_id = cxt_id
        self.table_name = table_name
        self.entry_handle = entry_handle

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.cxt_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.entry_handle = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_delete_entry_args')
        if self.cxt_id is not None:
            oprot.writeFieldBegin('cxt_id', TType.I32, 1)
            oprot.writeI32(self.cxt_id)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name)
            oprot.writeFieldEnd()
        if self.entry_handle is not None:
            oprot.writeFieldBegin('entry_handle', TType.I64, 3)
            oprot.writeI64(self.entry_handle)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.cxt_id)
        value = (value * 31) ^ hash(self.table_name)
        value = (value * 31) ^ hash(self.entry_handle)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_delete_entry_result:
    """
    Thrift result struct for bm_mt_delete_entry (void return).

    Attributes:
     - ouch (InvalidTableOperation raised by the handler, if any)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ),  # 1
    )

    def __init__(self, ouch=None,):
        self.ouch = ouch

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch = InvalidTableOperation()
                    self.ouch.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_delete_entry_result')
        if self.ouch is not None:
            oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
            self.ouch.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.ouch)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_modify_entry_args:
    """
    Thrift argument struct for bm_mt_modify_entry.

    Attributes:
     - cxt_id (i32)
     - table_name (string)
     - entry_handle (i64)
     - action_name (string)
     - action_data (list of string)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'cxt_id', None, None, ),  # 1
        (2, TType.STRING, 'table_name', None, None, ),  # 2
        (3, TType.I64, 'entry_handle', None, None, ),  # 3
        (4, TType.STRING, 'action_name', None, None, ),  # 4
        (5, TType.LIST, 'action_data', (TType.STRING, None), None, ),  # 5
    )

    def __init__(self, cxt_id=None, table_name=None, entry_handle=None, action_name=None, action_data=None,):
        self.cxt_id = cxt_id
        self.table_name = table_name
        self.entry_handle = entry_handle
        self.action_name = action_name
        self.action_data = action_data

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.cxt_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.entry_handle = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.action_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    self.action_data = []
                    (_etype61, _size58) = iprot.readListBegin()
                    # range() instead of Py2-only xrange() so this works on Python 2 and 3.
                    for _i62 in range(_size58):
                        _elem63 = iprot.readString()
                        self.action_data.append(_elem63)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_modify_entry_args')
        if self.cxt_id is not None:
            oprot.writeFieldBegin('cxt_id', TType.I32, 1)
            oprot.writeI32(self.cxt_id)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name)
            oprot.writeFieldEnd()
        if self.entry_handle is not None:
            oprot.writeFieldBegin('entry_handle', TType.I64, 3)
            oprot.writeI64(self.entry_handle)
            oprot.writeFieldEnd()
        if self.action_name is not None:
            oprot.writeFieldBegin('action_name', TType.STRING, 4)
            oprot.writeString(self.action_name)
            oprot.writeFieldEnd()
        if self.action_data is not None:
            oprot.writeFieldBegin('action_data', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.action_data))
            for iter64 in self.action_data:
                oprot.writeString(iter64)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.cxt_id)
        value = (value * 31) ^ hash(self.table_name)
        value = (value * 31) ^ hash(self.entry_handle)
        value = (value * 31) ^ hash(self.action_name)
        value = (value * 31) ^ hash(self.action_data)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_modify_entry_result:
    """
    Thrift result struct for bm_mt_modify_entry (void return).

    Attributes:
     - ouch (InvalidTableOperation raised by the handler, if any)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ),  # 1
    )

    def __init__(self, ouch=None,):
        self.ouch = ouch

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch = InvalidTableOperation()
                    self.ouch.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_modify_entry_result')
        if self.ouch is not None:
            oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
            self.ouch.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.ouch)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_set_entry_ttl_args:
    """
    Thrift argument struct for bm_mt_set_entry_ttl.

    Attributes:
     - cxt_id (i32)
     - table_name (string)
     - entry_handle (i64)
     - timeout_ms (i32)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.I32, 'cxt_id', None, None, ),  # 1
        (2, TType.STRING, 'table_name', None, None, ),  # 2
        (3, TType.I64, 'entry_handle', None, None, ),  # 3
        (4, TType.I32, 'timeout_ms', None, None, ),  # 4
    )

    def __init__(self, cxt_id=None, table_name=None, entry_handle=None, timeout_ms=None,):
        self.cxt_id = cxt_id
        self.table_name = table_name
        self.entry_handle = entry_handle
        self.timeout_ms = timeout_ms

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.cxt_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.entry_handle = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.timeout_ms = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_set_entry_ttl_args')
        if self.cxt_id is not None:
            oprot.writeFieldBegin('cxt_id', TType.I32, 1)
            oprot.writeI32(self.cxt_id)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name)
            oprot.writeFieldEnd()
        if self.entry_handle is not None:
            oprot.writeFieldBegin('entry_handle', TType.I64, 3)
            oprot.writeI64(self.entry_handle)
            oprot.writeFieldEnd()
        if self.timeout_ms is not None:
            oprot.writeFieldBegin('timeout_ms', TType.I32, 4)
            oprot.writeI32(self.timeout_ms)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.cxt_id)
        value = (value * 31) ^ hash(self.table_name)
        value = (value * 31) ^ hash(self.entry_handle)
        value = (value * 31) ^ hash(self.timeout_ms)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_set_entry_ttl_result:
    """
    Thrift result struct for bm_mt_set_entry_ttl (void return).

    Attributes:
     - ouch (InvalidTableOperation raised by the handler, if any)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ),  # 1
    )

    def __init__(self, ouch=None,):
        self.ouch = ouch

    def read(self, iprot):
        """Deserialize this struct from *iprot* (C fastbinary fast path when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ouch = InvalidTableOperation()
                    self.ouch.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (C fastbinary fast path when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_mt_set_entry_ttl_result')
        if self.ouch is not None:
            oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
            self.ouch.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.ouch)
        return value

    def __repr__(self):
        # dict.items() instead of Py2-only iteritems() so this works on Python 2 and 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_mt_indirect_add_member_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_add_member RPC
  (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - action_name: action to bind to the new member
   - action_data: list of binary strings, one per action parameter
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.STRING, 'action_name', None, None, ), # 3
    (4, TType.LIST, 'action_data', (TType.STRING,None), None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, action_name=None, action_data=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.action_name = action_name
    self.action_data = action_data

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.action_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.action_data = []
          (_etype68, _size65) = iprot.readListBegin()
          for _i69 in xrange(_size65):
            _elem70 = iprot.readString()
            self.action_data.append(_elem70)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_add_member_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.action_name is not None:
      oprot.writeFieldBegin('action_name', TType.STRING, 3)
      oprot.writeString(self.action_name)
      oprot.writeFieldEnd()
    if self.action_data is not None:
      oprot.writeFieldBegin('action_data', TType.LIST, 4)
      oprot.writeListBegin(TType.STRING, len(self.action_data))
      for iter71 in self.action_data:
        oprot.writeString(iter71)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    # BUGFIX vs. stock thrift output: hash() on a list raises TypeError, so
    # action_data is snapshotted as a tuple first (None passes through as-is).
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.action_name)
    value = (value * 31) ^ hash(tuple(self.action_data) if self.action_data is not None else None)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_add_member_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_add_member RPC
  (Python 2 target: iteritems/xrange).

  Attributes:
   - success: i32 return value of the call (slot 0)
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_add_member_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_delete_member_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_delete_member
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - mbr_handle: handle of the member to delete (i32)
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'mbr_handle', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, table_name=None, mbr_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.mbr_handle = mbr_handle

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_delete_member_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 3)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.mbr_handle)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_delete_member_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_delete_member
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 ('success') is unused (None).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_delete_member_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_modify_member_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_modify_member
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - mbr_handle: handle of the member to modify (i32)
   - action_name: replacement action name
   - action_data: list of binary strings, one per action parameter
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'mbr_handle', None, None, ), # 3
    (4, TType.STRING, 'action_name', None, None, ), # 4
    (5, TType.LIST, 'action_data', (TType.STRING,None), None, ), # 5
  )

  def __init__(self, cxt_id=None, table_name=None, mbr_handle=None, action_name=None, action_data=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.mbr_handle = mbr_handle
    self.action_name = action_name
    self.action_data = action_data

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.action_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.action_data = []
          (_etype75, _size72) = iprot.readListBegin()
          for _i76 in xrange(_size72):
            _elem77 = iprot.readString()
            self.action_data.append(_elem77)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_modify_member_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 3)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    if self.action_name is not None:
      oprot.writeFieldBegin('action_name', TType.STRING, 4)
      oprot.writeString(self.action_name)
      oprot.writeFieldEnd()
    if self.action_data is not None:
      oprot.writeFieldBegin('action_data', TType.LIST, 5)
      oprot.writeListBegin(TType.STRING, len(self.action_data))
      for iter78 in self.action_data:
        oprot.writeString(iter78)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    # BUGFIX vs. stock thrift output: hash() on a list raises TypeError, so
    # action_data is snapshotted as a tuple first (None passes through as-is).
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.mbr_handle)
    value = (value * 31) ^ hash(self.action_name)
    value = (value * 31) ^ hash(tuple(self.action_data) if self.action_data is not None else None)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_modify_member_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_modify_member
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 ('success') is unused (None).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_modify_member_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_add_entry_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_add_entry RPC
  (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - match_key: list of BmMatchParam structs forming the match key
   - mbr_handle: handle of the member the entry points to (i32)
   - options: BmAddEntryOptions struct (e.g. priority)
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.LIST, 'match_key', (TType.STRUCT,(BmMatchParam, BmMatchParam.thrift_spec)), None, ), # 3
    (4, TType.I32, 'mbr_handle', None, None, ), # 4
    (5, TType.STRUCT, 'options', (BmAddEntryOptions, BmAddEntryOptions.thrift_spec), None, ), # 5
  )

  def __init__(self, cxt_id=None, table_name=None, match_key=None, mbr_handle=None, options=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.match_key = match_key
    self.mbr_handle = mbr_handle
    self.options = options

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.match_key = []
          (_etype82, _size79) = iprot.readListBegin()
          for _i83 in xrange(_size79):
            _elem84 = BmMatchParam()
            _elem84.read(iprot)
            self.match_key.append(_elem84)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.options = BmAddEntryOptions()
          self.options.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_add_entry_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.match_key is not None:
      oprot.writeFieldBegin('match_key', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.match_key))
      for iter85 in self.match_key:
        iter85.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 4)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    if self.options is not None:
      oprot.writeFieldBegin('options', TType.STRUCT, 5)
      self.options.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    # BUGFIX vs. stock thrift output: hash() on a list raises TypeError, so
    # match_key is snapshotted as a tuple first (None passes through as-is).
    # Assumes BmMatchParam instances are hashable (generated structs define
    # __hash__) -- TODO confirm against the BmMatchParam definition.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(tuple(self.match_key) if self.match_key is not None else None)
    value = (value * 31) ^ hash(self.mbr_handle)
    value = (value * 31) ^ hash(self.options)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_add_entry_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_add_entry RPC
  (Python 2 target: iteritems/xrange).

  Attributes:
   - success: i64 entry handle returned by the call (slot 0)
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  thrift_spec = (
    (0, TType.I64, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I64:
          self.success = iprot.readI64()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_add_entry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I64, 0)
      oprot.writeI64(self.success)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_modify_entry_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_modify_entry
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - entry_handle: handle of the entry to modify (i64)
   - mbr_handle: handle of the member the entry should point to (i32)
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
    (4, TType.I32, 'mbr_handle', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, entry_handle=None, mbr_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
    self.mbr_handle = mbr_handle

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_modify_entry_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 4)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    value = (value * 31) ^ hash(self.mbr_handle)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_modify_entry_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_modify_entry
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 ('success') is unused (None).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_modify_entry_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_delete_entry_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_delete_entry
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - entry_handle: handle of the entry to delete (i64)
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, table_name=None, entry_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_delete_entry_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_delete_entry_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_delete_entry
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 ('success') is unused (None).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_delete_entry_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_set_entry_ttl_args:
  """
  Auto-generated Thrift argument struct for the bm_mt_indirect_set_entry_ttl
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - cxt_id: context id (i32)
   - table_name: name of the indirect match table
   - entry_handle: handle of the entry whose TTL is set (i64)
   - timeout_ms: timeout in milliseconds (i32)
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 is unused; thrift field ids start at 1 here.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
    (4, TType.I32, 'timeout_ms', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, entry_handle=None, timeout_ms=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
    self.timeout_ms = timeout_ms

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.timeout_ms = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_set_entry_ttl_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    if self.timeout_ms is not None:
      oprot.writeFieldBegin('timeout_ms', TType.I32, 4)
      oprot.writeI32(self.timeout_ms)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    value = (value * 31) ^ hash(self.timeout_ms)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_set_entry_ttl_result:
  """
  Auto-generated Thrift result struct for the bm_mt_indirect_set_entry_ttl
  RPC (Python 2 target: iteritems/xrange).

  Attributes:
   - ouch: InvalidTableOperation set by the server on failure; default None
  """

  # Per-field wire spec: (field id, type, name, type args, default).
  # Slot 0 ('success') is unused (None).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C-accelerated fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wire type mismatch: discard the field
      else:
        iprot.skip(ftype)  # unknown field id: discard
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_set_entry_ttl_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    # 31-based rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_set_default_member_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_set_default_member RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect match table
   - mbr_handle: i32 handle of the member to install as the default
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'mbr_handle', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, table_name=None, mbr_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.mbr_handle = mbr_handle

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_set_default_member_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 3)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.mbr_handle)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_set_default_member_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_set_default_member RPC.

  Attributes:
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields generically; unknown ids are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_set_default_member_result')
    # None-valued fields are omitted from the wire encoding.
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_create_group_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_create_group RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )

  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_create_group_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_create_group_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_create_group RPC.

  Attributes:
   - success: i32 handle of the newly created group (field id 0)
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); id 0 is the
  # conventional Thrift slot for the return value.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_create_group_result')
    # None-valued fields are omitted from the wire encoding.
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_delete_group_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_delete_group RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
   - grp_handle: i32 handle of the group to delete
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'grp_handle', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, table_name=None, grp_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.grp_handle = grp_handle

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_delete_group_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 3)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.grp_handle)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_delete_group_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_delete_group RPC.

  Attributes:
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields generically; unknown ids are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_delete_group_result')
    # None-valued fields are omitted from the wire encoding.
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_add_member_to_group_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_add_member_to_group RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
   - mbr_handle: i32 handle of the member to add
   - grp_handle: i32 handle of the destination group
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'mbr_handle', None, None, ), # 3
    (4, TType.I32, 'grp_handle', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, mbr_handle=None, grp_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.mbr_handle = mbr_handle
    self.grp_handle = grp_handle

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_add_member_to_group_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 3)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 4)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.mbr_handle)
    value = (value * 31) ^ hash(self.grp_handle)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_add_member_to_group_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_add_member_to_group RPC.

  Attributes:
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields generically; unknown ids are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_add_member_to_group_result')
    # None-valued fields are omitted from the wire encoding.
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_remove_member_from_group_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_remove_member_from_group RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
   - mbr_handle: i32 handle of the member to remove
   - grp_handle: i32 handle of the group to remove it from
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'mbr_handle', None, None, ), # 3
    (4, TType.I32, 'grp_handle', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, mbr_handle=None, grp_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.mbr_handle = mbr_handle
    self.grp_handle = grp_handle

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.mbr_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_remove_member_from_group_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.mbr_handle is not None:
      oprot.writeFieldBegin('mbr_handle', TType.I32, 3)
      oprot.writeI32(self.mbr_handle)
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 4)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.mbr_handle)
    value = (value * 31) ^ hash(self.grp_handle)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_remove_member_from_group_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_remove_member_from_group RPC.

  Attributes:
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields generically; unknown ids are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_remove_member_from_group_result')
    # None-valued fields are omitted from the wire encoding.
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_add_entry_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_add_entry RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
   - match_key: list of BmMatchParam making up the entry's match key
   - grp_handle: i32 handle of the group bound to the new entry
   - options: BmAddEntryOptions for the insertion
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.LIST, 'match_key', (TType.STRUCT,(BmMatchParam, BmMatchParam.thrift_spec)), None, ), # 3
    (4, TType.I32, 'grp_handle', None, None, ), # 4
    (5, TType.STRUCT, 'options', (BmAddEntryOptions, BmAddEntryOptions.thrift_spec), None, ), # 5
  )

  def __init__(self, cxt_id=None, table_name=None, match_key=None, grp_handle=None, options=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.match_key = match_key
    self.grp_handle = grp_handle
    self.options = options

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    # The _etype/_size/_i/_elem names are compiler-generated temporaries.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          # Deserialize the match_key list element by element.
          self.match_key = []
          (_etype89, _size86) = iprot.readListBegin()
          for _i90 in xrange(_size86):
            _elem91 = BmMatchParam()
            _elem91.read(iprot)
            self.match_key.append(_elem91)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.options = BmAddEntryOptions()
          self.options.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_add_entry_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.match_key is not None:
      oprot.writeFieldBegin('match_key', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.match_key))
      for iter92 in self.match_key:
        iter92.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 4)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    if self.options is not None:
      oprot.writeFieldBegin('options', TType.STRUCT, 5)
      self.options.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    # NOTE(review): match_key is a list, which is unhashable — this raises
    # TypeError when match_key is set; appears to be a known limitation of
    # the generated code.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.match_key)
    value = (value * 31) ^ hash(self.grp_handle)
    value = (value * 31) ^ hash(self.options)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_add_entry_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_add_entry RPC.

  Attributes:
   - success: i64 handle of the newly added entry (field id 0)
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); id 0 is the
  # conventional Thrift slot for the return value.
  thrift_spec = (
    (0, TType.I64, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I64:
          self.success = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_add_entry_result')
    # None-valued fields are omitted from the wire encoding.
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I64, 0)
      oprot.writeI64(self.success)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_modify_entry_args:
  """
  Thrift-generated argument struct for the bm_mt_indirect_ws_modify_entry RPC.

  Attributes:
   - cxt_id: i32 context id
   - table_name: name of the indirect (with-selector) match table
   - entry_handle: i64 handle of the entry to modify
   - grp_handle: i32 handle of the group to bind the entry to
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
    (4, TType.I32, 'grp_handle', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, table_name=None, entry_handle=None, grp_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
    self.grp_handle = grp_handle

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: dispatch on field id; skip unknown/mistyped fields.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_modify_entry_args')
    # None-valued fields are omitted from the wire encoding.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 4)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    value = (value * 31) ^ hash(self.grp_handle)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_modify_entry_result:
  """
  Thrift-generated result struct for the bm_mt_indirect_ws_modify_entry RPC.

  Attributes:
   - ouch: InvalidTableOperation raised by the server, or None on success
  """

  # Field spec consumed by fastbinary and the generic (de)serializers:
  # (field id, wire type, name, type args, default value); index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: C-accelerated decoder when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk fields generically; unknown ids are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encoder.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_modify_entry_result')
    # None-valued fields are omitted from the wire encoding.
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Java-style 17/31 rolling hash over all fields.
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # Python 2 API (dict.iteritems); regenerate the bindings for Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_set_default_group_args:
  """
  Argument struct for the bm_mt_indirect_ws_set_default_group RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
   - grp_handle: group handle (i32)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I32, 'grp_handle', None, None, ), # 3
  )
  def __init__(self, cxt_id=None, table_name=None, grp_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.grp_handle = grp_handle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.grp_handle = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_set_default_group_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.grp_handle is not None:
      oprot.writeFieldBegin('grp_handle', TType.I32, 3)
      oprot.writeI32(self.grp_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.grp_handle)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_ws_set_default_group_result:
  """
  Result struct for the bm_mt_indirect_ws_set_default_group RPC.

  Attributes:
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, ouch=None,):
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_set_default_group_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_read_counter_args:
  """
  Argument struct for the bm_mt_read_counter RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
   - entry_handle: handle of the entry whose counter is read (i64)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
  )
  def __init__(self, cxt_id=None, table_name=None, entry_handle=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_read_counter_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_read_counter_result:
  """
  Result struct for the bm_mt_read_counter RPC.

  Attributes:
   - success: BmCounterValue returned on success
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (BmCounterValue, BmCounterValue.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = BmCounterValue()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_read_counter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_reset_counters_args:
  """
  Argument struct for the bm_mt_reset_counters RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )
  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_reset_counters_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_reset_counters_result:
  """
  Result struct for the bm_mt_reset_counters RPC.

  Attributes:
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, ouch=None,):
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_reset_counters_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_write_counter_args:
  """
  Argument struct for the bm_mt_write_counter RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
   - entry_handle: handle of the entry whose counter is written (i64)
   - value: BmCounterValue to store
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
    (4, TType.STRUCT, 'value', (BmCounterValue, BmCounterValue.thrift_spec), None, ), # 4
  )
  def __init__(self, cxt_id=None, table_name=None, entry_handle=None, value=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
    self.value = value
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.value = BmCounterValue()
          self.value.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_write_counter_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRUCT, 4)
      self.value.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    value = (value * 31) ^ hash(self.value)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_write_counter_result:
  """
  Result struct for the bm_mt_write_counter RPC.

  Attributes:
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, ouch=None,):
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_write_counter_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_set_meter_rates_args:
  """
  Argument struct for the bm_mt_set_meter_rates RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
   - entry_handle: handle of the entry whose meter is configured (i64)
   - rates: list of BmMeterRateConfig
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.I64, 'entry_handle', None, None, ), # 3
    (4, TType.LIST, 'rates', (TType.STRUCT,(BmMeterRateConfig, BmMeterRateConfig.thrift_spec)), None, ), # 4
  )
  def __init__(self, cxt_id=None, table_name=None, entry_handle=None, rates=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
    self.entry_handle = entry_handle
    self.rates = rates
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.entry_handle = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.rates = []
          (_etype96, _size93) = iprot.readListBegin()
          # range() (not xrange()) keeps this working on Python 3.
          for _i97 in range(_size93):
            _elem98 = BmMeterRateConfig()
            _elem98.read(iprot)
            self.rates.append(_elem98)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_set_meter_rates_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.entry_handle is not None:
      oprot.writeFieldBegin('entry_handle', TType.I64, 3)
      oprot.writeI64(self.entry_handle)
      oprot.writeFieldEnd()
    if self.rates is not None:
      oprot.writeFieldBegin('rates', TType.LIST, 4)
      oprot.writeListBegin(TType.STRUCT, len(self.rates))
      for iter99 in self.rates:
        iter99.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    value = (value * 31) ^ hash(self.entry_handle)
    value = (value * 31) ^ hash(self.rates)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_set_meter_rates_result:
  """
  Result struct for the bm_mt_set_meter_rates RPC.

  Attributes:
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, ouch=None,):
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_set_meter_rates_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_get_entries_args:
  """
  Argument struct for the bm_mt_get_entries RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )
  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_get_entries_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_get_entries_result:
  """
  Result struct for the bm_mt_get_entries RPC.

  Attributes:
   - success: list of BmMtEntry returned on success
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(BmMtEntry, BmMtEntry.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype103, _size100) = iprot.readListBegin()
          # range() (not xrange()) keeps this working on Python 3.
          for _i104 in range(_size100):
            _elem105 = BmMtEntry()
            _elem105.read(iprot)
            self.success.append(_elem105)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_get_entries_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter106 in self.success:
        iter106.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_get_default_entry_args:
  """
  Argument struct for the bm_mt_get_default_entry RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )
  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_get_default_entry_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_get_default_entry_result:
  """
  Result struct for the bm_mt_get_default_entry RPC.

  Attributes:
   - success: BmActionEntry returned on success
   - ouch: InvalidTableOperation exception raised by the server, if any
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (BmActionEntry, BmActionEntry.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = BmActionEntry()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidTableOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_get_default_entry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_get_members_args:
  """
  Argument struct for the bm_mt_indirect_get_members RPC.

  Attributes:
   - cxt_id: context id (i32)
   - table_name: target table name (string)
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )
  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name
  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift input protocol)."""
    # Fast path: decode via the C extension when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay wire-compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct's non-None fields to *oprot*."""
    # Fast path: encode via the C extension when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_get_members_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # No required fields to enforce.
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.table_name)
    return value
  def __repr__(self):
    # items() (not iteritems()) so this also runs on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class bm_mt_indirect_get_members_result:
  """Result struct for the bm_mt_indirect_get_members RPC.

  Attributes:
   - success: list of BmMtIndirectMember returned on success (field 0)
   - ouch: InvalidTableOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(BmMtIndirectMember, BmMtIndirectMember.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.LIST:
        self.success = []
        (_etype, _size) = iprot.readListBegin()
        for _ in xrange(_size):
          member = BmMtIndirectMember()
          member.read(iprot)
          self.success.append(member)
        iprot.readListEnd()
      elif fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidTableOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_get_members_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for member in self.success:
        member.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    """Fold field hashes with the 17/31 polynomial scheme.

    Fix: the generated code passed the list-valued ``success`` field
    straight to hash(), raising TypeError (lists are unhashable).
    Lists are folded as tuples instead; hashable fields are unchanged.
    """
    value = 17
    for field_value in (self.success, self.ouch):
      if isinstance(field_value, list):
        field_value = tuple(field_value)
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_mt_indirect_ws_get_groups_args:
  """Argument struct for the bm_mt_indirect_ws_get_groups RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - table_name: name of the indirect-ws match table (string, field 2)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )

  def __init__(self, cxt_id=None, table_name=None,):
    self.cxt_id = cxt_id
    self.table_name = table_name

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.STRING:
        self.table_name = iprot.readString()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_get_groups_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.table_name):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_mt_indirect_ws_get_groups_result:
  """Result struct for the bm_mt_indirect_ws_get_groups RPC.

  Attributes:
   - success: list of BmMtIndirectWsGroup returned on success (field 0)
   - ouch: InvalidTableOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(BmMtIndirectWsGroup, BmMtIndirectWsGroup.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidTableOperation, InvalidTableOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.LIST:
        self.success = []
        (_etype, _size) = iprot.readListBegin()
        for _ in xrange(_size):
          group = BmMtIndirectWsGroup()
          group.read(iprot)
          self.success.append(group)
        iprot.readListEnd()
      elif fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidTableOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mt_indirect_ws_get_groups_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for group in self.success:
        group.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    """Fold field hashes with the 17/31 polynomial scheme.

    Fix: the generated code passed the list-valued ``success`` field
    straight to hash(), raising TypeError (lists are unhashable).
    Lists are folded as tuples instead; hashable fields are unchanged.
    """
    value = 17
    for field_value in (self.success, self.ouch):
      if isinstance(field_value, list):
        field_value = tuple(field_value)
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_read_args:
  """Argument struct for the bm_counter_read RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - counter_name: name of the counter array (string, field 2)
   - index: entry index within the counter array (i32, field 3)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'counter_name', None, None, ), # 2
    (3, TType.I32, 'index', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, counter_name=None, index=None,):
    self.cxt_id = cxt_id
    self.counter_name = counter_name
    self.index = index

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.STRING:
        self.counter_name = iprot.readString()
      elif fid == 3 and ftype == TType.I32:
        self.index = iprot.readI32()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_read_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.counter_name is not None:
      oprot.writeFieldBegin('counter_name', TType.STRING, 2)
      oprot.writeString(self.counter_name)
      oprot.writeFieldEnd()
    if self.index is not None:
      oprot.writeFieldBegin('index', TType.I32, 3)
      oprot.writeI32(self.index)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.counter_name, self.index):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_read_result:
  """Result struct for the bm_counter_read RPC.

  Attributes:
   - success: BmCounterValue returned on success (field 0)
   - ouch: InvalidCounterOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (BmCounterValue, BmCounterValue.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidCounterOperation, InvalidCounterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = BmCounterValue()
        self.success.read(iprot)
      elif fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidCounterOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_read_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.success, self.ouch):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_reset_all_args:
  """Argument struct for the bm_counter_reset_all RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - counter_name: name of the counter array to reset (string, field 2)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'counter_name', None, None, ), # 2
  )

  def __init__(self, cxt_id=None, counter_name=None,):
    self.cxt_id = cxt_id
    self.counter_name = counter_name

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.STRING:
        self.counter_name = iprot.readString()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_reset_all_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.counter_name is not None:
      oprot.writeFieldBegin('counter_name', TType.STRING, 2)
      oprot.writeString(self.counter_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.counter_name):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_reset_all_result:
  """Result struct for the bm_counter_reset_all RPC.

  Attributes:
   - ouch: InvalidCounterOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidCounterOperation, InvalidCounterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidCounterOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_reset_all_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.ouch,):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_write_args:
  """Argument struct for the bm_counter_write RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - counter_name: name of the counter array (string, field 2)
   - index: entry index within the counter array (i32, field 3)
   - value: BmCounterValue to write at that index (struct, field 4)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'counter_name', None, None, ), # 2
    (3, TType.I32, 'index', None, None, ), # 3
    (4, TType.STRUCT, 'value', (BmCounterValue, BmCounterValue.thrift_spec), None, ), # 4
  )

  def __init__(self, cxt_id=None, counter_name=None, index=None, value=None,):
    self.cxt_id = cxt_id
    self.counter_name = counter_name
    self.index = index
    self.value = value

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.STRING:
        self.counter_name = iprot.readString()
      elif fid == 3 and ftype == TType.I32:
        self.index = iprot.readI32()
      elif fid == 4 and ftype == TType.STRUCT:
        self.value = BmCounterValue()
        self.value.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_write_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.counter_name is not None:
      oprot.writeFieldBegin('counter_name', TType.STRING, 2)
      oprot.writeString(self.counter_name)
      oprot.writeFieldEnd()
    if self.index is not None:
      oprot.writeFieldBegin('index', TType.I32, 3)
      oprot.writeI32(self.index)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRUCT, 4)
      self.value.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.counter_name, self.index, self.value):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_counter_write_result:
  """Result struct for the bm_counter_write RPC.

  Attributes:
   - ouch: InvalidCounterOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidCounterOperation, InvalidCounterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidCounterOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_counter_write_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.ouch,):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_ack_args:
  """Argument struct for the bm_learning_ack RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - list_id: learn list id (i32, field 2)
   - buffer_id: id of the learn buffer being acked (i64, field 3)
   - sample_ids: list of i32 sample ids to ack (field 4)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.I32, 'list_id', None, None, ), # 2
    (3, TType.I64, 'buffer_id', None, None, ), # 3
    (4, TType.LIST, 'sample_ids', (TType.I32,None), None, ), # 4
  )

  def __init__(self, cxt_id=None, list_id=None, buffer_id=None, sample_ids=None,):
    self.cxt_id = cxt_id
    self.list_id = list_id
    self.buffer_id = buffer_id
    self.sample_ids = sample_ids

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.I32:
        self.list_id = iprot.readI32()
      elif fid == 3 and ftype == TType.I64:
        self.buffer_id = iprot.readI64()
      elif fid == 4 and ftype == TType.LIST:
        self.sample_ids = []
        (_etype, _size) = iprot.readListBegin()
        for _ in xrange(_size):
          self.sample_ids.append(iprot.readI32())
        iprot.readListEnd()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_ack_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.list_id is not None:
      oprot.writeFieldBegin('list_id', TType.I32, 2)
      oprot.writeI32(self.list_id)
      oprot.writeFieldEnd()
    if self.buffer_id is not None:
      oprot.writeFieldBegin('buffer_id', TType.I64, 3)
      oprot.writeI64(self.buffer_id)
      oprot.writeFieldEnd()
    if self.sample_ids is not None:
      oprot.writeFieldBegin('sample_ids', TType.LIST, 4)
      oprot.writeListBegin(TType.I32, len(self.sample_ids))
      for sample_id in self.sample_ids:
        oprot.writeI32(sample_id)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    """Fold field hashes with the 17/31 polynomial scheme.

    Fix: the generated code passed the list-valued ``sample_ids`` field
    straight to hash(), raising TypeError (lists are unhashable).
    Lists are folded as tuples instead; hashable fields are unchanged.
    """
    value = 17
    for field_value in (self.cxt_id, self.list_id, self.buffer_id, self.sample_ids):
      if isinstance(field_value, list):
        field_value = tuple(field_value)
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_ack_result:
  """Result struct for the bm_learning_ack RPC.

  Attributes:
   - ouch: InvalidLearnOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidLearnOperation, InvalidLearnOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidLearnOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_ack_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.ouch,):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_ack_buffer_args:
  """Argument struct for the bm_learning_ack_buffer RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - list_id: learn list id (i32, field 2)
   - buffer_id: id of the learn buffer being acked (i64, field 3)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.I32, 'list_id', None, None, ), # 2
    (3, TType.I64, 'buffer_id', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, list_id=None, buffer_id=None,):
    self.cxt_id = cxt_id
    self.list_id = list_id
    self.buffer_id = buffer_id

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.I32:
        self.list_id = iprot.readI32()
      elif fid == 3 and ftype == TType.I64:
        self.buffer_id = iprot.readI64()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_ack_buffer_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.list_id is not None:
      oprot.writeFieldBegin('list_id', TType.I32, 2)
      oprot.writeI32(self.list_id)
      oprot.writeFieldEnd()
    if self.buffer_id is not None:
      oprot.writeFieldBegin('buffer_id', TType.I64, 3)
      oprot.writeI64(self.buffer_id)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.list_id, self.buffer_id):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_ack_buffer_result:
  """Result struct for the bm_learning_ack_buffer RPC.

  Attributes:
   - ouch: InvalidLearnOperation exception raised by the server, if any (field 1)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidLearnOperation, InvalidLearnOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.ouch = InvalidLearnOperation()
        self.ouch.read(iprot)
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_ack_buffer_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.ouch,):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_set_timeout_args:
  """Argument struct for the bm_learning_set_timeout RPC.

  Attributes:
   - cxt_id: context id (i32, field 1)
   - list_id: learn list id (i32, field 2)
   - timeout_ms: timeout in milliseconds (i32, field 3)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.I32, 'list_id', None, None, ), # 2
    (3, TType.I32, 'timeout_ms', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, list_id=None, timeout_ms=None,):
    self.cxt_id = cxt_id
    self.list_id = list_id
    self.timeout_ms = timeout_ms

  def read(self, iprot):
    """Deserialize from *iprot*, preferring the fastbinary C decoder."""
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (_, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.cxt_id = iprot.readI32()
      elif fid == 2 and ftype == TType.I32:
        self.list_id = iprot.readI32()
      elif fid == 3 and ftype == TType.I32:
        self.timeout_ms = iprot.readI32()
      else:
        iprot.skip(ftype)  # unknown field id or wrong wire type
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize to *oprot*, preferring the fastbinary C encoder."""
    can_accelerate = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_set_timeout_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.list_id is not None:
      oprot.writeFieldBegin('list_id', TType.I32, 2)
      oprot.writeI32(self.list_id)
      oprot.writeFieldEnd()
    if self.timeout_ms is not None:
      oprot.writeFieldBegin('timeout_ms', TType.I32, 3)
      oprot.writeI32(self.timeout_ms)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field constraints for this struct."""
    pass

  def __hash__(self):
    value = 17
    for field_value in (self.cxt_id, self.list_id, self.timeout_ms):
      value = (value * 31) ^ hash(field_value)
    return value

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class bm_learning_set_timeout_result:
  """Thrift-generated result struct for bm_learning_set_timeout.

  Attributes:
   - ouch: InvalidLearnOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidLearnOperation, InvalidLearnOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidLearnOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_set_timeout_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_learning_set_buffer_size_args:
  """Thrift-generated argument struct for bm_learning_set_buffer_size.

  Attributes:
   - cxt_id: i32 context id
   - list_id: i32 learn-list id
   - nb_samples: i32 number of samples to buffer
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.I32, 'list_id', None, None, ), # 2
    (3, TType.I32, 'nb_samples', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, list_id=None, nb_samples=None,):
    self.cxt_id = cxt_id
    self.list_id = list_id
    self.nb_samples = nb_samples

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.list_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.nb_samples = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_set_buffer_size_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.list_id is not None:
      oprot.writeFieldBegin('list_id', TType.I32, 2)
      oprot.writeI32(self.list_id)
      oprot.writeFieldEnd()
    if self.nb_samples is not None:
      oprot.writeFieldBegin('nb_samples', TType.I32, 3)
      oprot.writeI32(self.nb_samples)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.list_id)
    value = (value * 31) ^ hash(self.nb_samples)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_learning_set_buffer_size_result:
  """Thrift-generated result struct for bm_learning_set_buffer_size.

  Attributes:
   - ouch: InvalidLearnOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidLearnOperation, InvalidLearnOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidLearnOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_learning_set_buffer_size_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_load_new_config_args:
  """Thrift-generated argument struct for bm_load_new_config.

  Attributes:
   - config_str: serialized device configuration (string)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'config_str', None, None, ), # 1
  )

  def __init__(self, config_str=None,):
    self.config_str = config_str

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.config_str = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_load_new_config_args')
    if self.config_str is not None:
      oprot.writeFieldBegin('config_str', TType.STRING, 1)
      oprot.writeString(self.config_str)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.config_str)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_load_new_config_result:
  """Thrift-generated result struct for bm_load_new_config.

  Attributes:
   - ouch: InvalidSwapOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidSwapOperation, InvalidSwapOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidSwapOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_load_new_config_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_swap_configs_args:
  """Thrift-generated argument struct for bm_swap_configs (no fields)."""

  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_swap_configs_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_swap_configs_result:
  """Thrift-generated result struct for bm_swap_configs.

  Attributes:
   - ouch: InvalidSwapOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidSwapOperation, InvalidSwapOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidSwapOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_swap_configs_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_meter_array_set_rates_args:
  """Thrift-generated argument struct for bm_meter_array_set_rates.

  Attributes:
   - cxt_id: i32 context id
   - meter_array_name: name of the meter array (string)
   - rates: list of BmMeterRateConfig structs
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'meter_array_name', None, None, ), # 2
    (3, TType.LIST, 'rates', (TType.STRUCT,(BmMeterRateConfig, BmMeterRateConfig.thrift_spec)), None, ), # 3
  )

  def __init__(self, cxt_id=None, meter_array_name=None, rates=None,):
    self.cxt_id = cxt_id
    self.meter_array_name = meter_array_name
    self.rates = rates

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.meter_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.rates = []
          (_etype131, _size128) = iprot.readListBegin()
          # range() instead of Py2-only xrange(): iteration-identical, Python 2/3 compatible.
          for _i132 in range(_size128):
            _elem133 = BmMeterRateConfig()
            _elem133.read(iprot)
            self.rates.append(_elem133)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_meter_array_set_rates_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.meter_array_name is not None:
      oprot.writeFieldBegin('meter_array_name', TType.STRING, 2)
      oprot.writeString(self.meter_array_name)
      oprot.writeFieldEnd()
    if self.rates is not None:
      oprot.writeFieldBegin('rates', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.rates))
      for iter134 in self.rates:
        iter134.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.meter_array_name)
    value = (value * 31) ^ hash(self.rates)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_meter_array_set_rates_result:
  """Thrift-generated result struct for bm_meter_array_set_rates.

  Attributes:
   - ouch: InvalidMeterOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidMeterOperation, InvalidMeterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidMeterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_meter_array_set_rates_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_meter_set_rates_args:
  """Thrift-generated argument struct for bm_meter_set_rates.

  Attributes:
   - cxt_id: i32 context id
   - meter_array_name: name of the meter array (string)
   - index: i32 meter index within the array
   - rates: list of BmMeterRateConfig structs
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'meter_array_name', None, None, ), # 2
    (3, TType.I32, 'index', None, None, ), # 3
    (4, TType.LIST, 'rates', (TType.STRUCT,(BmMeterRateConfig, BmMeterRateConfig.thrift_spec)), None, ), # 4
  )

  def __init__(self, cxt_id=None, meter_array_name=None, index=None, rates=None,):
    self.cxt_id = cxt_id
    self.meter_array_name = meter_array_name
    self.index = index
    self.rates = rates

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.meter_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.index = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.rates = []
          (_etype138, _size135) = iprot.readListBegin()
          # range() instead of Py2-only xrange(): iteration-identical, Python 2/3 compatible.
          for _i139 in range(_size135):
            _elem140 = BmMeterRateConfig()
            _elem140.read(iprot)
            self.rates.append(_elem140)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_meter_set_rates_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.meter_array_name is not None:
      oprot.writeFieldBegin('meter_array_name', TType.STRING, 2)
      oprot.writeString(self.meter_array_name)
      oprot.writeFieldEnd()
    if self.index is not None:
      oprot.writeFieldBegin('index', TType.I32, 3)
      oprot.writeI32(self.index)
      oprot.writeFieldEnd()
    if self.rates is not None:
      oprot.writeFieldBegin('rates', TType.LIST, 4)
      oprot.writeListBegin(TType.STRUCT, len(self.rates))
      for iter141 in self.rates:
        iter141.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.meter_array_name)
    value = (value * 31) ^ hash(self.index)
    value = (value * 31) ^ hash(self.rates)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_meter_set_rates_result:
  """Thrift-generated result struct for bm_meter_set_rates.

  Attributes:
   - ouch: InvalidMeterOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidMeterOperation, InvalidMeterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidMeterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_meter_set_rates_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_read_args:
  """Thrift-generated argument struct for bm_register_read.

  Attributes:
   - cxt_id: i32 context id
   - register_array_name: name of the register array (string)
   - idx: i32 register index within the array
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'register_array_name', None, None, ), # 2
    (3, TType.I32, 'idx', None, None, ), # 3
  )

  def __init__(self, cxt_id=None, register_array_name=None, idx=None,):
    self.cxt_id = cxt_id
    self.register_array_name = register_array_name
    self.idx = idx

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.register_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.idx = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_read_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.register_array_name is not None:
      oprot.writeFieldBegin('register_array_name', TType.STRING, 2)
      oprot.writeString(self.register_array_name)
      oprot.writeFieldEnd()
    if self.idx is not None:
      oprot.writeFieldBegin('idx', TType.I32, 3)
      oprot.writeI32(self.idx)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.register_array_name)
    value = (value * 31) ^ hash(self.idx)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_read_result:
  """Thrift-generated result struct for bm_register_read.

  Attributes:
   - success: i64 register value returned on success (field id 0)
   - ouch: InvalidRegisterOperation exception returned by the server, if any
  """

  thrift_spec = (
    (0, TType.I64, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidRegisterOperation, InvalidRegisterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I64:
          self.success = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidRegisterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_read_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I64, 0)
      oprot.writeI64(self.success)
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_write_args:
  """Thrift-generated argument struct for bm_register_write.

  Attributes:
   - cxt_id: i32 context id
   - register_array_name: name of the register array (string)
   - index: i32 register index within the array
   - value: i64 value to write
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'register_array_name', None, None, ), # 2
    (3, TType.I32, 'index', None, None, ), # 3
    (4, TType.I64, 'value', None, None, ), # 4
  )

  def __init__(self, cxt_id=None, register_array_name=None, index=None, value=None,):
    self.cxt_id = cxt_id
    self.register_array_name = register_array_name
    self.index = index
    self.value = value

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.register_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.index = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I64:
          self.value = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available). Unset (None) fields are skipped."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_write_args')
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.register_array_name is not None:
      oprot.writeFieldBegin('register_array_name', TType.STRING, 2)
      oprot.writeString(self.register_array_name)
      oprot.writeFieldEnd()
    if self.index is not None:
      oprot.writeFieldBegin('index', TType.I32, 3)
      oprot.writeI32(self.index)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I64, 4)
      oprot.writeI64(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.register_array_name)
    value = (value * 31) ^ hash(self.index)
    value = (value * 31) ^ hash(self.value)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_write_result:
  """Thrift-generated result struct for bm_register_write.

  Attributes:
   - ouch: InvalidRegisterOperation exception returned by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidRegisterOperation, InvalidRegisterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    """Deserialize this struct from iprot (C fastbinary fast path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidRegisterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot (C fastbinary fast path when available)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_write_result')
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    # items() instead of Py2-only iteritems(): same result, Python 2/3 compatible.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_write_range_args:
  """
  Thrift-generated argument struct for the bm_register_write_range RPC.

  Attributes:
   - cxt_id: i32 context id
   - register_array_name: string name of the target register array
   - start_index: i32 first index of the range
   - end_index: i32 last index of the range
   - value: i64 value written to every register in the range
  """

  # Field metadata consumed by the (de)serializers and the accelerated
  # codec: (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'register_array_name', None, None, ), # 2
    (3, TType.I32, 'start_index', None, None, ), # 3
    (4, TType.I32, 'end_index', None, None, ), # 4
    (5, TType.I64, 'value', None, None, ), # 5
  )

  def __init__(self, cxt_id=None, register_array_name=None, start_index=None, end_index=None, value=None,):
    self.cxt_id = cxt_id
    self.register_array_name = register_array_name
    self.start_index = start_index
    self.end_index = end_index
    self.value = value

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field deserialization.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 2:
        if ftype == TType.STRING:
          self.register_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.start_index = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.end_index = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I64:
          self.value = iprot.readI64()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_write_range_args')
    # Unset (None) fields are not emitted on the wire.
    if self.cxt_id is not None:
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.register_array_name is not None:
      oprot.writeFieldBegin('register_array_name', TType.STRING, 2)
      oprot.writeString(self.register_array_name)
      oprot.writeFieldEnd()
    if self.start_index is not None:
      oprot.writeFieldBegin('start_index', TType.I32, 3)
      oprot.writeI32(self.start_index)
      oprot.writeFieldEnd()
    if self.end_index is not None:
      oprot.writeFieldBegin('end_index', TType.I32, 4)
      oprot.writeI32(self.end_index)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I64, 5)
      oprot.writeI64(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # Generated 17/31 polynomial combination of the field hashes.
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.register_array_name)
    value = (value * 31) ^ hash(self.start_index)
    value = (value * 31) ^ hash(self.end_index)
    value = (value * 31) ^ hash(self.value)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_write_range_result:
  """
  Thrift-generated result struct for the bm_register_write_range RPC.

  Attributes:
   - ouch: InvalidRegisterOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidRegisterOperation, InvalidRegisterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidRegisterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_write_range_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_reset_args:
  """
  Thrift-generated argument struct for the bm_register_reset RPC.

  Attributes:
   - cxt_id: i32 context id
   - register_array_name: string name of the register array to reset
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'register_array_name', None, None, ), # 2
  )

  def __init__(self, cxt_id=None, register_array_name=None,):
    self.cxt_id = cxt_id
    self.register_array_name = register_array_name

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 2:
        if ftype == TType.STRING:
          self.register_array_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_reset_args')
    if self.cxt_id is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.register_array_name is not None:
      oprot.writeFieldBegin('register_array_name', TType.STRING, 2)
      oprot.writeString(self.register_array_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.register_array_name)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_register_reset_result:
  """
  Thrift-generated result struct for the bm_register_reset RPC.

  Attributes:
   - ouch: InvalidRegisterOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidRegisterOperation, InvalidRegisterOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidRegisterOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_register_reset_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_add_port_args:
  """
  Thrift-generated argument struct for the bm_dev_mgr_add_port RPC.

  Attributes:
   - iface_name: string name of the network interface to attach
   - port_num: i32 port number to bind the interface to
   - pcap_path: string pcap dump path (meaning defined by the server side)
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'iface_name', None, None, ), # 1
    (2, TType.I32, 'port_num', None, None, ), # 2
    (3, TType.STRING, 'pcap_path', None, None, ), # 3
  )

  def __init__(self, iface_name=None, port_num=None, pcap_path=None,):
    self.iface_name = iface_name
    self.port_num = port_num
    self.pcap_path = pcap_path

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.iface_name = iprot.readString()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 2:
        if ftype == TType.I32:
          self.port_num = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.pcap_path = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_add_port_args')
    if self.iface_name is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('iface_name', TType.STRING, 1)
      oprot.writeString(self.iface_name)
      oprot.writeFieldEnd()
    if self.port_num is not None:
      oprot.writeFieldBegin('port_num', TType.I32, 2)
      oprot.writeI32(self.port_num)
      oprot.writeFieldEnd()
    if self.pcap_path is not None:
      oprot.writeFieldBegin('pcap_path', TType.STRING, 3)
      oprot.writeString(self.pcap_path)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.iface_name)
    value = (value * 31) ^ hash(self.port_num)
    value = (value * 31) ^ hash(self.pcap_path)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_add_port_result:
  """
  Thrift-generated result struct for the bm_dev_mgr_add_port RPC.

  Attributes:
   - ouch: InvalidDevMgrOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidDevMgrOperation, InvalidDevMgrOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidDevMgrOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_add_port_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_remove_port_args:
  """
  Thrift-generated argument struct for the bm_dev_mgr_remove_port RPC.

  Attributes:
   - port_num: i32 number of the port to remove
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'port_num', None, None, ), # 1
  )

  def __init__(self, port_num=None,):
    self.port_num = port_num

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.port_num = iprot.readI32()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_remove_port_args')
    if self.port_num is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('port_num', TType.I32, 1)
      oprot.writeI32(self.port_num)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.port_num)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_remove_port_result:
  """
  Thrift-generated result struct for the bm_dev_mgr_remove_port RPC.

  Attributes:
   - ouch: InvalidDevMgrOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidDevMgrOperation, InvalidDevMgrOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidDevMgrOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_remove_port_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_show_ports_args:
  """Thrift-generated argument struct for bm_dev_mgr_show_ports (no arguments)."""

  # This RPC takes no parameters, hence the empty spec.
  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)  # no declared fields: skip anything present
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_show_ports_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # No fields; all instances hash identically.
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_dev_mgr_show_ports_result:
  """
  Thrift-generated result struct for the bm_dev_mgr_show_ports RPC.

  Attributes:
   - success: list of DevMgrPortInfo structs describing the ports
   - ouch: InvalidDevMgrOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(DevMgrPortInfo, DevMgrPortInfo.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'ouch', (InvalidDevMgrOperation, InvalidDevMgrOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, ouch=None,):
    self.success = success
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Deserialize the list of DevMgrPortInfo structs element by element.
          self.success = []
          (_etype145, _size142) = iprot.readListBegin()
          for _i146 in xrange(_size142):
            _elem147 = DevMgrPortInfo()
            _elem147.read(iprot)
            self.success.append(_elem147)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidDevMgrOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_dev_mgr_show_ports_result')
    if self.success is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter148 in self.success:
        iter148.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.ouch is not None:
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # BUG FIX: the generated code hashed self.success directly, but
    # 'success' is a list (populated on every successful call), which is
    # unhashable and made hash() raise TypeError. Hash an immutable tuple
    # snapshot of the list instead; None still hashes as None.
    value = 17
    success = self.success if self.success is None else tuple(self.success)
    value = (value * 31) ^ hash(success)
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mgmt_get_info_args:
  """Thrift-generated argument struct for bm_mgmt_get_info (no arguments)."""

  # This RPC takes no parameters, hence the empty spec.
  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)  # no declared fields: skip anything present
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mgmt_get_info_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # No fields; all instances hash identically.
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_mgmt_get_info_result:
  """
  Thrift-generated result struct for the bm_mgmt_get_info RPC.

  Attributes:
   - success: BmConfig struct returned by the server
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (BmConfig, BmConfig.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = BmConfig()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_mgmt_get_info_result')
    if self.success is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_set_crc16_custom_parameters_args:
  """
  Thrift-generated argument struct for the bm_set_crc16_custom_parameters RPC.

  Attributes:
   - cxt_id: i32 context id
   - calc_name: string name of the calculation to configure
   - crc16_config: BmCrc16Config struct with the custom CRC-16 parameters
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'calc_name', None, None, ), # 2
    (3, TType.STRUCT, 'crc16_config', (BmCrc16Config, BmCrc16Config.thrift_spec), None, ), # 3
  )

  def __init__(self, cxt_id=None, calc_name=None, crc16_config=None,):
    self.cxt_id = cxt_id
    self.calc_name = calc_name
    self.crc16_config = crc16_config

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 2:
        if ftype == TType.STRING:
          self.calc_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.crc16_config = BmCrc16Config()
          self.crc16_config.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_set_crc16_custom_parameters_args')
    if self.cxt_id is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.calc_name is not None:
      oprot.writeFieldBegin('calc_name', TType.STRING, 2)
      oprot.writeString(self.calc_name)
      oprot.writeFieldEnd()
    if self.crc16_config is not None:
      oprot.writeFieldBegin('crc16_config', TType.STRUCT, 3)
      self.crc16_config.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.calc_name)
    value = (value * 31) ^ hash(self.crc16_config)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_set_crc16_custom_parameters_result:
  """
  Thrift-generated result struct for the bm_set_crc16_custom_parameters RPC.

  Attributes:
   - ouch: InvalidCrcOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidCrcOperation, InvalidCrcOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidCrcOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_set_crc16_custom_parameters_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_set_crc32_custom_parameters_args:
  """
  Thrift-generated argument struct for the bm_set_crc32_custom_parameters RPC.

  Attributes:
   - cxt_id: i32 context id
   - calc_name: string name of the calculation to configure
   - crc32_config: BmCrc32Config struct with the custom CRC-32 parameters
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'cxt_id', None, None, ), # 1
    (2, TType.STRING, 'calc_name', None, None, ), # 2
    (3, TType.STRUCT, 'crc32_config', (BmCrc32Config, BmCrc32Config.thrift_spec), None, ), # 3
  )

  def __init__(self, cxt_id=None, calc_name=None, crc32_config=None,):
    self.cxt_id = cxt_id
    self.calc_name = calc_name
    self.crc32_config = crc32_config

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.cxt_id = iprot.readI32()
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      elif fid == 2:
        if ftype == TType.STRING:
          self.calc_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.crc32_config = BmCrc32Config()
          self.crc32_config.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_set_crc32_custom_parameters_args')
    if self.cxt_id is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('cxt_id', TType.I32, 1)
      oprot.writeI32(self.cxt_id)
      oprot.writeFieldEnd()
    if self.calc_name is not None:
      oprot.writeFieldBegin('calc_name', TType.STRING, 2)
      oprot.writeString(self.calc_name)
      oprot.writeFieldEnd()
    if self.crc32_config is not None:
      oprot.writeFieldBegin('crc32_config', TType.STRUCT, 3)
      self.crc32_config.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.cxt_id)
    value = (value * 31) ^ hash(self.calc_name)
    value = (value * 31) ^ hash(self.crc32_config)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_set_crc32_custom_parameters_result:
  """
  Thrift-generated result struct for the bm_set_crc32_custom_parameters RPC.

  Attributes:
   - ouch: InvalidCrcOperation thrown by the server, if any
  """

  # (field id, wire type, name, nested spec, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'ouch', (InvalidCrcOperation, InvalidCrcOperation.thrift_spec), None, ), # 1
  )

  def __init__(self, ouch=None,):
    self.ouch = ouch

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ouch = InvalidCrcOperation()
          self.ouch.read(iprot)
        else:
          iprot.skip(ftype)  # wrong wire type: discard payload
      else:
        iprot.skip(ftype)  # unknown field id: skip for forward compatibility
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_set_crc32_custom_parameters_result')
    if self.ouch is not None:  # unset fields are not emitted
      oprot.writeFieldBegin('ouch', TType.STRUCT, 1)
      self.ouch.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.ouch)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_reset_state_args:
  """Thrift-generated argument struct for bm_reset_state (no arguments)."""

  # This RPC takes no parameters, hence the empty spec.
  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)  # no declared fields: skip anything present
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_reset_state_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # No fields; all instances hash identically.
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_reset_state_result:
  """Thrift-generated result struct for bm_reset_state (void return, no exceptions)."""

  # No result fields, hence the empty spec.
  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: accelerated (fastbinary) decoding when supported.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)  # no declared fields: skip anything present
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: accelerated encoding when supported.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('bm_reset_state_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required fields to check.
    return

  def __hash__(self):
    # No fields; all instances hash identically.
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class bm_get_config_args:
    """Thrift argument struct for the bm_get_config RPC (no fields)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # This struct declares no fields; skip whatever arrives.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_get_config_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: the struct has no required fields."""
        return

    def __hash__(self):
        # Seed of the generated 17/31 hashing scheme; no fields to mix in.
        value = 17
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_get_config_result:
    """Thrift result struct for the bm_get_config RPC.

    Attributes:
     - success: string value returned by the call (Thrift field id 0).
    """

    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ), # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    # Wrong wire type for this field id; ignore the value.
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (success only when set)."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_get_config_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: success is optional."""
        return

    def __hash__(self):
        # Generated 17/31 hashing scheme over the struct's fields.
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_get_config_md5_args:
    """Thrift argument struct for the bm_get_config_md5 RPC (no fields)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # This struct declares no fields; skip whatever arrives.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_get_config_md5_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: the struct has no required fields."""
        return

    def __hash__(self):
        # Seed of the generated 17/31 hashing scheme; no fields to mix in.
        value = 17
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_get_config_md5_result:
    """Thrift result struct for the bm_get_config_md5 RPC.

    Attributes:
     - success: string value returned by the call (Thrift field id 0).
    """

    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ), # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    # Wrong wire type for this field id; ignore the value.
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (success only when set)."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_get_config_md5_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: success is optional."""
        return

    def __hash__(self):
        # Generated 17/31 hashing scheme over the struct's fields.
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_serialize_state_args:
    """Thrift argument struct for the bm_serialize_state RPC (no fields)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # This struct declares no fields; skip whatever arrives.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_serialize_state_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: the struct has no required fields."""
        return

    def __hash__(self):
        # Seed of the generated 17/31 hashing scheme; no fields to mix in.
        value = 17
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class bm_serialize_state_result:
    """Thrift result struct for the bm_serialize_state RPC.

    Attributes:
     - success: string value returned by the call (Thrift field id 0).
    """

    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ), # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        # Fast path: C-accelerated decode when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    # Wrong wire type for this field id; ignore the value.
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot* (success only when set)."""
        # Fast path: C-accelerated encode when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('bm_serialize_state_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Nothing to validate: success is optional."""
        return

    def __hash__(self):
        # Generated 17/31 hashing scheme over the struct's fields.
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        # .items() instead of the Python-2-only .iteritems() so repr also
        # works under Python 3; Python 2 behavior is unchanged.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 31.773003
| 188
| 0.673699
| 47,644
| 382,261
| 5.082907
| 0.007115
| 0.016868
| 0.026312
| 0.024082
| 0.980675
| 0.968204
| 0.948974
| 0.930916
| 0.91482
| 0.90031
| 0
| 0.01027
| 0.216998
| 382,261
| 12,030
| 189
| 31.775644
| 0.798821
| 0.024842
| 0
| 0.886373
| 1
| 0
| 0.034897
| 0.014916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.116681
| false
| 0.005581
| 0.000737
| 0.033488
| 0.225779
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eae15cb2e90635597cce9d9aee3b73ed80ceafe8
| 25,750
|
py
|
Python
|
ce_api/api/workspaces_api.py
|
maiot-io/cengine
|
3a1946c449e8c5e1d216215df6eeab941eb1640a
|
[
"Apache-2.0"
] | 7
|
2020-10-13T12:47:32.000Z
|
2021-03-12T12:00:14.000Z
|
ce_api/api/workspaces_api.py
|
maiot-io/cengine
|
3a1946c449e8c5e1d216215df6eeab941eb1640a
|
[
"Apache-2.0"
] | null | null | null |
ce_api/api/workspaces_api.py
|
maiot-io/cengine
|
3a1946c449e8c5e1d216215df6eeab941eb1640a
|
[
"Apache-2.0"
] | 1
|
2021-01-23T02:19:42.000Z
|
2021-01-23T02:19:42.000Z
|
# coding: utf-8
"""
maiot Core Engine API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 0.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ce_api.api_client import ApiClient
class WorkspacesApi(object):
    """Client for the workspace endpoints of the maiot Core Engine API.

    NOTE: This class was originally auto generated by the swagger code
    generator program (Ref: https://github.com/swagger-api/swagger-codegen).
    The request-building boilerplate that was duplicated across every
    endpoint has been factored into the private helpers ``_dispatch`` and
    ``_call_endpoint``; all public method names, signatures, paths,
    response types and error messages are unchanged.
    """

    def __init__(self, api_client=None):
        # Fall back to a default client so the API is usable standalone.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _dispatch(self, http_info_method, *args, **kwargs):
        """Run *http_info_method* synchronously or asynchronously.

        Replicates the generated sync/async pattern: with async_req=True
        the request thread is returned, otherwise only the response data.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return http_info_method(*args, **kwargs)
        (data) = http_info_method(*args, **kwargs)
        return data

    def _call_endpoint(self, method_name, resource_path, http_method,
                       response_type, required, kwargs,
                       path_param_names=(), query_param_names=(),
                       body_param_name=None):
        """Validate parameters and issue the HTTP request.

        :param method_name: public method name used in error messages.
        :param resource_path: endpoint path template.
        :param http_method: HTTP verb, e.g. 'GET' or 'POST'.
        :param response_type: swagger response type string.
        :param required: ordered list of (name, value) required parameters.
        :param kwargs: caller keyword arguments, validated here.
        :param path_param_names: names substituted into the path template.
        :param query_param_names: optional query-string parameter names.
        :param body_param_name: name of the JSON body parameter, if any.
        :return: result of ApiClient.call_api.
        :raises TypeError: on an unexpected keyword argument.
        :raises ValueError: when a required parameter is missing/None.
        """
        params = dict(required)
        all_params = [name for name, _ in required]
        all_params += list(query_param_names)
        all_params += ['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout']
        # kwargs is always a plain dict, so .items() replaces the former
        # six.iteritems() call with identical behavior on Python 2 and 3.
        for key, val in kwargs.items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name))
            params[key] = val
        # verify the required parameters are set
        for name, _ in required:
            if params.get(name) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, method_name))

        collection_formats = {}
        path_params = {}
        for name in path_param_names:
            if name in params:
                path_params[name] = params[name]
        query_params = []
        for name in query_param_names:
            if name in params:
                query_params.append((name, params[name]))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if body_param_name is not None and body_param_name in params:
            body_params = params[body_param_name]
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        if body_param_name is not None:
            # HTTP header `Content-Type` — only endpoints that send a body.
            header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
                ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['OAuth2PasswordBearer']  # noqa: E501
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=response_type,
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def create_workspace_api_v1_workspaces_post(self, body, **kwargs):  # noqa: E501
        """Create Workspace  # noqa: E501

        Create new workspace for logged in user.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_workspace_api_v1_workspaces_post(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param WorkspaceIn body: (required)
        :return: Workspace
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.create_workspace_api_v1_workspaces_post_with_http_info,
            body, **kwargs)

    def create_workspace_api_v1_workspaces_post_with_http_info(self, body, **kwargs):  # noqa: E501
        """Create Workspace — request-thread/raw variant.

        Same contract as create_workspace_api_v1_workspaces_post; returns
        the request thread when async_req=True.

        :param async_req bool
        :param WorkspaceIn body: (required)
        :return: Workspace
        """
        return self._call_endpoint(
            'create_workspace_api_v1_workspaces_post',
            '/api/v1/workspaces/', 'POST', 'Workspace',
            [('body', body)], kwargs,
            body_param_name='body')

    def get_hyperparameters_api_v1_workspaces_workspace_id_hyperparameters_get(self, workspace_id, **kwargs):  # noqa: E501
        """Get Hyperparameters  # noqa: E501

        Gets the logged in users workspace hyperparameter list.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_hyperparameters_api_v1_workspaces_workspace_id_hyperparameters_get(workspace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str workspace_id: (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.get_hyperparameters_api_v1_workspaces_workspace_id_hyperparameters_get_with_http_info,
            workspace_id, **kwargs)

    def get_hyperparameters_api_v1_workspaces_workspace_id_hyperparameters_get_with_http_info(self, workspace_id, **kwargs):  # noqa: E501
        """Get Hyperparameters — request-thread/raw variant.

        :param async_req bool
        :param str workspace_id: (required)
        :return: object
        """
        return self._call_endpoint(
            'get_hyperparameters_api_v1_workspaces_workspace_id_hyperparameters_get',
            '/api/v1/workspaces/{workspace_id}/hyperparameters', 'GET', 'object',
            [('workspace_id', workspace_id)], kwargs,
            path_param_names=('workspace_id',))

    def get_loggedin_workspaces_api_v1_workspaces_get(self, **kwargs):  # noqa: E501
        """Get Loggedin Workspaces  # noqa: E501

        Gets the logged in users workspace details.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_loggedin_workspaces_api_v1_workspaces_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: list[Workspace]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.get_loggedin_workspaces_api_v1_workspaces_get_with_http_info,
            **kwargs)

    def get_loggedin_workspaces_api_v1_workspaces_get_with_http_info(self, **kwargs):  # noqa: E501
        """Get Loggedin Workspaces — request-thread/raw variant.

        :param async_req bool
        :return: list[Workspace]
        """
        return self._call_endpoint(
            'get_loggedin_workspaces_api_v1_workspaces_get',
            '/api/v1/workspaces/', 'GET', 'list[Workspace]',
            [], kwargs)

    def get_workspace_api_v1_workspaces_workspace_id_get(self, workspace_id, **kwargs):  # noqa: E501
        """Get Workspace  # noqa: E501

        Gets the workspace specified by ID  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_workspace_api_v1_workspaces_workspace_id_get(workspace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str workspace_id: (required)
        :return: Workspace
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.get_workspace_api_v1_workspaces_workspace_id_get_with_http_info,
            workspace_id, **kwargs)

    def get_workspace_api_v1_workspaces_workspace_id_get_with_http_info(self, workspace_id, **kwargs):  # noqa: E501
        """Get Workspace — request-thread/raw variant.

        :param async_req bool
        :param str workspace_id: (required)
        :return: Workspace
        """
        return self._call_endpoint(
            'get_workspace_api_v1_workspaces_workspace_id_get',
            '/api/v1/workspaces/{workspace_id}', 'GET', 'Workspace',
            [('workspace_id', workspace_id)], kwargs,
            path_param_names=('workspace_id',))

    def get_workspaces_pipeline_by_id_api_v1_workspaces_workspace_id_pipelines_pipeline_id_get(self, workspace_id, pipeline_id, **kwargs):  # noqa: E501
        """Get Workspaces Pipeline By Id  # noqa: E501

        Gets the pipeline specified by ID  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_workspaces_pipeline_by_id_api_v1_workspaces_workspace_id_pipelines_pipeline_id_get(workspace_id, pipeline_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str workspace_id: (required)
        :param str pipeline_id: (required)
        :return: Pipeline
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.get_workspaces_pipeline_by_id_api_v1_workspaces_workspace_id_pipelines_pipeline_id_get_with_http_info,
            workspace_id, pipeline_id, **kwargs)

    def get_workspaces_pipeline_by_id_api_v1_workspaces_workspace_id_pipelines_pipeline_id_get_with_http_info(self, workspace_id, pipeline_id, **kwargs):  # noqa: E501
        """Get Workspaces Pipeline By Id — request-thread/raw variant.

        :param async_req bool
        :param str workspace_id: (required)
        :param str pipeline_id: (required)
        :return: Pipeline
        """
        return self._call_endpoint(
            'get_workspaces_pipeline_by_id_api_v1_workspaces_workspace_id_pipelines_pipeline_id_get',
            '/api/v1/workspaces/{workspace_id}/pipelines/{pipeline_id}', 'GET', 'Pipeline',
            [('workspace_id', workspace_id), ('pipeline_id', pipeline_id)], kwargs,
            path_param_names=('workspace_id', 'pipeline_id'))

    def get_workspaces_pipelines_api_v1_workspaces_workspace_id_pipelines_get(self, workspace_id, **kwargs):  # noqa: E501
        """Get Workspaces Pipelines  # noqa: E501

        Gets the pipeline specified by ID  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_workspaces_pipelines_api_v1_workspaces_workspace_id_pipelines_get(workspace_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str workspace_id: (required)
        :param str hparam_name:
        :param str hparam_value:
        :return: list[Pipeline]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._dispatch(
            self.get_workspaces_pipelines_api_v1_workspaces_workspace_id_pipelines_get_with_http_info,
            workspace_id, **kwargs)

    def get_workspaces_pipelines_api_v1_workspaces_workspace_id_pipelines_get_with_http_info(self, workspace_id, **kwargs):  # noqa: E501
        """Get Workspaces Pipelines — request-thread/raw variant.

        :param async_req bool
        :param str workspace_id: (required)
        :param str hparam_name:
        :param str hparam_value:
        :return: list[Pipeline]
        """
        return self._call_endpoint(
            'get_workspaces_pipelines_api_v1_workspaces_workspace_id_pipelines_get',
            '/api/v1/workspaces/{workspace_id}/pipelines', 'GET', 'list[Pipeline]',
            [('workspace_id', workspace_id)], kwargs,
            path_param_names=('workspace_id',),
            query_param_names=('hparam_name', 'hparam_value'))
| 41.801948
| 193
| 0.642175
| 3,010
| 25,750
| 5.154485
| 0.055814
| 0.071608
| 0.052208
| 0.057235
| 0.951337
| 0.944763
| 0.933355
| 0.919175
| 0.907187
| 0.889655
| 0
| 0.017851
| 0.275573
| 25,750
| 615
| 194
| 41.869919
| 0.813874
| 0.313049
| 0
| 0.761468
| 1
| 0
| 0.21172
| 0.082017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039755
| false
| 0.018349
| 0.012232
| 0
| 0.110092
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eaeb90ba285f846c20db04f657d9b6c33dc6ea0c
| 63
|
py
|
Python
|
foo/src/foo/module_name.py
|
knedlsepp/nix-shell-for-python-multi-project-repos
|
bd60bd0001448f8f44d6aeb71d2e186c7fd5d523
|
[
"MIT"
] | null | null | null |
foo/src/foo/module_name.py
|
knedlsepp/nix-shell-for-python-multi-project-repos
|
bd60bd0001448f8f44d6aeb71d2e186c7fd5d523
|
[
"MIT"
] | null | null | null |
foo/src/foo/module_name.py
|
knedlsepp/nix-shell-for-python-multi-project-repos
|
bd60bd0001448f8f44d6aeb71d2e186c7fd5d523
|
[
"MIT"
] | null | null | null |
import bar


def sum(x, y):
    """Add *x* and *y* by delegating to ``bar.module_name.sum``.

    NOTE: this intentionally shadows the builtin ``sum`` — the module is a
    thin re-export wrapper around ``bar.module_name``.
    """
    # Resolve the delegate at call time (like the original attribute
    # lookup) so later monkey-patching of bar.module_name still works.
    delegate = bar.module_name.sum
    return delegate(x, y)
| 15.75
| 36
| 0.666667
| 13
| 63
| 3.153846
| 0.692308
| 0.195122
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 63
| 3
| 37
| 21
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d802df6e21185f11f3ad61679c5765f9f67baec0
| 182
|
py
|
Python
|
developers_chamber/django/context_processors.py
|
cedel1/developers-chamber
|
06d3e64b333c6c4c2cdad3ca8a11165396a92ca5
|
[
"MIT"
] | null | null | null |
developers_chamber/django/context_processors.py
|
cedel1/developers-chamber
|
06d3e64b333c6c4c2cdad3ca8a11165396a92ca5
|
[
"MIT"
] | null | null | null |
developers_chamber/django/context_processors.py
|
cedel1/developers-chamber
|
06d3e64b333c6c4c2cdad3ca8a11165396a92ca5
|
[
"MIT"
] | null | null | null |
from developers_chamber.django import get_project_info_dict
def get_project_info(request):
    """Django context processor exposing project info to templates.

    Each key from ``get_project_info_dict()`` is upper-cased and prefixed
    with ``PROJECT_`` so templates see e.g. ``PROJECT_VERSION``.
    The ``request`` argument is required by the context-processor
    protocol but unused.
    """
    return {
        'PROJECT_' + key.upper(): value
        for key, value in get_project_info_dict().items()
    }
| 30.333333
| 88
| 0.763736
| 29
| 182
| 4.448276
| 0.62069
| 0.232558
| 0.325581
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120879
| 182
| 5
| 89
| 36.4
| 0.80625
| 0
| 0
| 0
| 0
| 0
| 0.043956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
dc46138b6e84c5d94ac065cb006fac7e7c6b997d
| 88
|
py
|
Python
|
models/__init__.py
|
allenai/elastic
|
57345c600c63fbde163c41929d6d6dd894d408ce
|
[
"Apache-2.0"
] | 96
|
2019-04-10T14:23:21.000Z
|
2022-01-05T01:30:03.000Z
|
models/__init__.py
|
allenai/elastic
|
57345c600c63fbde163c41929d6d6dd894d408ce
|
[
"Apache-2.0"
] | 9
|
2019-04-22T12:53:22.000Z
|
2022-03-03T13:29:57.000Z
|
models/__init__.py
|
allenai/elastic
|
57345c600c63fbde163c41929d6d6dd894d408ce
|
[
"Apache-2.0"
] | 11
|
2019-07-02T06:09:03.000Z
|
2020-12-01T02:58:48.000Z
|
from .densenet import *
from .dla import *
from .dla_up import *
from .resnext import *
| 17.6
| 23
| 0.727273
| 13
| 88
| 4.846154
| 0.461538
| 0.47619
| 0.412698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 4
| 24
| 22
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dca9dd0185300613251539ca1ec19b85dbe019fe
| 306
|
py
|
Python
|
pynumdiff/optimize/kalman_smooth/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/optimize/kalman_smooth/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/optimize/kalman_smooth/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_velocity as constant_velocity
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_acceleration as constant_acceleration
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_jerk as constant_jerk
| 76.5
| 109
| 0.911765
| 39
| 306
| 6.538462
| 0.282051
| 0.282353
| 0.247059
| 0.317647
| 0.694118
| 0.694118
| 0.694118
| 0.694118
| 0.694118
| 0
| 0
| 0
| 0.058824
| 306
| 3
| 110
| 102
| 0.885417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f4cf9f7748136eb1c82f5d2f92513a308020232c
| 6,226
|
py
|
Python
|
SemifinalRun.py
|
project-radian-robosub/Robosub2019
|
645ea115ba1f19415cd5242e1b3d65ecf215e901
|
[
"MIT"
] | 1
|
2021-04-21T18:01:39.000Z
|
2021-04-21T18:01:39.000Z
|
SemifinalRun.py
|
project-radian-robosub/Robosub2019
|
645ea115ba1f19415cd5242e1b3d65ecf215e901
|
[
"MIT"
] | 1
|
2019-08-01T21:00:53.000Z
|
2019-08-01T21:00:53.000Z
|
SemifinalRun.py
|
project-radian-robosub/Robosub2019
|
645ea115ba1f19415cd5242e1b3d65ecf215e901
|
[
"MIT"
] | 5
|
2019-04-09T18:44:29.000Z
|
2022-03-15T19:37:13.000Z
|
import time
from Vision.BackgroundSubtractor import VisionV3
import Control
# Vision pipeline: camera 0, background-subtraction detector.
v1 = VisionV3(0)
# Generator yielding detections (top-left/bottom-right corners or None).
gen = v1.vision_generator(True)
# Alias the Control module so motor/IMU/pressure calls read as ctr.*.
ctr = Control
# Global kill switch: start killed until the arduino handshake completes.
killed = True
# Target heading (degrees) for the gate run.
gate_tar = 0
# Target pressure reading for the first dive; p_thresh gives a small
# dead band below the target.
p_tar = 1085
p_thresh = p_tar - 10
# Pressure-units step used to chase the target vertically.
depth_increment = 2
def get_killed():
    """Report the current state of the global kill switch."""
    return killed
def set_killed(val):
    """Overwrite the module-level ``killed`` flag with *val*."""
    global killed
    killed = val
# One control tick: push IMU/pressure corrections plus the requested
# per-motor powers (m2..m7) to the thrusters, then check the arduino
# reset line and trip the global kill switch if it fired.
# NOTE(review): original indentation was lost in this dump; lines are
# kept verbatim, comments only are added.
def drive(m2=0, m3=0, m4=0, m5=0, m6=0, m7=0):
ctr.set_imu_powers()
ctr.set_pressure_powers()
ctr.set_move_powers(m2, m3, m4, m5, m6, m7)
ctr.set_motor_powers()
# Telemetry for the operator console.
print(ctr.imu.get_angles(), ctr.imu.get_angle_pid(), ctr.imu.center_z)
# A reset from the arduino means the hardware e-stop fired.
if ctr.MotorMovement.check_reset():
set_killed(True)
print('KILLED')
# ======================================================================
# Semifinal mission script (top level).
# NOTE(review): original indentation was lost in this dump; statement
# order is preserved verbatim and only comments are added.  Structure
# (which lines sit inside which loop) should be confirmed against the
# repository before refactoring.
# ======================================================================
# --- Arm: block until the arduino handshake, then clear the kill flag.
while killed:
ctr.MotorMovement.wait_for_arduino()
killed = False
ctr.stop_all()
time.sleep(0.1)
try:
# Point at the gate heading and dive to the first pressure target.
ctr.imu.set_z(gate_tar)
ctr.pressure.set_tar(p_tar)
while ctr.pressure.get_val() < p_thresh and not killed:
drive()
timer1 = time.perf_counter()
timer2 = time.perf_counter()
gate_flag = False
seen = False
# --- Search: drive forward until the gate stays visible > 0.2 s.
# gen.__next__() yields a detection (or None); timer1/timer2 bracket
# how long the detection has persisted.
while gate_flag is False and not killed: # forward
if not gen.__next__() is None and not seen:
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = True
if gen.__next__() is None and seen:
seen = False
if timer2 - timer1 > 0.2 and seen is True:
gate_flag = True
drive(m2=75, m7=75)
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = True
timer3 = time.perf_counter()
timer4 = time.perf_counter()
# --- Track: steer toward the gate center for at most 20 s, or until
# the detection is lost for > 0.2 s.
while gate_flag is True and timer4 - timer3 < 20 and not killed: # forward
if gen.__next__() is None and seen:
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = False
if not gen.__next__() is None and not seen:
seen = True
if timer2 - timer1 > 0.2 and seen is True:
gate_flag = False
# Bounding box -> pixel-space error from frame center (640x480),
# clamped to +/-100 before being scaled into motor powers.
tl, br = gen.__next__()
x = (tl[0] + br[0]) / 2
y = (tl[1] + br[1]) / 2
x_pow = 320 - x
y_pow = 240 - y
if x_pow > 100:
x_pow = 100
elif x_pow < -100:
x_pow = -100
if y_pow > 100:
y_pow = 100
elif y_pow < -100:
y_pow = -100
drive(m2=75, m3=x_pow * 1.5, m6=x_pow * 1.5, m7=75)
timer4 = time.perf_counter()
# --- Style points: hold, then two full spins in 120-degree steps.
timer1 = time.perf_counter()
timer2 = time.perf_counter()
while timer2 - timer1 < 3 and not killed: # stop
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(ctr.change_heading(gate_tar, 120))
while timer2 - timer1 < 6 and not killed: # spin
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(ctr.change_heading(gate_tar, 240))
while timer2 - timer1 < 9 and not killed: # spin
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(gate_tar)
timer1 = time.perf_counter()
timer2 = time.perf_counter()
while timer2 - timer1 < 3 and not killed: # spin
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(ctr.change_heading(gate_tar, 120))
while timer2 - timer1 < 6 and not killed: # spin
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(ctr.change_heading(gate_tar, 240))
while timer2 - timer1 < 9 and not killed: # spin
drive()
timer2 = time.perf_counter()
ctr.imu.set_z(gate_tar)
# --- Settle on the gate heading, then push forward for 3 s.
timer1 = time.perf_counter()
timer2 = time.perf_counter()
while timer2 - timer1 < 4 and not killed: # stabilize
drive()
timer2 = time.perf_counter()
timer1 = time.perf_counter()
timer2 = time.perf_counter()
while timer2 - timer1 < 3 and not killed:
drive(m2=75, m7=75)
timer2 = time.perf_counter()
# --- Second leg: deeper target, re-run the search/track sequence.
p_tar = 1270
ctr.pressure.set_tar(p_tar)
while ctr.pressure.get_val() < p_thresh and not killed: # dive
drive()
timer2 = time.perf_counter()
while gate_flag is False and not killed: # forward
if not gen.__next__() is None and not seen:
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = True
if gen.__next__() is None and seen:
seen = False
if timer2 - timer1 > 0.2 and seen is True:
gate_flag = True
drive(m2=75, m7=75)
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = True
timer3 = time.perf_counter()
timer4 = time.perf_counter()
while gate_flag is True and timer4 - timer3 < 20 and not killed: # forward
if gen.__next__() is None and seen:
timer1 = time.perf_counter()
timer2 = time.perf_counter()
seen = False
if not gen.__next__() is None and not seen:
seen = True
if timer2 - timer1 > 0.2 and seen is True:
gate_flag = False
tl, br = gen.__next__()
x = (tl[0] + br[0]) / 2
y = (tl[1] + br[1]) / 2
x_pow = 320 - x
y_pow = 240 - y
if x_pow > 100:
x_pow = 100
elif x_pow < -100:
x_pow = -100
if y_pow > 100:
y_pow = 100
elif y_pow < -100:
y_pow = -100
# Vertical chase: nudge the pressure target toward the detection.
if y_pow > 0:
p_tar += depth_increment
ctr.pressure.set_tar(p_tar)
if y_pow < 0:
p_tar -= depth_increment
ctr.pressure.set_tar(p_tar)
drive(m2=75, m3=x_pow * 1.5, m6=x_pow * 1.5, m7=75)
timer4 = time.perf_counter()
# Operator abort: stop thrusters cleanly (finally runs in all paths).
except KeyboardInterrupt:
ctr.stop_all()
print("end")
finally:
ctr.stop_all()
print("end")
| 29.647619
| 83
| 0.521523
| 810
| 6,226
| 3.793827
| 0.125926
| 0.096323
| 0.180605
| 0.136674
| 0.79629
| 0.773186
| 0.767654
| 0.762773
| 0.749105
| 0.749105
| 0
| 0.065155
| 0.383713
| 6,226
| 209
| 84
| 29.789474
| 0.735731
| 0.012207
| 0
| 0.77907
| 0
| 0
| 0.001955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017442
| false
| 0
| 0.017442
| 0.005814
| 0.040698
| 0.023256
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7602cf8cef4268b6ce74c74a91fddd83d61812af
| 36,688
|
py
|
Python
|
GrowCabApi/api/chambers_api.py
|
GrowCab/GC_hardware
|
0ed7f2eca9bac678726aee730741fe28de7a989a
|
[
"MIT"
] | null | null | null |
GrowCabApi/api/chambers_api.py
|
GrowCab/GC_hardware
|
0ed7f2eca9bac678726aee730741fe28de7a989a
|
[
"MIT"
] | null | null | null |
GrowCabApi/api/chambers_api.py
|
GrowCab/GC_hardware
|
0ed7f2eca9bac678726aee730741fe28de7a989a
|
[
"MIT"
] | null | null | null |
"""
GrowCab API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from GrowCabApi.api_client import ApiClient, Endpoint as _Endpoint
from GrowCabApi.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from GrowCabApi.model.chamber import Chamber
from GrowCabApi.model.chamber_power_status import ChamberPowerStatus
from GrowCabApi.model.chamber_status import ChamberStatus
from GrowCabApi.model.error import Error
from GrowCabApi.model.measure import Measure
from GrowCabApi.model.measure_group import MeasureGroup
from GrowCabApi.model.sensor_unit import SensorUnit
from GrowCabApi.model.unit import Unit
class ChambersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __get_chamber(
    self,
    chamber_id,
    **kwargs
):
    """Get the chamber and related objects (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        Chamber, or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    return self.call_with_http_info(**kwargs)
self.get_chamber = _Endpoint(
settings={
'response_type': (Chamber,),
'auth': [],
'endpoint_path': '/api/chamber/{chamber_id}',
'operation_id': 'get_chamber',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'chamber_id',
],
'required': [
'chamber_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chamber
)
def __get_chamber_power_status(
    self,
    chamber_id,
    **kwargs
):
    """get_chamber_power_status (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        ChamberPowerStatus, or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    return self.call_with_http_info(**kwargs)
self.get_chamber_power_status = _Endpoint(
settings={
'response_type': (ChamberPowerStatus,),
'auth': [],
'endpoint_path': '/api/chamber/power/{chamber_id}',
'operation_id': 'get_chamber_power_status',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'chamber_id',
],
'required': [
'chamber_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chamber_power_status
)
def __get_chamber_sensors(
    self,
    chamber_id,
    **kwargs
):
    """Get the sensors for a chamber (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        [SensorUnit], or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    return self.call_with_http_info(**kwargs)
self.get_chamber_sensors = _Endpoint(
settings={
'response_type': ([SensorUnit],),
'auth': [],
'endpoint_path': '/api/chamber_sensors/{chamber_id}',
'operation_id': 'get_chamber_sensors',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'chamber_id',
],
'required': [
'chamber_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chamber_sensors
)
def __get_chamber_status(
    self,
    chamber_id,
    **kwargs
):
    """get_chamber_status (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        MeasureGroup, or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    return self.call_with_http_info(**kwargs)
self.get_chamber_status = _Endpoint(
settings={
'response_type': (MeasureGroup,),
'auth': [],
'endpoint_path': '/api/chamber_status/{chamber_id}',
'operation_id': 'get_chamber_status',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'chamber_id',
],
'required': [
'chamber_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chamber_status
)
def __get_chamber_units(
    self,
    chamber_id,
    **kwargs
):
    """Get the units available for this chamber (generated wrapper).

    Useful for knowing which dials to present and which values filter
    the ExpectedMeasure(s) of a Configuration for a Chamber.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        [Unit], or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    return self.call_with_http_info(**kwargs)
self.get_chamber_units = _Endpoint(
settings={
'response_type': ([Unit],),
'auth': [],
'endpoint_path': '/api/chamber_units/{chamber_id}',
'operation_id': 'get_chamber_units',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'chamber_id',
],
'required': [
'chamber_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chamber_units
)
def __get_chambers(
    self,
    **kwargs
):
    """Get the list of configurations (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        [Chamber], or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)
self.get_chambers = _Endpoint(
settings={
'response_type': ([Chamber],),
'auth': [],
'endpoint_path': '/api/chambers',
'operation_id': 'get_chambers',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_chambers
)
def __put_chamber_status(
    self,
    chamber_id,
    chamber_status,
    **kwargs
):
    """put_chamber_status (generated endpoint wrapper).

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``.get()`` on it for the result).

    Args:
        chamber_id (int): ID of the chamber.
        chamber_status (ChamberStatus): new status payload.

    Keyword Args:
        Standard OpenAPI client options (``_return_http_data_only``,
        ``_preload_content``, ``_request_timeout``, ``_check_input_type``,
        ``_check_return_type``, ``_host_index``, ``async_req``).

    Returns:
        [Measure], or the request thread when ``async_req=True``.
    """
    # Fill in the standard client options without clobbering anything
    # the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['chamber_id'] = chamber_id
    kwargs['chamber_status'] = chamber_status
    return self.call_with_http_info(**kwargs)
self.put_chamber_status = _Endpoint(
settings={
'response_type': ([Measure],),
'auth': [],
'endpoint_path': '/api/chamber_status/{chamber_id}',
'operation_id': 'put_chamber_status',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'chamber_id',
'chamber_status',
],
'required': [
'chamber_id',
'chamber_status',
],
'nullable': [
],
'enum': [
],
'validation': [
'chamber_id',
]
},
root_map={
'validations': {
('chamber_id',): {
'inclusive_minimum': 0,
},
},
'allowed_values': {
},
'openapi_types': {
'chamber_id':
(int,),
'chamber_status':
(ChamberStatus,),
},
'attribute_map': {
'chamber_id': 'chamber_id',
},
'location_map': {
'chamber_id': 'path',
'chamber_status': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__put_chamber_status
)
def __set_chamber_power_status(
    self,
    chamber_id,
    chamber_power_status,
    **kwargs
):
    """set_chamber_power_status  # noqa: E501

    Update the power status of a chamber. The HTTP request is
    synchronous by default; pass async_req=True to receive the
    request thread instead.

    >>> thread = api.set_chamber_power_status(chamber_id, chamber_power_status, async_req=True)
    >>> result = thread.get()

    Args:
        chamber_id (int):
        chamber_power_status (ChamberPowerStatus):

    Keyword Args:
        _return_http_data_only (bool): return response data without
            head status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse
            object is returned without reading/decoding response
            data. Default is True.
        _request_timeout (float/tuple): timeout for this request. A
            single number is the total request timeout; a tuple is
            (connection, read) timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the
            server. Default is True.
        _check_return_type (bool): type-check the data received from
            the server. Default is True.
        _host_index (int/None): index of the server to use. Default
            is read from the configuration.
        async_req (bool): execute request asynchronously.

    Returns:
        Chamber
        If the method is called asynchronously, returns the request
        thread.
    """
    # Apply the standard request-control defaults without clobbering
    # anything the caller supplied explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # The positional arguments are forwarded to the endpoint through
    # the keyword mapping expected by call_with_http_info.
    kwargs['chamber_id'] = chamber_id
    kwargs['chamber_power_status'] = chamber_power_status
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /api/chamber/power/{chamber_id}.
# Binds the generated request metadata (parameter maps, validations,
# content types) and the private request function into the public
# api.set_chamber_power_status(...) callable.
self.set_chamber_power_status = _Endpoint(
    settings={
        'response_type': (Chamber,),
        'auth': [],
        'endpoint_path': '/api/chamber/power/{chamber_id}',
        'operation_id': 'set_chamber_power_status',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        # Both parameters are mandatory; only chamber_id carries a
        # validation rule (see root_map['validations'] below).
        'all': [
            'chamber_id',
            'chamber_power_status',
        ],
        'required': [
            'chamber_id',
            'chamber_power_status',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
            'chamber_id',
        ]
    },
    root_map={
        'validations': {
            # chamber_id must be >= 0.
            ('chamber_id',): {
                'inclusive_minimum': 0,
            },
        },
        'allowed_values': {
        },
        'openapi_types': {
            'chamber_id':
                (int,),
            'chamber_power_status':
                (ChamberPowerStatus,),
        },
        'attribute_map': {
            'chamber_id': 'chamber_id',
        },
        'location_map': {
            # chamber_id is interpolated into the URL path; the power
            # status object is serialized as the JSON request body.
            'chamber_id': 'path',
            'chamber_power_status': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json'
        ]
    },
    api_client=api_client,
    callable=__set_chamber_power_status
)
| 36.003925
| 268
| 0.460587
| 3,144
| 36,688
| 5.097964
| 0.063613
| 0.056713
| 0.025955
| 0.026953
| 0.885326
| 0.870289
| 0.845084
| 0.834415
| 0.832169
| 0.821126
| 0
| 0.002765
| 0.457861
| 36,688
| 1,018
| 269
| 36.039293
| 0.803067
| 0.324902
| 0
| 0.656848
| 1
| 0
| 0.221966
| 0.036103
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013255
| false
| 0
| 0.017673
| 0
| 0.044183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
760fbea759f4e4b00e0545c7cb3cba76d41204bf
| 148,355
|
py
|
Python
|
gsf/plot_sed_logA.py
|
mtakahiro/gsf
|
c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
|
[
"MIT"
] | 9
|
2019-08-23T19:00:54.000Z
|
2022-02-23T17:57:41.000Z
|
gsf/plot_sed_logA.py
|
mtakahiro/gsf
|
c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
|
[
"MIT"
] | 17
|
2020-05-22T17:41:15.000Z
|
2022-03-20T03:32:48.000Z
|
gsf/plot_sed_logA.py
|
mtakahiro/gsf
|
c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
|
[
"MIT"
] | 1
|
2020-02-01T22:55:37.000Z
|
2020-02-01T22:55:37.000Z
|
import numpy as np
import sys
import os
import asdf
import matplotlib.pyplot as plt
from numpy import log10
from scipy.integrate import simps
from astropy.io import fits
from matplotlib.ticker import FormatStrFormatter
from .function import *
from .function_class import Func
from .basic_func import Basic
import corner
col = ['violet', 'indigo', 'b', 'lightblue', 'lightgreen', 'g', 'orange', 'coral', 'r', 'darkred']#, 'k']
#col = ['darkred', 'r', 'coral','orange','g','lightgreen', 'lightblue', 'b','indigo','violet','k']
def plot_sed(MB, flim=0.01, fil_path='./', scale=1e-19, f_chind=True, figpdf=False, save_sed=True, inputs=False, \
mmax=300, dust_model=0, DIR_TMP='./templates/', f_label=False, f_bbbox=False, verbose=False, f_silence=True, \
f_fill=False, f_fancyplot=False, f_Alog=True, dpi=300, f_plot_filter=True):
'''
Parameters
----------
MB.SNlim : float
SN limit to show flux or up lim in SED.
f_chind : bool
If include non-detection in chi2 calculation, using Sawicki12.
mmax : int
Number of mcmc realization for plot. Not for calculation.
f_fancy : bool
plot each SED component.
f_fill: bool
if True, and so is f_fancy, fill each SED component.
Returns
-------
plots
'''
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from scipy.optimize import curve_fit
from scipy import asarray as ar,exp
import matplotlib
import scipy.integrate as integrate
import scipy.special as special
import os.path
from astropy.io import ascii
import time
if f_silence:
import matplotlib
matplotlib.use("Agg")
def gaus(x,a,x0,sigma):
return a*exp(-(x-x0)**2/(2*sigma**2))
lcb = '#4682b4' # line color, blue
fnc = MB.fnc
bfnc = MB.bfnc
ID = MB.ID
Z = MB.Zall
age = MB.age
nage = MB.nage
tau0 = MB.tau0
#col = ['violet', 'indigo', 'b', 'lightblue', 'lightgreen', 'g', 'orange', 'coral', 'r', 'darkred']#, 'k']
NUM_COLORS = len(age)
cm = plt.get_cmap('gist_rainbow')
col = [cm(1 - 1.*i/NUM_COLORS) for i in range(NUM_COLORS)]
nstep_plot = 1
if MB.f_bpass:
nstep_plot = 30
SNlim = MB.SNlim
################
# RF colors.
home = os.path.expanduser('~')
c = MB.c
chimax = 1.
m0set = MB.m0set
Mpc_cm = MB.Mpc_cm
d = MB.d * scale
##################
# Fitting Results
##################
DIR_FILT = MB.DIR_FILT
SFILT = MB.filts
try:
f_err = MB.ferr
except:
f_err = 0
###########################
# Open result file
###########################
file = MB.DIR_OUT + 'summary_' + ID + '.fits'
hdul = fits.open(file)
ndim_eff = hdul[0].header['NDIM']
# Redshift MC
zp16 = hdul[1].data['zmc'][0]
zp50 = hdul[1].data['zmc'][1]
zp84 = hdul[1].data['zmc'][2]
# Stellar mass MC
M16 = hdul[1].data['ms'][0]
M50 = hdul[1].data['ms'][1]
M84 = hdul[1].data['ms'][2]
if verbose:
print('Total stellar mass is %.2e'%(M50))
# Amplitude MC
A50 = np.zeros(len(age), dtype='float')
A16 = np.zeros(len(age), dtype='float')
A84 = np.zeros(len(age), dtype='float')
for aa in range(len(age)):
A16[aa] = 10**hdul[1].data['A'+str(aa)][0]
A50[aa] = 10**hdul[1].data['A'+str(aa)][1]
A84[aa] = 10**hdul[1].data['A'+str(aa)][2]
Asum = np.sum(A50)
aa = 0
Av16 = hdul[1].data['Av'+str(aa)][0]
Av50 = hdul[1].data['Av'+str(aa)][1]
Av84 = hdul[1].data['Av'+str(aa)][2]
AAv = [Av50]
Z50 = np.zeros(len(age), dtype='float')
Z16 = np.zeros(len(age), dtype='float')
Z84 = np.zeros(len(age), dtype='float')
NZbest = np.zeros(len(age), dtype='int')
for aa in range(len(age)):
Z16[aa] = hdul[1].data['Z'+str(aa)][0]
Z50[aa] = hdul[1].data['Z'+str(aa)][1]
Z84[aa] = hdul[1].data['Z'+str(aa)][2]
NZbest[aa]= bfnc.Z2NZ(Z50[aa])
# Light weighted Z.
ZZ50 = np.sum(Z50*A50)/np.sum(A50)
# FIR Dust;
try:
MD16 = hdul[1].data['MDUST'][0]
MD50 = hdul[1].data['MDUST'][1]
MD84 = hdul[1].data['MDUST'][2]
TD16 = hdul[1].data['TDUST'][0]
TD50 = hdul[1].data['TDUST'][1]
TD84 = hdul[1].data['TDUST'][2]
nTD16 = hdul[1].data['nTDUST'][0]
nTD50 = hdul[1].data['nTDUST'][1]
nTD84 = hdul[1].data['nTDUST'][2]
DFILT = inputs['FIR_FILTER'] # filter band string.
DFILT = [x.strip() for x in DFILT.split(',')]
DFWFILT = fil_fwhm(DFILT, DIR_FILT)
if verbose:
print('Total dust mass is %.2e'%(MD50))
f_dust = True
except:
f_dust = False
chi = hdul[1].data['chi'][0]
chin = hdul[1].data['chi'][1]
fitc = chin
Cz0 = hdul[0].header['Cz0']
Cz1 = hdul[0].header['Cz1']
zbes = zp50
zscl = (1.+zbes)
###############################
# Data taken from
###############################
if MB.f_dust:
MB.dict = MB.read_data(Cz0, Cz1, zbes, add_fir=True)
else:
MB.dict = MB.read_data(Cz0, Cz1, zbes)
NR = MB.dict['NR']
x = MB.dict['x']
fy = MB.dict['fy']
ey = MB.dict['ey']
con0 = (NR<1000)
xg0 = x[con0]
fg0 = fy[con0]
eg0 = ey[con0]
con1 = (NR>=1000) & (NR<10000)
xg1 = x[con1]
fg1 = fy[con1]
eg1 = ey[con1]
if len(xg0)>0 or len(xg1)>0:
f_grsm = True
else:
f_grsm = False
wht = fy * 0
con_wht = (ey>0)
wht[con_wht] = 1./np.square(ey[con_wht])
# BB data points;
NRbb = MB.dict['NRbb']
xbb = MB.dict['xbb']
fybb = MB.dict['fybb']
eybb = MB.dict['eybb']
exbb = MB.dict['exbb']
snbb = fybb/eybb
######################
# Weight by line
######################
wh0 = 1./np.square(eg0)
LW0 = []
model = fg0
wht3 = check_line_man(fy, x, wht, fy, zbes, LW0)
######################
# Mass-to-Light ratio.
######################
ms = np.zeros(len(age), dtype='float')
af = MB.af
sedpar = af['ML']
for aa in range(len(age)):
ms[aa] = sedpar['ML_' + str(int(NZbest[aa]))][aa]
try:
isochrone = af['isochrone']
LIBRARY = af['library']
except:
isochrone = ''
LIBRARY = ''
#############
# Plot.
#############
# Set the inset.
if f_grsm or f_dust:
fig = plt.figure(figsize=(7.,3.2))
fig.subplots_adjust(top=0.98, bottom=0.16, left=0.1, right=0.99, hspace=0.15, wspace=0.25)
ax1 = fig.add_subplot(111)
xsize = 0.29
ysize = 0.25
if f_grsm:
ax2t = ax1.inset_axes((1-xsize-0.01,1-ysize-0.01,xsize,ysize))
if f_dust:
ax3t = ax1.inset_axes((0.7,.35,.28,.25))
else:
fig = plt.figure(figsize=(5.5,2.2))
fig.subplots_adjust(top=0.98, bottom=0.16, left=0.1, right=0.99, hspace=0.15, wspace=0.25)
ax1 = fig.add_subplot(111)
#######################################
# D.Kelson like Box for BB photometry
#######################################
col_dat = 'r'
if f_bbbox:
for ii in range(len(xbb)):
if eybb[ii]<100 and fybb[ii]/eybb[ii]>1:
xx = [xbb[ii]-exbb[ii],xbb[ii]-exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]+exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]-exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]-exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
else: # Normal BB plot;
# Detection;
conbb_hs = (fybb/eybb>SNlim)
ax1.errorbar(xbb[conbb_hs], fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d, \
yerr=eybb[conbb_hs]*c/np.square(xbb[conbb_hs])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax1.plot(xbb[conbb_hs], fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d, \
marker='.', color=col_dat, linestyle='', linewidth=0, zorder=4, ms=8)#, label='Obs.(BB)')
try:
# For any data removed fron fit (i.e. IRAC excess):
data_ex = ascii.read(DIR_TMP + 'bb_obs_' + ID + '_removed.cat')
NR_ex = data_ex['col1']
except:
NR_ex = []
# Upperlim;
sigma = 1.0
leng = np.max(fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d) * 0.05 #0.2
conebb_ls = (fybb/eybb<=SNlim) & (eybb>0)
for ii in range(len(xbb)):
if NRbb[ii] in NR_ex[:]:
conebb_ls[ii] = False
ax1.errorbar(xbb[conebb_ls], eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, yerr=leng,\
uplims=eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, linestyle='', color=col_dat, marker='', ms=4, label='', zorder=4, capsize=3)
# For any data removed fron fit (i.e. IRAC excess):
f_exclude = False
try:
col_ex = 'lawngreen'
#col_ex = 'limegreen'
#col_ex = 'r'
# Currently, this file is made after FILTER_SKIP;
data_ex = ascii.read(DIR_TMP + 'bb_obs_' + ID + '_removed.cat')
x_ex = data_ex['col2']
fy_ex = data_ex['col3']
ey_ex = data_ex['col4']
ex_ex = data_ex['col5']
ax1.errorbar(x_ex, fy_ex * c / np.square(x_ex) / d, \
xerr=ex_ex, yerr=ey_ex*c/np.square(x_ex)/d, color='k', linestyle='', linewidth=0.5, zorder=5)
ax1.scatter(x_ex, fy_ex * c / np.square(x_ex) / d, marker='s', color=col_ex, edgecolor='k', zorder=5, s=30)
f_exclude = True
except:
pass
#####################################
# Open ascii file and stock to array.
lib = fnc.open_spec_fits(fall=0)
lib_all = fnc.open_spec_fits(fall=1, orig=True)
#lib_all_conv = fnc.open_spec_fits(fall=1)
if f_dust:
DT0 = float(inputs['TDUST_LOW'])
DT1 = float(inputs['TDUST_HIG'])
dDT = float(inputs['TDUST_DEL'])
Temp = np.arange(DT0,DT1,dDT)
iimax = len(nage)-1
# FIR dust plot;
if f_dust:
from lmfit import Parameters
par = Parameters()
par.add('MDUST',value=MD50)
par.add('TDUST',value=nTD50)
par.add('zmc',value=zp50)
y0d, x0d = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust_all)
y0d_cut, x0d_cut = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust)
# data;
dat_d = ascii.read(MB.DIR_TMP + 'bb_dust_obs_' + MB.ID + '.cat')
NRbbd = dat_d['col1']
xbbd = dat_d['col2']
fybbd = dat_d['col3']
eybbd = dat_d['col4']
exbbd = dat_d['col5']
snbbd = fybbd/eybbd
try:
conbbd_hs = (fybbd/eybbd>SNlim)
ax1.errorbar(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
yerr=eybbd[conbbd_hs]*c/np.square(xbbd[conbbd_hs])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax1.plot(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
'.r', linestyle='', linewidth=0, zorder=4)#, label='Obs.(BB)')
ax3t.plot(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
'.r', linestyle='', linewidth=0, zorder=4)#, label='Obs.(BB)')
except:
pass
try:
conebbd_ls = (fybbd/eybbd<=SNlim)
ax1.errorbar(xbbd[conebbd_ls], eybbd[conebbd_ls] * c / np.square(xbbd[conebbd_ls]) / d, \
yerr=fybbd[conebbd_ls]*0+np.max(fybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d)*0.05, \
uplims=eybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d, color='r', linestyle='', linewidth=0.5, zorder=4)
ax3t.errorbar(xbbd[conebbd_ls], eybbd[conebbd_ls] * c / np.square(xbbd[conebbd_ls]) / d, \
yerr=fybbd[conebbd_ls]*0+np.max(fybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d)*0.05, \
uplims=eybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d, color='r', linestyle='', linewidth=0.5, zorder=4)
except:
pass
#
# This is for UVJ color time evolution.
#
Asum = np.sum(A50[:])
alp = .5
for jj in range(len(age)):
ii = int(len(nage) - jj - 1) # from old to young templates.
if jj == 0:
y0, x0 = fnc.tmp03(A50[ii], AAv[0], ii, Z50[ii], zbes, lib_all)
y0p, x0p = fnc.tmp03(A50[ii], AAv[0], ii, Z50[ii], zbes, lib)
ysum = y0
ysump = y0p
nopt = len(ysump)
f_50_comp = np.zeros((len(age),len(y0)),'float')
# Keep each component;
f_50_comp[ii,:] = y0[:] * c / np.square(x0) / d
if f_dust:
ysump[:] += y0d_cut[:nopt]
ysump = np.append(ysump,y0d_cut[nopt:])
# Keep each component;
f_50_comp_dust = y0d * c / np.square(x0d) / d
else:
y0_r, x0_tmp = fnc.tmp03(A50[ii], AAv[0], ii, Z50[ii], zbes, lib_all)
y0p, x0p = fnc.tmp03(A50[ii], AAv[0], ii, Z50[ii], zbes, lib)
ysum += y0_r
ysump[:nopt] += y0p
f_50_comp[ii,:] = y0_r[:] * c / np.square(x0_tmp) / d
# The following needs revised.
f_uvj = False
if f_uvj:
if jj == 0:
fwuvj = open(MB.DIR_OUT + ID + '_uvj.txt', 'w')
fwuvj.write('# age uv vj\n')
ysum_wid = ysum * 0
for kk in range(0,ii+1,1):
tt = int(len(nage) - kk - 1)
nn = int(len(nage) - ii - 1)
nZ = bfnc.Z2NZ(Z50[tt])
y0_wid, x0_wid = fnc.open_spec_fits_dir(tt, nZ, nn, AAv[0], zbes, A50[tt])
ysum_wid += y0_wid
lmrest_wid = x0_wid/(1.+zbes)
band0 = ['u','v','j']
lmconv,fconv = filconv(band0, lmrest_wid, ysum_wid, fil_path) # f0 in fnu
fu_t = fconv[0]
fv_t = fconv[1]
fj_t = fconv[2]
uvt = -2.5*log10(fu_t/fv_t)
vjt = -2.5*log10(fv_t/fj_t)
fwuvj.write('%.2f %.3f %.3f\n'%(age[ii], uvt, vjt))
fwuvj.close()
#############
# Main result
#############
conbb_ymax = (xbb>0) & (fybb>0) & (eybb>0) & (fybb/eybb>1)
ymax = np.max(fybb[conbb_ymax]*c/np.square(xbb[conbb_ymax])/d) * 1.6
xboxl = 17000
xboxu = 28000
ax1.set_xlabel('Observed wavelength ($\mathrm{\mu m}$)', fontsize=12)
ax1.set_ylabel('Flux ($10^{%d}\mathrm{erg}/\mathrm{s}/\mathrm{cm}^{2}/\mathrm{\AA}$)'%(np.log10(scale)),fontsize=12,labelpad=-2)
x1min = 2000
x1max = 100000
xticks = [2500, 5000, 10000, 20000, 40000, 80000, x1max]
xlabels= ['0.25', '0.5', '1', '2', '4', '8', '']
if f_dust:
x1max = 400000
xticks = [2500, 5000, 10000, 20000, 40000, 80000, 400000]
xlabels= ['0.25', '0.5', '1', '2', '4', '8', '']
#if x1max < np.max(xbb[conbb_ymax]):
# x1max = np.max(xbb[conbb_ymax]) * 1.5
if x1max < np.max(xbb):
x1max = np.max(xbb) * 1.5
if x1min > np.min(xbb[conbb_ymax]):
x1min = np.min(xbb[conbb_ymax]) / 1.5
ax1.set_xlim(x1min, x1max)
ax1.set_xscale('log')
if f_plot_filter:
scl_yaxis = 0.2
else:
scl_yaxis = 0.1
ax1.set_ylim(-ymax*scl_yaxis,ymax)
ax1.text(x1min+100,-ymax*0.08,'SNlimit:%.1f'%(SNlim),fontsize=8)
ax1.set_xticks(xticks)
ax1.set_xticklabels(xlabels)
dely1 = 0.5
while (ymax-0)/dely1<1:
dely1 /= 2.
while (ymax-0)/dely1>4:
dely1 *= 2.
y1ticks = np.arange(0, ymax, dely1)
ax1.set_yticks(y1ticks)
ax1.set_yticklabels(np.arange(0, ymax, dely1), minor=False)
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
ax1.yaxis.labelpad = 1.5
xx = np.arange(100,400000)
yy = xx * 0
ax1.plot(xx, yy, ls='--', lw=0.5, color='k')
#############
# Plot
#############
eAAl = np.zeros(len(age),dtype='float')
eAAu = np.zeros(len(age),dtype='float')
eAMl = np.zeros(len(age),dtype='float')
eAMu = np.zeros(len(age),dtype='float')
MSsum = np.sum(ms)
Asum = np.sum(A50)
A50 /= Asum
A16 /= Asum
A84 /= Asum
AM50 = A50 * M50 * ms / MSsum
CM = M50/np.sum(AM50)
AM50 = A50 * M50 * ms / MSsum * CM
AM16 = A16 * M50 * ms / MSsum * CM
AM84 = A84 * M50 * ms / MSsum * CM
AC50 = A50 * 0 # Cumulative
for ii in range(len(A50)):
eAAl[ii] = A50[ii] - A16[ii]
eAAu[ii] = A84[ii] - A50[ii]
eAMl[ii] = AM50[ii] - AM16[ii]
eAMu[ii] = AM84[ii] - AM50[ii]
AC50[ii] = np.sum(AM50[ii:])
################
# Lines
################
LN = ['Mg2', 'Ne5', 'O2', 'Htheta', 'Heta', 'Ne3', 'Hdelta', 'Hgamma', 'Hbeta', 'O3', 'O3', 'Mgb', 'Halpha', 'S2L', 'S2H']
FLW = np.zeros(len(LN),dtype='int')
####################
# For cosmology
####################
DL = MB.cosmo.luminosity_distance(zbes).value * Mpc_cm #, **cosmo) # Luminositydistance in cm
Cons = (4.*np.pi*DL**2/(1.+zbes))
if f_grsm:
print('This function (write_lines) needs to be revised.')
write_lines(ID, zbes, DIR_OUT=MB.DIR_OUT)
##########################
# Zoom in Line regions
##########################
if f_grsm:
conspec = (NR<10000) #& (fy/ey>1)
#ax2t.fill_between(xg1, (fg1-eg1) * c/np.square(xg1)/d, (fg1+eg1) * c/np.square(xg1)/d, lw=0, color='#DF4E00', zorder=10, alpha=0.7, label='')
#ax2t.fill_between(xg0, (fg0-eg0) * c/np.square(xg0)/d, (fg0+eg0) * c/np.square(xg0)/d, lw=0, color='royalblue', zorder=10, alpha=0.2, label='')
ax2t.errorbar(xg1, fg1 * c/np.square(xg1)/d, yerr=eg1 * c/np.square(xg1)/d, lw=0.5, color='#DF4E00', zorder=10, alpha=1., label='', capsize=0)
ax2t.errorbar(xg0, fg0 * c/np.square(xg0)/d, yerr=eg0 * c/np.square(xg0)/d, lw=0.5, linestyle='', color='royalblue', zorder=10, alpha=1., label='', capsize=0)
xgrism = np.concatenate([xg0,xg1])
fgrism = np.concatenate([fg0,fg1])
egrism = np.concatenate([eg0,eg1])
con4000b = (xgrism/zscl>3400) & (xgrism/zscl<3800) & (fgrism>0) & (egrism>0)
con4000r = (xgrism/zscl>4200) & (xgrism/zscl<5000) & (fgrism>0) & (egrism>0)
print('Median SN at 3400-3800 is;', np.median((fgrism/egrism)[con4000b]))
print('Median SN at 4200-5000 is;', np.median((fgrism/egrism)[con4000r]))
#ax1.errorbar(xg1, fg1 * c/np.square(xg1)/d, yerr=eg1 * c/np.square(xg1)/d, lw=0.5, color='#DF4E00', zorder=10, alpha=1., label='', capsize=0)
#ax1.errorbar(xg0, fg0 * c/np.square(xg0)/d, yerr=eg0 * c/np.square(xg0)/d, lw=0.5, linestyle='', color='royalblue', zorder=10, alpha=1., label='', capsize=0)
#
# From MCMC chain
#
file = MB.DIR_OUT + 'chain_' + ID + '_corner.cpkl'
niter = 0
data = loadcpkl(file)
try:
ndim = data['ndim'] # By default, use ndim and burnin values contained in the cpkl file, if present.
burnin = data['burnin']
nmc = data['niter']
nwalk = data['nwalkers']
Nburn = burnin #*20
res = data['chain'][:]
except:
if verbose: print(' = > NO keys of ndim and burnin found in cpkl, use input keyword values')
samples = res
# Saved template;
ytmp = np.zeros((mmax,len(ysum)), dtype='float')
ytmp_each = np.zeros((mmax,len(ysum),len(age)), dtype='float')
ytmpmax = np.zeros(len(ysum), dtype='float')
ytmpmin = np.zeros(len(ysum), dtype='float')
# MUV;
DL = MB.cosmo.luminosity_distance(zbes).value * Mpc_cm # Luminositydistance in cm
DL10 = Mpc_cm/1e6 * 10 # 10pc in cm
Fuv = np.zeros(mmax, dtype='float') # For Muv
Fuv28 = np.zeros(mmax, dtype='float') # For Fuv(1500-2800)
Lir = np.zeros(mmax, dtype='float') # For L(8-1000um)
UVJ = np.zeros((mmax,4), dtype='float') # For UVJ color;
Cmznu = 10**((48.6+m0set)/(-2.5)) # Conversion from m0_25 to fnu
# From random chain;
alp=0.02
for kk in range(0,mmax,1):
nr = np.random.randint(Nburn, len(samples['A%d'%MB.aamin[0]]))
try:
Av_tmp = samples['Av'][nr]
except:
Av_tmp = MB.AVFIX
try:
zmc = samples['zmc'][nr]
except:
zmc = zbes
for ss in MB.aamin:
try:
AA_tmp = 10**samples['A'+str(ss)][nr]
except:
AA_tmp = 0
pass
try:
Ztest = samples['Z'+str(len(age)-1)][nr]
ZZ_tmp = samples['Z'+str(ss)][nr]
except:
try:
ZZ_tmp = samples['Z0'][nr]
except:
ZZ_tmp = MB.ZFIX
if ss == MB.aamin[0]:
mod0_tmp, xm_tmp = fnc.tmp03(AA_tmp, Av_tmp, ss, ZZ_tmp, zmc, lib_all)
fm_tmp = mod0_tmp
else:
mod0_tmp, xx_tmp = fnc.tmp03(AA_tmp, Av_tmp, ss, ZZ_tmp, zmc, lib_all)
fm_tmp += mod0_tmp
# Each;
ytmp_each[kk,:,ss] = mod0_tmp[:] * c / np.square(xm_tmp[:]) / d
#
# Dust component;
#
if f_dust:
if kk == 0:
par = Parameters()
par.add('MDUST',value=samples['MDUST'][nr])
try:
par.add('TDUST',value=samples['TDUST'][nr])
except:
par.add('TDUST',value=0)
par['MDUST'].value = samples['MDUST'][nr]
try:
par['TDUST'].value = samples['TDUST'][nr]
except:
par['TDUST'].value = 0
model_dust, x1_dust = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust_all)
if kk == 0:
deldt = (x1_dust[1] - x1_dust[0])
x1_tot = np.append(xm_tmp,np.arange(np.max(xm_tmp),np.max(x1_dust),deldt))
# Redefine??
ytmp = np.zeros((mmax,len(x1_tot)), dtype='float')
ytmp_dust = np.zeros((mmax,len(x1_dust)), dtype='float')
ytmp_comp = np.zeros((mmax,len(x1_tot)), dtype='float')
ytmp_dust[kk,:] = model_dust * c/np.square(x1_dust)/d
model_tot = np.interp(x1_tot,xx_tmp,fm_tmp) + np.interp(x1_tot,x1_dust,model_dust)
ytmp[kk,:] = model_tot[:] * c/np.square(x1_tot[:])/d
else:
x1_tot = xm_tmp
ytmp[kk,:] = fm_tmp[:] * c / np.square(xm_tmp[:]) / d
#
# Grism plot + Fuv flux + LIR.
#
#if f_grsm:
#ax2t.plot(x1_tot, ytmp[kk,:], '-', lw=0.5, color='gray', zorder=3., alpha=0.02)
# Get FUV flux;
Fuv[kk] = get_Fuv(x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)) * (DL**2/(1.+zbes)) / (DL10**2), lmin=1250, lmax=1650)
Fuv28[kk] = get_Fuv(x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)) * (4*np.pi*DL**2/(1.+zbes))*Cmznu, lmin=1500, lmax=2800)
Lir[kk] = 0
# Get UVJ Color;
lmconv,fconv = filconv_fast(MB.filts_rf, MB.band_rf, x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)))
UVJ[kk,0] = -2.5*np.log10(fconv[0]/fconv[2])
UVJ[kk,1] = -2.5*np.log10(fconv[1]/fconv[2])
UVJ[kk,2] = -2.5*np.log10(fconv[2]/fconv[3])
UVJ[kk,3] = -2.5*np.log10(fconv[4]/fconv[3])
# Do stuff...
time.sleep(0.01)
# Update Progress Bar
printProgressBar(kk, mmax, prefix = 'Progress:', suffix = 'Complete', length = 40)
#
# Plot Median SED;
#
ytmp16 = np.percentile(ytmp[:,:],16,axis=0)
ytmp50 = np.percentile(ytmp[:,:],50,axis=0)
ytmp84 = np.percentile(ytmp[:,:],84,axis=0)
if f_dust:
ytmp_dust50 = np.percentile(ytmp_dust[:,:],50, axis=0)
#if not f_fill:
ax1.fill_between(x1_tot[::nstep_plot], ytmp16[::nstep_plot], ytmp84[::nstep_plot], ls='-', lw=.5, color='gray', zorder=-2, alpha=0.5)
ax1.plot(x1_tot[::nstep_plot], ytmp50[::nstep_plot], '-', lw=.5, color='gray', zorder=-1, alpha=1.)
# For grism;
if f_grsm:
from astropy.convolution import convolve
from .maketmp_filt import get_LSF
LSF, lmtmp = get_LSF(MB.inputs, MB.DIR_EXTR, ID, x1_tot[::nstep_plot], c=3e18)
spec_grsm16 = convolve(ytmp16[::nstep_plot], LSF, boundary='extend')
spec_grsm50 = convolve(ytmp50[::nstep_plot], LSF, boundary='extend')
spec_grsm84 = convolve(ytmp84[::nstep_plot], LSF, boundary='extend')
ax2t.plot(x1_tot[::nstep_plot], spec_grsm50, '-', lw=0.5, color='gray', zorder=3., alpha=1.0)
# Attach the data point in MB;
MB.sed_wave_obs = xbb
MB.sed_flux_obs = fybb * c / np.square(xbb) / d
MB.sed_eflux_obs = eybb * c / np.square(xbb) / d
# Attach the best SED to MB;
MB.sed_wave = x1_tot
MB.sed_flux16 = ytmp16
MB.sed_flux50 = ytmp50
MB.sed_flux84 = ytmp84
if f_fancyplot:
alp_fancy = 0.5
#ax1.plot(x1_tot[::nstep_plot], np.percentile(ytmp[:, ::nstep_plot], 50, axis=0), '-', lw=.5, color='gray', zorder=-1, alpha=1.)
ysumtmp = ytmp[0, ::nstep_plot] * 0
ysumtmp2 = ytmp[:, ::nstep_plot] * 0
ysumtmp2_prior = ytmp[0, ::nstep_plot] * 0
for ss in range(len(age)):
ii = int(len(nage) - ss - 1) # from old to young templates.
#ysumtmp += np.percentile(ytmp_each[:, ::nstep_plot, ii], 50, axis=0)
#ax1.plot(x1_tot[::nstep_plot], ysumtmp, linestyle='--', lw=.5, color=col[ii], alpha=0.5)
# !! Take median after summation;
ysumtmp2[:,:len(xm_tmp)] += ytmp_each[:, ::nstep_plot, ii]
if f_fill:
ax1.fill_between(x1_tot[::nstep_plot], ysumtmp2_prior, np.percentile(ysumtmp2[:,:], 50, axis=0), linestyle='None', lw=0., color=col[ii], alpha=alp_fancy, zorder=-3)
else:
ax1.plot(x1_tot[::nstep_plot], np.percentile(ysumtmp2[:, ::nstep_plot], 50, axis=0), linestyle='--', lw=.5, color=col[ii], alpha=alp_fancy, zorder=-3)
ysumtmp2_prior[:] = np.percentile(ysumtmp2[:, :], 50, axis=0)
elif f_fill:
print('f_fancyplot is False. f_fill is set to False.')
#########################
# Calculate non-det chi2
# based on Sawick12
#########################
def func_tmp(xint,eobs,fmodel):
int_tmp = np.exp(-0.5 * ((xint-fmodel)/eobs)**2)
return int_tmp
if f_chind:
conw = (wht3>0) & (ey>0) & (fy/ey>SNlim)
else:
conw = (wht3>0) & (ey>0) #& (fy/ey>SNlim)
#chi2 = sum((np.square(fy-ysump) * np.sqrt(wht3))[conw])
try:
logf = hdul[1].data['logf'][1]
ey_revised = np.sqrt(ey**2+ ysump**2 * np.exp(logf)**2)
except:
ey_revised = ey
chi2 = sum((np.square(fy-ysump) / ey_revised)[conw])
chi_nd = 0.0
if f_chind:
f_ex = np.zeros(len(fy), 'int')
if f_exclude:
for ii in range(len(fy)):
if x[ii] in x_ex:
f_ex[ii] = 1
con_up = (ey>0) & (fy/ey<=SNlim) & (f_ex == 0)
from scipy import special
#x_erf = (ey[con_up] - ysump[con_up]) / (np.sqrt(2) * ey[con_up])
#f_erf = special.erf(x_erf)
#chi_nd = np.sum( np.log(np.sqrt(np.pi / 2) * ey[con_up] * (1 + f_erf)) )
x_erf = (ey_revised[con_up] - ysump[con_up]) / (np.sqrt(2) * ey_revised[con_up])
f_erf = special.erf(x_erf)
chi_nd = np.sum( np.log(np.sqrt(np.pi / 2) * ey_revised[con_up] * (1 + f_erf)) )
# Number of degree;
con_nod = (wht3>0) & (ey>0) #& (fy/ey>SNlim)
nod = int(len(wht3[con_nod])-ndim_eff)
print('\n')
print('No-of-detection : %d'%(len(wht3[conw])))
print('chi2 : %.2f'%(chi2))
if f_chind:
print('No-of-non-detection: %d'%(len(ey[con_up])))
print('chi2 for non-det : %.2f'%(- 2 * chi_nd))
print('No-of-params : %d'%(ndim_eff))
print('Degrees-of-freedom : %d'%(nod))
if nod>0:
fin_chi2 = (chi2 - 2 * chi_nd) / nod
else:
fin_chi2 = -99
print('Final chi2/nu : %.2f'%(fin_chi2))
#
# plot BB model from best template (blue squares)
#
col_dia = 'blue'
if f_dust:
ALLFILT = np.append(SFILT,DFILT)
#for ii in range(len(x1_tot)):
# print(x1_tot[ii], model_tot[ii]*c/np.square(x1_tot[ii])/d)
lbb, fbb, lfwhm = filconv(ALLFILT, x1_tot, ytmp50, DIR_FILT, fw=True)
lbb, fbb16, lfwhm = filconv(ALLFILT, x1_tot, ytmp16, DIR_FILT, fw=True)
lbb, fbb84, lfwhm = filconv(ALLFILT, x1_tot, ytmp84, DIR_FILT, fw=True)
ax1.plot(x1_tot, ytmp50, '--', lw=0.5, color='purple', zorder=-1, label='')
ax3t.plot(x1_tot, ytmp50, '--', lw=0.5, color='purple', zorder=-1, label='')
iix = []
for ii in range(len(fbb)):
iix.append(ii)
con_sed = ()
ax1.scatter(lbb[iix][con_sed], fbb[iix][con_sed], lw=0.5, color='none', edgecolor=col_dia, zorder=3, alpha=1.0, marker='d', s=50)
# plot FIR range;
ax3t.scatter(lbb, fbb, lw=0.5, color='none', edgecolor=col_dia, \
zorder=2, alpha=1.0, marker='d', s=50)
else:
lbb, fbb, lfwhm = filconv(SFILT, x1_tot, ytmp50, DIR_FILT, fw=True, MB=MB, f_regist=False)
lbb, fbb16, lfwhm = filconv(SFILT, x1_tot, ytmp16, DIR_FILT, fw=True, MB=MB, f_regist=False)
lbb, fbb84, lfwhm = filconv(SFILT, x1_tot, ytmp84, DIR_FILT, fw=True, MB=MB, f_regist=False)
iix = []
for ii in range(len(fbb)):
iix.append(np.argmin(np.abs(lbb[ii]-xbb[:])))
con_sed = (eybb>0)
ax1.scatter(lbb[iix][con_sed], fbb[iix][con_sed], lw=0.5, color='none', edgecolor=col_dia, zorder=3, alpha=1.0, marker='d', s=50)
# Calculate EW, if there is excess band;
try:
iix2 = []
for ii in range(len(fy_ex)):
iix2.append(np.argmin(np.abs(lbb[:]-x_ex[ii])))
# Rest-frame EW;
# Note about 16/84 in fbb
EW16 = (fy_ex * c / np.square(x_ex) / d - fbb84[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50 = (fy_ex * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW84 = (fy_ex * c / np.square(x_ex) / d - fbb16[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50_er1 = ((fy_ex-ey_ex) * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50_er2 = ((fy_ex+ey_ex) * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
cnt50 = fbb[iix2]
cnt16 = fbb16[iix2]
cnt84 = fbb84[iix2]
# Luminosity;
#Lsun = 3.839 * 1e33 #erg s-1
L16 = EW16 * cnt16 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
L50 = EW50 * cnt50 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
L84 = EW84 * cnt84 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
ew_label = []
for ii in range(len(fy_ex)):
lres = MB.band['%s_lam'%MB.filts[iix2[ii]]][:]
fres = MB.band['%s_res'%MB.filts[iix2[ii]]][:]
ew_label.append(MB.filts[iix2[ii]])
print('\n')
print('EW016 for', x_ex[ii], 'is %d'%EW16[ii])
print('EW050 for', x_ex[ii], 'is %d'%EW50[ii])
print('EW084 for', x_ex[ii], 'is %d'%EW84[ii])
print('%d_{-%d}^{+%d} , for sed error'%(EW50[ii],EW50[ii]-EW84[ii],EW16[ii]-EW50[ii]))
print('Or, %d\pm{%d} , for flux error'%(EW50[ii],EW50[ii]-EW50_er1[ii]))
except:
pass
if save_sed:
fbb16_nu = flamtonu(lbb, fbb16*scale, m0set=25.0)
fbb_nu = flamtonu(lbb, fbb*scale, m0set=25.0)
fbb84_nu = flamtonu(lbb, fbb84*scale, m0set=25.0)
# Then save full spectrum;
col00 = []
col1 = fits.Column(name='wave_model', format='E', unit='AA', array=x1_tot)
col00.append(col1)
col2 = fits.Column(name='f_model_16', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp16[:])
col00.append(col2)
col3 = fits.Column(name='f_model_50', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp50[:])
col00.append(col3)
col4 = fits.Column(name='f_model_84', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp84[:])
col00.append(col4)
# Each component
# Stellar
col1 = fits.Column(name='wave_model_stel', format='E', unit='AA', array=x0)
col00.append(col1)
for aa in range(len(age)):
col1 = fits.Column(name='f_model_stel_%d'%aa, format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=f_50_comp[aa,:])
col00.append(col1)
if f_dust:
col1 = fits.Column(name='wave_model_dust', format='E', unit='AA', array=x1_dust)
col00.append(col1)
col1 = fits.Column(name='f_model_dust', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp_dust50)
col00.append(col1)
# Grism;
if f_grsm:
col2 = fits.Column(name='f_model_conv_16', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=spec_grsm16)
col00.append(col2)
col3 = fits.Column(name='f_model_conv_50', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=spec_grsm50)
col00.append(col3)
col4 = fits.Column(name='f_model_conv_84', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=spec_grsm84)
col00.append(col4)
# BB for dust
if f_dust:
xbb = np.append(xbb,xbbd)
fybb = np.append(fybb,fybbd)
eybb = np.append(eybb,eybbd)
col5 = fits.Column(name='wave_obs', format='E', unit='AA', array=xbb)
col00.append(col5)
col6 = fits.Column(name='f_obs', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=fybb[:] * c / np.square(xbb[:]) / d)
col00.append(col6)
col7 = fits.Column(name='e_obs', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=eybb[:] * c / np.square(xbb[:]) / d)
col00.append(col7)
hdr = fits.Header()
hdr['redshift'] = zbes
hdr['id'] = ID
hdr['hierarch isochrone'] = isochrone
hdr['library'] = LIBRARY
hdr['scale'] = scale
try:
# Chi square:
hdr['chi2'] = chi2
hdr['hierarch No-of-effective-data-points'] = len(wht3[conw])
hdr['hierarch No-of-nondetectioin'] = len(ey[con_up])
hdr['hierarch Chi2-of-nondetection'] = chi_nd
hdr['hierarch No-of-params'] = ndim_eff
hdr['hierarch Degree-of-freedom'] = nod
hdr['hierarch reduced-chi2'] = fin_chi2
except:
print('Chi seems to be wrong...')
pass
try:
# Muv
MUV = -2.5 * np.log10(Fuv[:]) + 25.0
hdr['MUV16'] = np.percentile(MUV[:],16)
hdr['MUV50'] = np.percentile(MUV[:],50)
hdr['MUV84'] = np.percentile(MUV[:],84)
# Fuv (!= flux of Muv)
hdr['FUV16'] = np.percentile(Fuv28[:],16)
hdr['FUV50'] = np.percentile(Fuv28[:],50)
hdr['FUV84'] = np.percentile(Fuv28[:],84)
# LIR
hdr['LIR16'] = np.percentile(Lir[:],16)
hdr['LIR50'] = np.percentile(Lir[:],50)
hdr['LIR84'] = np.percentile(Lir[:],84)
except:
pass
# UVJ
try:
hdr['uv16'] = np.percentile(UVJ[:,0],16)
hdr['uv50'] = np.percentile(UVJ[:,0],50)
hdr['uv84'] = np.percentile(UVJ[:,0],84)
hdr['bv16'] = np.percentile(UVJ[:,1],16)
hdr['bv50'] = np.percentile(UVJ[:,1],50)
hdr['bv84'] = np.percentile(UVJ[:,1],84)
hdr['vj16'] = np.percentile(UVJ[:,2],16)
hdr['vj50'] = np.percentile(UVJ[:,2],50)
hdr['vj84'] = np.percentile(UVJ[:,2],84)
hdr['zj16'] = np.percentile(UVJ[:,3],16)
hdr['zj50'] = np.percentile(UVJ[:,3],50)
hdr['zj84'] = np.percentile(UVJ[:,3],84)
except:
print('\nError when writinf UVJ colors;\n')
pass
# EW;
try:
for ii in range(len(EW50)):
hdr['EW_%s_16'%(ew_label[ii])] = EW16[ii]
hdr['EW_%s_50'%(ew_label[ii])] = EW50[ii]
hdr['EW_%s_84'%(ew_label[ii])] = EW84[ii]
hdr['EW_%s_e1'%(ew_label[ii])] = EW50_er1[ii]
hdr['EW_%s_e2'%(ew_label[ii])] = EW50_er2[ii]
hdr['HIERARCH cnt_%s_16'%(ew_label[ii])]= cnt16[ii]
hdr['HIERARCH cnt_%s_50'%(ew_label[ii])]= cnt50[ii]
hdr['HIERARCH cnt_%s_84'%(ew_label[ii])]= cnt84[ii]
hdr['L_%s_16'%(ew_label[ii])] = L16[ii]
hdr['L_%s_50'%(ew_label[ii])] = L50[ii]
hdr['L_%s_84'%(ew_label[ii])] = L84[ii]
except:
pass
# Version;
import gsf
hdr['version'] = gsf.__version__
# Write;
colspec = fits.ColDefs(col00)
hdu0 = fits.BinTableHDU.from_columns(colspec, header=hdr)
hdu0.writeto(MB.DIR_OUT + 'gsf_spec_%s.fits'%(ID), overwrite=True)
# ASDF;
tree_spec = {
'id': ID,
'redshift': '%.3f'%zbes,
'isochrone': '%s'%(isochrone),
'library': '%s'%(LIBRARY),
'scale': scale,
'version_gsf': gsf.__version__
}
# BB;
tree_spec.update({'wave': lbb})
tree_spec.update({'fnu_16': fbb16_nu})
tree_spec.update({'fnu_50': fbb_nu})
tree_spec.update({'fnu_84': fbb84_nu})
# full spectrum;
tree_spec.update({'wave_model': x1_tot})
tree_spec.update({'f_model_16': ytmp16})
tree_spec.update({'f_model_50': ytmp50})
tree_spec.update({'f_model_84': ytmp84})
# EW;
try:
for ii in range(len(EW50)):
tree_spec.update({'EW_%s_16'%(ew_label[ii]): EW16[ii]})
tree_spec.update({'EW_%s_50'%(ew_label[ii]): EW50[ii]})
tree_spec.update({'EW_%s_84'%(ew_label[ii]): EW84[ii]})
tree_spec.update({'EW_%s_e1'%(ew_label[ii]): EW50_er1[ii]})
tree_spec.update({'EW_%s_e2'%(ew_label[ii]): EW50_er2[ii]})
tree_spec.update({'cnt_%s_16'%(ew_label[ii]): cnt16[ii]})
tree_spec.update({'cnt_%s_50'%(ew_label[ii]): cnt50[ii]})
tree_spec.update({'cnt_%s_84'%(ew_label[ii]): cnt84[ii]})
tree_spec.update({'L_%s_16'%(ew_label[ii]): L16[ii]})
tree_spec.update({'L_%s_50'%(ew_label[ii]): L50[ii]})
tree_spec.update({'L_%s_84'%(ew_label[ii]): L84[ii]})
except:
pass
# Each component
# Stellar
tree_spec.update({'wave_model_stel': x0})
for aa in range(len(age)):
tree_spec.update({'f_model_stel_%d'%aa: f_50_comp[aa,:]})
if f_dust:
# dust
tree_spec.update({'wave_model_dust': x1_dust})
tree_spec.update({'f_model_dust': ytmp_dust50})
# BB for dust
tree_spec.update({'wave_obs': xbb})
tree_spec.update({'f_obs': fybb[:] * c / np.square(xbb[:]) / d})
tree_spec.update({'e_obs': eybb[:] * c / np.square(xbb[:]) / d})
# grism:
if f_grsm:
tree_spec.update({'fg0_obs': fg0 * c/np.square(xg0)/d})
tree_spec.update({'eg0_obs': eg0 * c/np.square(xg0)/d})
tree_spec.update({'wg0_obs': xg0})
tree_spec.update({'fg1_obs': fg1 * c/np.square(xg1)/d})
tree_spec.update({'eg1_obs': eg1 * c/np.square(xg1)/d})
tree_spec.update({'wg1_obs': xg1})
af = asdf.AsdfFile(tree_spec)
af.write_to(MB.DIR_OUT + 'gsf_spec_%s.asdf'%(ID), all_array_compression='zlib')
#
# SED params in plot
#
if f_label:
fd = fits.open(MB.DIR_OUT + 'SFH_' + ID + '.fits')[0].header
if f_dust:
label = 'ID: %s\n$z_\mathrm{obs.}:%.2f$\n$\log M_\mathrm{*}/M_\odot:%.2f$\n$\log M_\mathrm{dust}/M_\odot:%.2f$\n$\log Z_\mathrm{*}/Z_\odot:%.2f$\n$\log T_\mathrm{*}$/Gyr$:%.2f$\n$A_V$/mag$:%.2f$\n$\\chi^2/\\nu:%.2f$'\
%(ID, zbes, float(fd['Mstel_50']), MD50, float(fd['Z_MW_50']), float(fd['T_MW_50']), float(fd['AV_50']), fin_chi2)
ylabel = ymax*0.45
else:
label = 'ID: %s\n$z_\mathrm{obs.}:%.2f$\n$\log M_\mathrm{*}/M_\odot:%.2f$\n$\log Z_\mathrm{*}/Z_\odot:%.2f$\n$\log T_\mathrm{*}$/Gyr$:%.2f$\n$A_V$/mag$:%.2f$\n$\\chi^2/\\nu:%.2f$'\
%(ID, zbes, float(fd['Mstel_50']), float(fd['Z_MW_50']), float(fd['T_MW_50']), float(fd['AV_50']), fin_chi2)
ylabel = ymax*0.25
ax1.text(0.77, 0.65, label,\
fontsize=9, bbox=dict(facecolor='w', alpha=0.7), zorder=10,
ha='left', va='center', transform=ax1.transAxes)
#######################################
ax1.xaxis.labelpad = -3
if f_grsm:
if np.max(xg0)<23000: # E.g. WFC3, NIRISS grisms
conlim = (x0>10000) & (x0<25000)
xgmin, xgmax = np.min(x0[conlim]),np.max(x0[conlim]), #7500, 17000
ax2t.set_xlabel('')
ax2t.set_xlim(xgmin, xgmax)
conaa = (x0>xgmin-50) & (x0<xgmax+50)
ymaxzoom = np.max(ysum[conaa]*c/np.square(x0[conaa])/d) * 1.15
yminzoom = np.min(ysum[conaa]*c/np.square(x0[conaa])/d) / 1.15
ax2t.set_ylim(yminzoom, ymaxzoom)
ax2t.xaxis.labelpad = -2
if xgmax>20000:
ax2t.set_xticks([8000, 12000, 16000, 20000, 24000])
ax2t.set_xticklabels(['0.8', '1.2', '1.6', '2.0', '2.4'])
else:
ax2t.set_xticks([8000, 10000, 12000, 14000, 16000])
ax2t.set_xticklabels(['0.8', '1.0', '1.2', '1.4', '1.6'])
else:
conlim = (x0>10000) & (x0<54000) # NIRSPEC spectrum;
xgmin, xgmax = np.min(x0[conlim]),np.max(x0[conlim]), #7500, 17000
ax2t.set_xlabel('')
ax2t.set_xlim(xgmin, xgmax)
conaa = (x0>xgmin-50) & (x0<xgmax+50)
ymaxzoom = np.max(ysum[conaa]*c/np.square(x0[conaa])/d) * 1.15
yminzoom = np.min(ysum[conaa]*c/np.square(x0[conaa])/d) / 1.15
ax2t.set_ylim(yminzoom, ymaxzoom)
ax2t.xaxis.labelpad = -2
if xgmax>40000:
ax2t.set_xticks([8000, 20000, 32000, 44000, 56000])
ax2t.set_xticklabels(['0.8', '2.0', '3.2', '4.4', '5.6'])
else:
ax2t.set_xticks([8000, 20000, 32000, 44000])
ax2t.set_xticklabels(['0.8', '2.0', '3.2', '4.4'])
if f_dust:
try:
contmp = (x1_tot>10*1e4) #& (fybbd/eybbd>SNlim)
y3min, y3max = -.2*np.max((model_tot * c/ np.square(x1_tot) / d)[contmp]), np.max((model_tot * c/ np.square(x1_tot) / d)[contmp])*2.0
ax3t.set_ylim(y3min, y3max)
except:
if verbose:
print('y3 limit is not specified.')
pass
ax3t.set_xlim(1e5, 3e7)
ax3t.set_xscale('log')
ax3t.set_xticks([100000, 1000000, 10000000])
ax3t.set_xticklabels(['10', '100', '1000'])
###############
# Line name
###############
LN0 = ['Mg2', '$NeIV$', '[OII]', 'H$\theta$', 'H$\eta$', 'Ne3?', 'H$\delta$', 'H$\gamma$', 'H$\\beta$', 'O3', 'O3', 'Mgb', 'Halpha', 'S2L', 'S2H']
LW0 = [2800, 3347, 3727, 3799, 3836, 3869, 4102, 4341, 4861, 4959, 5007, 5175, 6563, 6717, 6731]
fsl = 9 # Fontsize for line
if f_grsm:
try:
for ii in range(len(LW)):
ll = np.argmin(np.abs(LW[ii]-LW0[:]))
if ll == 2 and FLW[ii] == 1: # FLW is the flag for line fitting.
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]-130, yyl[0]*1.28, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif (ll == 9 and FLW[ii] == 1):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
elif (ll == 10 and FLW[ii] == 1):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.75, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif FLW[ii] == 1 and (ll == 6 or ll == 7 or ll == 8):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.2,yminzoom+(ymaxzoom-yminzoom)*0.35, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.95, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif ll == 6 or ll == 7 or ll == 8:
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.2,yminzoom+(ymaxzoom-yminzoom)*0.35, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color='gray', zorder=1, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.95, '%s'%(LN0[ll]), color='gray', fontsize=9, rotation=90)
elif FLW[ii] == 1:
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.7,yminzoom+(ymaxzoom-yminzoom)*.95, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*1.25, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
except:
pass
# Filters
if f_plot_filter:
ax1 = plot_filter(MB, ax1, ymax, scl=scl_yaxis)
####################
## Save
####################
ax1.legend(loc=1, fontsize=11)
if figpdf:
fig.savefig(MB.DIR_OUT + 'SPEC_' + ID + '_spec.pdf', dpi=dpi)
else:
fig.savefig(MB.DIR_OUT + 'SPEC_' + ID + '_spec.png', dpi=dpi)
def plot_sed_tau(MB, flim=0.01, fil_path='./', scale=1e-19, f_chind=True, figpdf=False, save_sed=True, inputs=False, \
mmax=300, dust_model=0, DIR_TMP='./templates/', f_label=False, f_bbbox=False, verbose=False, f_silence=True, \
f_fill=False, f_fancyplot=False, f_Alog=True, dpi=300, f_plot_filter=True):
'''
Parameters
----------
MB.SNlim : float
SN limit to show flux or up lim in SED.
f_chind : bool
If include non-detection in chi2 calculation, using Sawicki12.
mmax : int
Number of mcmc realization for plot. Not for calculation.
f_fancy : bool
plot each SED component.
f_fill : bool
if True, and so is f_fancy, fill each SED component.
Returns
-------
plots
'''
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from scipy.optimize import curve_fit
from scipy import asarray as ar,exp
import matplotlib
import scipy.integrate as integrate
import scipy.special as special
import os.path
from astropy.io import ascii
import time
if f_silence:
import matplotlib
matplotlib.use("Agg")
def gaus(x, a, x0, sigma):
    """Return an (unnormalized) Gaussian evaluated at x.

    Parameters
    ----------
    x : float or array-like
        Point(s) at which to evaluate the profile.
    a : float
        Peak amplitude (value at x == x0).
    x0 : float
        Center of the Gaussian.
    sigma : float
        Standard deviation (width) of the Gaussian.

    Returns
    -------
    float or ndarray
        a * exp(-(x - x0)^2 / (2 sigma^2)).
    """
    # Use np.exp: the `exp` alias from `from scipy import ... exp`
    # was deprecated and removed in modern SciPy releases.
    return a * np.exp(-(x - x0)**2 / (2 * sigma**2))
lcb = '#4682b4' # line color, blue
fnc = MB.fnc
bfnc = MB.bfnc
ID = MB.ID
Z = MB.Zall
age = MB.age
nage = MB.nage
tau0 = MB.tau0
NUM_COLORS = len(age)
cm = plt.get_cmap('gist_rainbow')
col = [cm(1 - 1.*i/NUM_COLORS) for i in range(NUM_COLORS)]
nstep_plot = 1
if MB.f_bpass:
nstep_plot = 30
SNlim = MB.SNlim
################
# RF colors.
home = os.path.expanduser('~')
c = MB.c
chimax = 1.
m0set = MB.m0set
Mpc_cm = MB.Mpc_cm
d = MB.d * scale
##################
# Fitting Results
##################
DIR_FILT = MB.DIR_FILT
SFILT = MB.filts
try:
f_err = MB.ferr
except:
f_err = 0
###########################
# Open result file
###########################
file = MB.DIR_OUT + 'summary_' + ID + '.fits'
hdul = fits.open(file)
ndim_eff = hdul[0].header['NDIM']
vals = {}
# Redshift MC
zp16 = hdul[1].data['zmc'][0]
zp50 = hdul[1].data['zmc'][1]
zp84 = hdul[1].data['zmc'][2]
vals['zmc'] = zp50
# Stellar mass MC
M16 = hdul[1].data['ms'][0]
M50 = hdul[1].data['ms'][1]
M84 = hdul[1].data['ms'][2]
if verbose:
print('Total stellar mass is %.2e'%(M50))
# Amplitude MC
A50 = np.zeros(len(age), dtype='float')
A16 = np.zeros(len(age), dtype='float')
A84 = np.zeros(len(age), dtype='float')
for aa in range(len(age)):
A16[aa] = 10**hdul[1].data['A'+str(aa)][0]
A50[aa] = 10**hdul[1].data['A'+str(aa)][1]
A84[aa] = 10**hdul[1].data['A'+str(aa)][2]
vals['A'+str(aa)] = np.log10(A50[aa])
Asum = np.sum(A50)
# TAU MC
# AGE MC
TAU50 = np.zeros(len(age), dtype='float')
TAU16 = np.zeros(len(age), dtype='float')
TAU84 = np.zeros(len(age), dtype='float')
AGE50 = np.zeros(len(age), dtype='float')
AGE16 = np.zeros(len(age), dtype='float')
AGE84 = np.zeros(len(age), dtype='float')
for aa in range(len(age)):
TAU16[aa] = 10**hdul[1].data['TAU'+str(aa)][0]
TAU50[aa] = 10**hdul[1].data['TAU'+str(aa)][1]
TAU84[aa] = 10**hdul[1].data['TAU'+str(aa)][2]
AGE16[aa] = 10**hdul[1].data['AGE'+str(aa)][0]
AGE50[aa] = 10**hdul[1].data['AGE'+str(aa)][1]
AGE84[aa] = 10**hdul[1].data['AGE'+str(aa)][2]
vals['TAU'+str(aa)] = np.log10(TAU50[aa])
vals['AGE'+str(aa)] = np.log10(AGE50[aa])
aa = 0
Av16 = hdul[1].data['Av'+str(aa)][0]
Av50 = hdul[1].data['Av'+str(aa)][1]
Av84 = hdul[1].data['Av'+str(aa)][2]
AAv = [Av50]
vals['Av'] = Av50
Z50 = np.zeros(len(age), dtype='float')
Z16 = np.zeros(len(age), dtype='float')
Z84 = np.zeros(len(age), dtype='float')
#NZbest = np.zeros(len(age), dtype='int')
for aa in range(len(age)):
Z16[aa] = hdul[1].data['Z'+str(aa)][0]
Z50[aa] = hdul[1].data['Z'+str(aa)][1]
Z84[aa] = hdul[1].data['Z'+str(aa)][2]
#NZbest[aa]= bfnc.Z2NZ(Z50[aa])
vals['Z'+str(aa)] = Z50[aa]
# Light weighted Z.
ZZ50 = np.sum(Z50*A50)/np.sum(A50)
# FIR Dust;
try:
MD16 = hdul[1].data['MDUST'][0]
MD50 = hdul[1].data['MDUST'][1]
MD84 = hdul[1].data['MDUST'][2]
TD16 = hdul[1].data['TDUST'][0]
TD50 = hdul[1].data['TDUST'][1]
TD84 = hdul[1].data['TDUST'][2]
nTD16 = hdul[1].data['nTDUST'][0]
nTD50 = hdul[1].data['nTDUST'][1]
nTD84 = hdul[1].data['nTDUST'][2]
DFILT = inputs['FIR_FILTER'] # filter band string.
DFILT = [x.strip() for x in DFILT.split(',')]
DFWFILT = fil_fwhm(DFILT, DIR_FILT)
if verbose:
print('Total dust mass is %.2e'%(MD50))
f_dust = True
except:
f_dust = False
chi = hdul[1].data['chi'][0]
chin = hdul[1].data['chi'][1]
fitc = chin
Cz0 = hdul[0].header['Cz0']
Cz1 = hdul[0].header['Cz1']
zbes = zp50
zscl = (1.+zbes)
###############################
# Data taken from
###############################
if MB.f_dust:
MB.dict = MB.read_data(Cz0, Cz1, zbes, add_fir=True)
else:
MB.dict = MB.read_data(Cz0, Cz1, zbes)
NR = MB.dict['NR']
x = MB.dict['x']
fy = MB.dict['fy']
ey = MB.dict['ey']
con0 = (NR<1000)
xg0 = x[con0]
fg0 = fy[con0] #* Cz0
eg0 = ey[con0] #* Cz0
con1 = (NR>=1000) & (NR<10000) #& (fy/ey>SNlim)
xg1 = x[con1]
fg1 = fy[con1] #* Cz1
eg1 = ey[con1] #* Cz1
if len(xg0)>0 or len(xg1)>0:
f_grsm = True
else:
f_grsm = False
# Weight is set to zero for those no data (ey<0).
wht = fy * 0
con_wht = (ey>0)
wht[con_wht] = 1./np.square(ey[con_wht])
# BB data points;
NRbb = MB.dict['NRbb'] #dat[:, 0]
xbb = MB.dict['xbb'] #dat[:, 1]
fybb = MB.dict['fybb'] #dat[:, 2]
eybb = MB.dict['eybb'] #dat[:, 3]
exbb = MB.dict['exbb'] #dat[:, 4]
snbb = fybb/eybb
######################
# Weight by line
######################
wh0 = 1./np.square(eg0)
LW0 = []
model = fg0
wht3 = check_line_man(fy, x, wht, fy, zbes, LW0)
######################
# Mass-to-Light ratio.
######################
af = MB.af
sedpar = af['ML']
try:
isochrone = af['isochrone']
LIBRARY = af['library']
except:
isochrone = ''
LIBRARY = ''
#############
# Plot.
#############
# Set the inset.
if f_grsm or f_dust:
fig = plt.figure(figsize=(7.,3.2))
fig.subplots_adjust(top=0.98, bottom=0.16, left=0.1, right=0.99, hspace=0.15, wspace=0.25)
ax1 = fig.add_subplot(111)
xsize = 0.29
ysize = 0.25
if f_grsm:
ax2t = ax1.inset_axes((1-xsize-0.01,1-ysize-0.01,xsize,ysize))
if f_dust:
ax3t = ax1.inset_axes((0.7,.35,.28,.25))
else:
fig = plt.figure(figsize=(5.5,2.2))
fig.subplots_adjust(top=0.98, bottom=0.16, left=0.1, right=0.99, hspace=0.15, wspace=0.25)
ax1 = fig.add_subplot(111)
#######################################
# D.Kelson like Box for BB photometry
#######################################
#col_dat = 'darkgreen'
#col_dat = 'tomato'
col_dat = 'r'
if f_bbbox:
for ii in range(len(xbb)):
if eybb[ii]<100 and fybb[ii]/eybb[ii]>1:
xx = [xbb[ii]-exbb[ii],xbb[ii]-exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]+exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]-exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]-eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
xx = [xbb[ii]-exbb[ii],xbb[ii]+exbb[ii]]
yy = [(fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d, (fybb[ii]+eybb[ii])*c/np.square(xbb[ii])/d]
ax1.plot(xx, yy, color='k', linestyle='-', linewidth=0.5, zorder=3)
else: # Normal BB plot;
# Detection;
conbb_hs = (fybb/eybb>SNlim)
ax1.errorbar(xbb[conbb_hs], fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d, \
yerr=eybb[conbb_hs]*c/np.square(xbb[conbb_hs])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax1.plot(xbb[conbb_hs], fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d, \
marker='.', color=col_dat, linestyle='', linewidth=0, zorder=4, ms=8)#, label='Obs.(BB)')
try:
# For any data removed fron fit (i.e. IRAC excess):
data_ex = ascii.read(DIR_TMP + 'bb_obs_' + ID + '_removed.cat')
NR_ex = data_ex['col1']
except:
NR_ex = []
# Upperlim;
sigma = 1.0
leng = np.max(fybb[conbb_hs] * c / np.square(xbb[conbb_hs]) / d) * 0.05 #0.2
conebb_ls = (fybb/eybb<=SNlim) & (eybb>0)
for ii in range(len(xbb)):
if NR[ii] in NR_ex[:]:
conebb_ls[ii] = False
ax1.errorbar(xbb[conebb_ls], eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, yerr=leng,\
uplims=eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, linestyle='',color=col_dat, marker='', ms=4, label='', zorder=4, capsize=3)
# For any data removed fron fit (i.e. IRAC excess):
f_exclude = False
try:
col_ex = 'lawngreen'
#col_ex = 'limegreen'
#col_ex = 'r'
# Currently, this file is made after FILTER_SKIP;
data_ex = ascii.read(DIR_TMP + 'bb_obs_' + ID + '_removed.cat')
x_ex = data_ex['col2']
fy_ex = data_ex['col3']
ey_ex = data_ex['col4']
ex_ex = data_ex['col5']
ax1.errorbar(x_ex, fy_ex * c / np.square(x_ex) / d, \
xerr=ex_ex, yerr=ey_ex*c/np.square(x_ex)/d, color='k', linestyle='', linewidth=0.5, zorder=5)
ax1.scatter(x_ex, fy_ex * c / np.square(x_ex) / d, marker='s', color=col_ex, edgecolor='k', zorder=5, s=30)
f_exclude = True
except:
pass
#####################################
# Open ascii file and stock to array.
MB.lib = fnc.open_spec_fits(fall=0)
MB.lib_all = fnc.open_spec_fits(fall=1)
if f_dust:
DT0 = float(inputs['TDUST_LOW'])
DT1 = float(inputs['TDUST_HIG'])
dDT = float(inputs['TDUST_DEL'])
Temp = np.arange(DT0,DT1,dDT)
MB.lib_dust = fnc.open_spec_dust_fits(fall=0)
MB.lib_dust_all = fnc.open_spec_dust_fits(fall=1)
# FIR dust plot;
if f_dust:
from lmfit import Parameters
par = Parameters()
par.add('MDUST',value=MD50)
par.add('TDUST',value=nTD50)
par.add('zmc',value=zp50)
y0d, x0d = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust_all)
y0d_cut, x0d_cut = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust)
# data;
dat_d = ascii.read(MB.DIR_TMP + 'bb_dust_obs_' + MB.ID + '.cat')
NRbbd = dat_d['col1']
xbbd = dat_d['col2']
fybbd = dat_d['col3']
eybbd = dat_d['col4']
exbbd = dat_d['col5']
snbbd = fybbd/eybbd
try:
conbbd_hs = (fybbd/eybbd>SNlim)
ax1.errorbar(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
yerr=eybbd[conbbd_hs]*c/np.square(xbbd[conbbd_hs])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax1.plot(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
'.r', linestyle='', linewidth=0, zorder=4)#, label='Obs.(BB)')
ax3t.plot(xbbd[conbbd_hs], fybbd[conbbd_hs] * c / np.square(xbbd[conbbd_hs]) / d, \
'.r', linestyle='', linewidth=0, zorder=4)#, label='Obs.(BB)')
except:
pass
try:
conebbd_ls = (fybbd/eybbd<=SNlim)
ax1.errorbar(xbbd[conebbd_ls], eybbd[conebbd_ls] * c / np.square(xbbd[conebbd_ls]) / d, \
yerr=fybbd[conebbd_ls]*0+np.max(fybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d)*0.05, \
uplims=eybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d, color='r', linestyle='', linewidth=0.5, zorder=4)
ax3t.errorbar(xbbd[conebbd_ls], eybbd[conebbd_ls] * c / np.square(xbbd[conebbd_ls]) / d, \
yerr=fybbd[conebbd_ls]*0+np.max(fybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d)*0.05, \
uplims=eybbd[conebbd_ls]*c/np.square(xbbd[conebbd_ls])/d, color='r', linestyle='', linewidth=0.5, zorder=4)
except:
pass
#
# This is for UVJ color time evolution.
#
Asum = np.sum(A50[:])
alp = .5
# Get total templates
y0p, x0p = MB.fnc.tmp04(vals, f_val=False, check_bound=False)
y0, x0 = MB.fnc.tmp04(vals, f_val=False, check_bound=False, lib_all=True)
ysum = y0
#f_50_comp = np.zeros((len(age),len(y0)),'float')
f_50_comp = y0[:] * c / np.square(x0) / d
ysump = y0p
nopt = len(ysump)
if f_dust:
ysump[:] += y0d_cut[:nopt]
ysump = np.append(ysump,y0d_cut[nopt:])
f_50_comp_dust = y0d * c / np.square(x0d) / d
# Plot each best fit:
vals_each = vals.copy()
for aa in range(len(age)):
vals_each['A%d'%aa] = -99
for aa in range(len(age)):
vals_each['A%d'%aa] = vals['A%d'%aa]
y0tmp, x0tmp = MB.fnc.tmp04(vals_each, f_val=False, check_bound=False, lib_all=True)
if aa == 0:
y0keep = y0tmp
else:
y0keep += y0tmp
ax1.plot(x0tmp, y0tmp * c / np.square(x0tmp) / d, linestyle='--', lw=0.5, color=col[aa])
vals_each['A%d'%aa] = 0
# Plot best fit;
ax1.plot(x0, f_50_comp, linestyle='-', lw=0.5, color='k')
#############
# Main result
#############
conbb_ymax = (xbb>0) & (fybb>0) & (eybb>0) & (fybb/eybb>1) # (conbb) &
ymax = np.max(fybb[conbb_ymax]*c/np.square(xbb[conbb_ymax])/d) * 1.6
xboxl = 17000
xboxu = 28000
x1max = 22000
if x1max < np.max(xbb[conbb_ymax]):
x1max = np.max(xbb[conbb_ymax]) * 1.5
ax1.set_xlim(2000, 11000)
ax1.set_xscale('log')
if f_plot_filter:
scl_yaxis = 0.2
else:
scl_yaxis = 0.1
ax1.set_ylim(-ymax*scl_yaxis,ymax)
ax1.text(2100,-ymax*0.08,'SNlimit:%.1f'%(SNlim),fontsize=8)
ax1.set_xlabel('Observed wavelength ($\mathrm{\mu m}$)', fontsize=12)
ax1.set_ylabel('Flux ($10^{%d}\mathrm{erg}/\mathrm{s}/\mathrm{cm}^{2}/\mathrm{\AA}$)'%(np.log10(scale)),fontsize=12,labelpad=-2)
xticks = [2500, 5000, 10000, 20000, 40000, 80000, 110000]
xlabels= ['0.25', '0.5', '1', '2', '4', '8', '']
if f_dust:
xticks = [2500, 5000, 10000, 20000, 40000, 80000, 400000]
xlabels= ['0.25', '0.5', '1', '2', '4', '8', '']
ax1.set_xticks(xticks)
ax1.set_xticklabels(xlabels)
dely1 = 0.5
while (ymax-0)/dely1<1:
dely1 /= 2.
while (ymax-0)/dely1>4:
dely1 *= 2.
y1ticks = np.arange(0, ymax, dely1)
ax1.set_yticks(y1ticks)
ax1.set_yticklabels(np.arange(0, ymax, dely1), minor=False)
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
ax1.yaxis.labelpad = 1.5
xx = np.arange(1200,400000)
yy = xx * 0
ax1.plot(xx, yy, ls='--', lw=0.5, color='k')
#############
# Plot
#############
ms = np.zeros(len(age), dtype='float')
af = MB.af
sedpar = af['ML']
eAAl = np.zeros(len(age),dtype='float')
eAAu = np.zeros(len(age),dtype='float')
eAMl = np.zeros(len(age),dtype='float')
eAMu = np.zeros(len(age),dtype='float')
MSsum = np.sum(ms)
Asum = np.sum(A50)
A50 /= Asum
A16 /= Asum
A84 /= Asum
AM50 = A50 * M50 * ms / MSsum
CM = M50/np.sum(AM50)
AM50 = A50 * M50 * ms / MSsum * CM
AM16 = A16 * M50 * ms / MSsum * CM
AM84 = A84 * M50 * ms / MSsum * CM
AC50 = A50 * 0 # Cumulative
for ii in range(len(A50)):
eAAl[ii] = A50[ii] - A16[ii]
eAAu[ii] = A84[ii] - A50[ii]
eAMl[ii] = AM50[ii] - AM16[ii]
eAMu[ii] = AM84[ii] - AM50[ii]
AC50[ii] = np.sum(AM50[ii:])
################
# Lines
################
LN = ['Mg2', 'Ne5', 'O2', 'Htheta', 'Heta', 'Ne3', 'Hdelta', 'Hgamma', 'Hbeta', 'O3', 'O3', 'Mgb', 'Halpha', 'S2L', 'S2H']
FLW = np.zeros(len(LN),dtype='int')
####################
# For cosmology
####################
DL = MB.cosmo.luminosity_distance(zbes).value * Mpc_cm
Cons = (4.*np.pi*DL**2/(1.+zbes))
if f_grsm:
print('This function (write_lines) needs to be revised.')
write_lines(ID, zbes, DIR_OUT=MB.DIR_OUT)
##########################
# Zoom in Line regions
##########################
if f_grsm:
conspec = (NR<10000) #& (fy/ey>1)
#ax2t.fill_between(xg1, (fg1-eg1) * c/np.square(xg1)/d, (fg1+eg1) * c/np.square(xg1)/d, lw=0, color='#DF4E00', zorder=10, alpha=0.7, label='')
#ax2t.fill_between(xg0, (fg0-eg0) * c/np.square(xg0)/d, (fg0+eg0) * c/np.square(xg0)/d, lw=0, color='royalblue', zorder=10, alpha=0.2, label='')
ax2t.errorbar(xg1, fg1 * c/np.square(xg1)/d, yerr=eg1 * c/np.square(xg1)/d, lw=0.5, color='#DF4E00', zorder=10, alpha=1., label='', capsize=0)
ax2t.errorbar(xg0, fg0 * c/np.square(xg0)/d, yerr=eg0 * c/np.square(xg0)/d, lw=0.5, linestyle='', color='royalblue', zorder=10, alpha=1., label='', capsize=0)
xgrism = np.concatenate([xg0,xg1])
fgrism = np.concatenate([fg0,fg1])
egrism = np.concatenate([eg0,eg1])
con4000b = (xgrism/zscl>3400) & (xgrism/zscl<3800) & (fgrism>0) & (egrism>0)
con4000r = (xgrism/zscl>4200) & (xgrism/zscl<5000) & (fgrism>0) & (egrism>0)
print('Median SN at 3400-3800 is;', np.median((fgrism/egrism)[con4000b]))
print('Median SN at 4200-5000 is;', np.median((fgrism/egrism)[con4000r]))
#
# From MCMC chain
#
file = MB.DIR_OUT + 'chain_' + ID + '_corner.cpkl'
niter = 0
data = loadcpkl(file)
ndim = data['ndim']
burnin = data['burnin']
nmc = data['niter']
nwalk = data['nwalkers']
Nburn = burnin
res = data['chain'][:]
samples = res
# Saved template;
ytmp = np.zeros((mmax,len(ysum)), dtype='float')
ytmp_each = np.zeros((mmax,len(ysum),len(age)), dtype='float')
ytmpmax = np.zeros(len(ysum), dtype='float')
ytmpmin = np.zeros(len(ysum), dtype='float')
# MUV;
DL = MB.cosmo.luminosity_distance(zbes).value * Mpc_cm # Luminositydistance in cm
DL10 = Mpc_cm/1e6 * 10 # 10pc in cm
Fuv = np.zeros(mmax, dtype='float') # For Muv
Fuv28 = np.zeros(mmax, dtype='float') # For Fuv(1500-2800)
Lir = np.zeros(mmax, dtype='float') # For L(8-1000um)
UVJ = np.zeros((mmax,4), dtype='float') # For UVJ color;
Cmznu = 10**((48.6+m0set)/(-2.5)) # Conversion from m0_25 to fnu
# From random chain;
alp=0.02
for kk in range(0,mmax,1):
nr = np.random.randint(Nburn, len(samples['A%d'%MB.aamin[0]]))
try:
Av_tmp = samples['Av'][nr]
except:
Av_tmp = MB.AVFIX
vals['Av'] = Av_tmp
try:
zmc = samples['zmc'][nr]
except:
zmc = zbes
vals['zmc'] = zmc
for ss in MB.aamin:
try:
AA_tmp = 10**samples['A'+str(ss)][nr]
except:
AA_tmp = 0
vals['A%d'%ss] = np.log10(AA_tmp)
if ss == 0 or MB.ZEVOL:
try:
ZZtmp = samples['Z%d'%ss][nr]
except:
ZZtmp = MB.ZFIX
vals['Z%d'%ss] = ZZtmp
mod0_tmp, xm_tmp = fnc.tmp04(vals, f_val=False, check_bound=False, lib_all=True)
fm_tmp = mod0_tmp
if False:
# Each;
ytmp_each[kk,:,ss] = mod0_tmp[:] * c / np.square(xm_tmp[:]) / d
#if kk == 100:
# ax1.plot(xm_tmp[:], ytmp_each[kk,:,ss], color=col[ss], linestyle='--')
#
# Dust component;
#
if f_dust:
if kk == 0:
par = Parameters()
par.add('MDUST',value=samples['MDUST'][nr])
try:
par.add('TDUST',value=samples['TDUST'][nr])
except:
par.add('TDUST',value=0)
par['MDUST'].value = samples['MDUST'][nr]
try:
par['TDUST'].value = samples['TDUST'][nr]
except:
par['TDUST'].value = 0
model_dust, x1_dust = fnc.tmp04_dust(par.valuesdict())#, zbes, lib_dust_all)
if kk == 0:
deldt = (x1_dust[1] - x1_dust[0])
x1_tot = np.append(xm_tmp,np.arange(np.max(xm_tmp),np.max(x1_dust),deldt))
# Redefine??
ytmp = np.zeros((mmax,len(x1_tot)), dtype='float')
ytmp_dust = np.zeros((mmax,len(x1_dust)), dtype='float')
ytmp_dust[kk,:] = model_dust * c/np.square(x1_dust)/d
model_tot = np.interp(x1_tot,xm_tmp,fm_tmp) + np.interp(x1_tot,x1_dust,model_dust)
ytmp[kk,:] = model_tot[:] * c/np.square(x1_tot[:])/d
else:
x1_tot = xm_tmp
ytmp[kk,:] = fm_tmp[:] * c / np.square(xm_tmp[:]) / d
# plot random sed;
plot_mc = True
if plot_mc:
ax1.plot(x1_tot, ytmp[kk,:], '-', lw=1, color='gray', zorder=-2, alpha=0.02)
# Grism plot + Fuv flux + LIR.
if f_grsm:
ax2t.plot(x1_tot, ytmp[kk,:], '-', lw=0.5, color='gray', zorder=3., alpha=0.02)
if True:
# Get FUV flux;
Fuv[kk] = get_Fuv(x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)) * (DL**2/(1.+zbes)) / (DL10**2), lmin=1250, lmax=1650)
Fuv28[kk] = get_Fuv(x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)) * (4*np.pi*DL**2/(1.+zbes))*Cmznu, lmin=1500, lmax=2800)
Lir[kk] = 0
# Get UVJ Color;
lmconv,fconv = filconv_fast(MB.filts_rf, MB.band_rf, x1_tot[:]/(1.+zbes), (ytmp[kk,:]/(c/np.square(x1_tot)/d)))
UVJ[kk,0] = -2.5*np.log10(fconv[0]/fconv[2])
UVJ[kk,1] = -2.5*np.log10(fconv[1]/fconv[2])
UVJ[kk,2] = -2.5*np.log10(fconv[2]/fconv[3])
UVJ[kk,3] = -2.5*np.log10(fconv[4]/fconv[3])
# Do stuff...
time.sleep(0.01)
# Update Progress Bar
printProgressBar(kk, mmax, prefix = 'Progress:', suffix = 'Complete', length = 40)
print('')
#
# Plot Median SED;
#
ytmp16 = np.percentile(ytmp[:,:],16,axis=0)
ytmp50 = np.percentile(ytmp[:,:],50,axis=0)
ytmp84 = np.percentile(ytmp[:,:],84,axis=0)
if f_dust:
ytmp_dust50 = np.percentile(ytmp_dust[:,:],50, axis=0)
#if not f_fill:
ax1.fill_between(x1_tot[::nstep_plot], ytmp16[::nstep_plot], ytmp84[::nstep_plot], ls='-', lw=.5, color='gray', zorder=-2, alpha=0.5)
ax1.plot(x1_tot[::nstep_plot], ytmp50[::nstep_plot], '-', lw=.5, color='gray', zorder=-1, alpha=1.)
# Attach the data point in MB;
MB.sed_wave_obs = xbb
MB.sed_flux_obs = fybb * c / np.square(xbb) / d
MB.sed_eflux_obs = eybb * c / np.square(xbb) / d
# Attach the best SED to MB;
MB.sed_wave = x1_tot
MB.sed_flux16 = ytmp16
MB.sed_flux50 = ytmp50
MB.sed_flux84 = ytmp84
#########################
# Calculate non-det chi2
# based on Sawick12
#########################
#chi2,fin_chi2 = get_chi2(fy, ey, wht3, ysump, ndim_eff, SNlim=1.0, f_chind=f_chind, f_exclude=f_exclude, xbb=xbb, x_ex=x_ex)
def func_tmp(xint,eobs,fmodel):
int_tmp = np.exp(-0.5 * ((xint-fmodel)/eobs)**2)
return int_tmp
if f_chind:
conw = (wht3>0) & (ey>0) & (fy/ey>SNlim)
else:
conw = (wht3>0) & (ey>0)
chi2 = sum((np.square(fy-ysump) * np.sqrt(wht3))[conw])
chi_nd = 0.0
if f_chind:
f_ex = np.zeros(len(fy), 'int')
for ii in range(len(fy)):
if f_exclude:
if xbb[ii] in x_ex:
f_ex[ii] = 1
con_up = (ey>0) & (fy/ey<=SNlim) & (f_ex == 0)
from scipy import special
x_erf = (ey[con_up] - ysump[con_up]) / (np.sqrt(2) * ey[con_up])
f_erf = special.erf(x_erf)
chi_nd = np.sum( np.log(np.sqrt(np.pi / 2) * ey[con_up] * (1 + f_erf)) )
# Number of degree;
con_nod = (wht3>0) & (ey>0) #& (fy/ey>SNlim)
if MB.ferr:
ndim_eff -= 1
nod = int(len(wht3[con_nod])-ndim_eff)
if nod>0:
fin_chi2 = (chi2 - 2 * chi_nd) / nod
else:
fin_chi2 = -99
if f_chind:
conw = (wht3>0) & (ey>0) & (fy/ey>SNlim)
con_up = (ey>0) & (fy/ey<=SNlim) & (f_ex == 0)
else:
conw = (wht3>0) & (ey>0)
# Print results;
print('\n')
print('No-of-detection : %d'%(len(wht3[conw])))
print('chi2 : %.2f'%(chi2))
if f_chind:
print('No-of-non-detection: %d'%(len(ey[con_up])))
print('chi2 for non-det : %.2f'%(- 2 * chi_nd))
print('No-of-params : %d'%(ndim_eff))
print('Degrees-of-freedom : %d'%(nod))
print('Final chi2/nu : %.2f'%(fin_chi2))
if False:
from lmfit import Model, Parameters, minimize, fit_report, Minimizer
from .posterior_flexible import Post
class_post = Post(MB)
residual = class_post.residual
MB.set_param()
fit_params = MB.fit_params #Parameters()
for key in vals.keys():
try:
fit_params[key].value=vals[key]
except:
pass
out_tmp = minimize(residual, fit_params, args=(fy, ey, wht3, False), method='differential_evolution') # nelder is the most efficient.
csq = out_tmp.chisqr
rcsq = out_tmp.redchi
print(csq, rcsq)
#
# plot BB model from best template (blue squares)
#
col_dia = 'blue'
if f_dust:
ALLFILT = np.append(SFILT,DFILT)
#for ii in range(len(x1_tot)):
# print(x1_tot[ii], model_tot[ii]*c/np.square(x1_tot[ii])/d)
lbb, fbb, lfwhm = filconv(ALLFILT, x1_tot, ytmp50, DIR_FILT, fw=True)
lbb, fbb16, lfwhm = filconv(ALLFILT, x1_tot, ytmp16, DIR_FILT, fw=True)
lbb, fbb84, lfwhm = filconv(ALLFILT, x1_tot, ytmp84, DIR_FILT, fw=True)
ax1.plot(x1_tot, ytmp50, '--', lw=0.5, color='purple', zorder=-1, label='')
ax3t.plot(x1_tot, ytmp50, '--', lw=0.5, color='purple', zorder=-1, label='')
iix = []
for ii in range(len(fbb)):
iix.append(ii)
con_sed = ()
ax1.scatter(lbb[iix][con_sed], fbb[iix][con_sed], lw=0.5, color='none', edgecolor=col_dia, zorder=3, alpha=1.0, marker='d', s=50)
# plot FIR range;
ax3t.scatter(lbb, fbb, lw=0.5, color='none', edgecolor=col_dia, \
zorder=2, alpha=1.0, marker='d', s=50)
else:
lbb, fbb, lfwhm = filconv(SFILT, x1_tot, ytmp50, DIR_FILT, fw=True, MB=MB, f_regist=False)
lbb, fbb16, lfwhm = filconv(SFILT, x1_tot, ytmp16, DIR_FILT, fw=True, MB=MB, f_regist=False)
lbb, fbb84, lfwhm = filconv(SFILT, x1_tot, ytmp84, DIR_FILT, fw=True, MB=MB, f_regist=False)
iix = []
for ii in range(len(fbb)):
iix.append(np.argmin(np.abs(lbb[ii]-xbb[:])))
con_sed = (eybb>0)
ax1.scatter(lbb[iix][con_sed], fbb[iix][con_sed], lw=0.5, color='none', edgecolor=col_dia, zorder=3, alpha=1.0, marker='d', s=50)
# Calculate EW, if there is excess band;
try:
iix2 = []
for ii in range(len(fy_ex)):
iix2.append(np.argmin(np.abs(lbb[:]-x_ex[ii])))
# Rest-frame EW;
# Note about 16/84 in fbb
EW16 = (fy_ex * c / np.square(x_ex) / d - fbb84[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50 = (fy_ex * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW84 = (fy_ex * c / np.square(x_ex) / d - fbb16[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50_er1 = ((fy_ex-ey_ex) * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
EW50_er2 = ((fy_ex+ey_ex) * c / np.square(x_ex) / d - fbb[iix2]) / (fbb[iix2]) * lfwhm[iix2] / (1.+zbes)
cnt50 = fbb[iix2]
cnt16 = fbb16[iix2]
cnt84 = fbb84[iix2]
# Luminosity;
#Lsun = 3.839 * 1e33 #erg s-1
L16 = EW16 * cnt16 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
L50 = EW50 * cnt50 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
L84 = EW84 * cnt84 * (4.*np.pi*DL**2) * scale * (1+zbes) # A * erg/s/A/cm2 * cm2
ew_label = []
for ii in range(len(fy_ex)):
lres = MB.band['%s_lam'%MB.filts[iix2[ii]]][:]
fres = MB.band['%s_res'%MB.filts[iix2[ii]]][:]
ew_label.append(MB.filts[iix2[ii]])
print('\n')
print('EW016 for', x_ex[ii], 'is %d'%EW16[ii])
print('EW050 for', x_ex[ii], 'is %d'%EW50[ii])
print('EW084 for', x_ex[ii], 'is %d'%EW84[ii])
print('%d_{-%d}^{+%d} , for sed error'%(EW50[ii],EW50[ii]-EW84[ii],EW16[ii]-EW50[ii]))
print('Or, %d\pm{%d} , for flux error'%(EW50[ii],EW50[ii]-EW50_er1[ii]))
except:
pass
if save_sed:
fbb16_nu = flamtonu(lbb, fbb16*scale, m0set=25.0)
fbb_nu = flamtonu(lbb, fbb*scale, m0set=25.0)
fbb84_nu = flamtonu(lbb, fbb84*scale, m0set=25.0)
# Then save full spectrum;
col00 = []
col1 = fits.Column(name='wave_model', format='E', unit='AA', array=x1_tot)
col00.append(col1)
col2 = fits.Column(name='f_model_16', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp16[:])
col00.append(col2)
col3 = fits.Column(name='f_model_50', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp50[:])
col00.append(col3)
col4 = fits.Column(name='f_model_84', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp84[:])
col00.append(col4)
f_sed_each = False
if f_sed_each:
# Each component
# Stellar
col1 = fits.Column(name='wave_model_stel', format='E', unit='AA', array=x0)
col00.append(col1)
for aa in range(len(age)):
col1 = fits.Column(name='f_model_stel_%d'%aa, format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=f_50_comp[aa,:])
col00.append(col1)
if f_dust:
col1 = fits.Column(name='wave_model_dust', format='E', unit='AA', array=x1_dust)
col00.append(col1)
col1 = fits.Column(name='f_model_dust', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=ytmp_dust50)
col00.append(col1)
# BB for dust
if f_dust:
xbb = np.append(xbb,xbbd)
fybb = np.append(fybb,fybbd)
eybb = np.append(eybb,eybbd)
col5 = fits.Column(name='wave_obs', format='E', unit='AA', array=xbb)
col00.append(col5)
col6 = fits.Column(name='f_obs', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=fybb[:] * c / np.square(xbb[:]) / d)
col00.append(col6)
col7 = fits.Column(name='e_obs', format='E', unit='1e%derg/s/cm2/AA'%(np.log10(scale)), array=eybb[:] * c / np.square(xbb[:]) / d)
col00.append(col7)
hdr = fits.Header()
hdr['redshift'] = zbes
hdr['id'] = ID
hdr['hierarch isochrone'] = isochrone
hdr['library'] = LIBRARY
hdr['scale'] = scale
try:
# Chi square:
hdr['chi2'] = chi2
hdr['hierarch No-of-effective-data-points'] = len(wht3[conw])
hdr['hierarch No-of-nondetectioin'] = len(ey[con_up])
hdr['hierarch Chi2-of-nondetection'] = chi_nd
hdr['hierarch No-of-params'] = ndim_eff
hdr['hierarch Degree-of-freedom'] = nod
hdr['hierarch reduced-chi2'] = fin_chi2
except:
print('Chi seems to be wrong...')
pass
try:
# Muv
MUV = -2.5 * np.log10(Fuv[:]) + 25.0
hdr['MUV16'] = np.percentile(MUV[:],16)
hdr['MUV50'] = np.percentile(MUV[:],50)
hdr['MUV84'] = np.percentile(MUV[:],84)
# Fuv (!= flux of Muv)
hdr['FUV16'] = np.percentile(Fuv28[:],16)
hdr['FUV50'] = np.percentile(Fuv28[:],50)
hdr['FUV84'] = np.percentile(Fuv28[:],84)
# LIR
hdr['LIR16'] = np.percentile(Lir[:],16)
hdr['LIR50'] = np.percentile(Lir[:],50)
hdr['LIR84'] = np.percentile(Lir[:],84)
except:
pass
# UVJ
try:
hdr['uv16'] = np.percentile(UVJ[:,0],16)
hdr['uv50'] = np.percentile(UVJ[:,0],50)
hdr['uv84'] = np.percentile(UVJ[:,0],84)
hdr['bv16'] = np.percentile(UVJ[:,1],16)
hdr['bv50'] = np.percentile(UVJ[:,1],50)
hdr['bv84'] = np.percentile(UVJ[:,1],84)
hdr['vj16'] = np.percentile(UVJ[:,2],16)
hdr['vj50'] = np.percentile(UVJ[:,2],50)
hdr['vj84'] = np.percentile(UVJ[:,2],84)
hdr['zj16'] = np.percentile(UVJ[:,3],16)
hdr['zj50'] = np.percentile(UVJ[:,3],50)
hdr['zj84'] = np.percentile(UVJ[:,3],84)
except:
print('\nError when writinf UVJ colors;\n')
pass
# EW;
try:
for ii in range(len(EW50)):
hdr['EW_%s_16'%(ew_label[ii])] = EW16[ii]
hdr['EW_%s_50'%(ew_label[ii])] = EW50[ii]
hdr['EW_%s_84'%(ew_label[ii])] = EW84[ii]
hdr['EW_%s_e1'%(ew_label[ii])] = EW50_er1[ii]
hdr['EW_%s_e2'%(ew_label[ii])] = EW50_er2[ii]
hdr['HIERARCH cnt_%s_16'%(ew_label[ii])]= cnt16[ii]
hdr['HIERARCH cnt_%s_50'%(ew_label[ii])]= cnt50[ii]
hdr['HIERARCH cnt_%s_84'%(ew_label[ii])]= cnt84[ii]
hdr['L_%s_16'%(ew_label[ii])] = L16[ii]
hdr['L_%s_50'%(ew_label[ii])] = L50[ii]
hdr['L_%s_84'%(ew_label[ii])] = L84[ii]
except:
pass
# Version;
import gsf
hdr['version'] = gsf.__version__
# Write;
colspec = fits.ColDefs(col00)
hdu0 = fits.BinTableHDU.from_columns(colspec, header=hdr)
hdu0.writeto(MB.DIR_OUT + 'gsf_spec_%s.fits'%(ID), overwrite=True)
# ASDF;
tree_spec = {
'id': ID,
'redshift': '%.3f'%zbes,
'isochrone': '%s'%(isochrone),
'library': '%s'%(LIBRARY),
'scale': scale,
'version_gsf': gsf.__version__
}
# BB;
tree_spec.update({'wave': lbb})
tree_spec.update({'fnu_16': fbb16_nu})
tree_spec.update({'fnu_50': fbb_nu})
tree_spec.update({'fnu_84': fbb84_nu})
# full spectrum;
tree_spec.update({'wave_model': x1_tot})
tree_spec.update({'f_model_16': ytmp16})
tree_spec.update({'f_model_50': ytmp50})
tree_spec.update({'f_model_84': ytmp84})
# EW;
try:
for ii in range(len(EW50)):
tree_spec.update({'EW_%s_16'%(ew_label[ii]): EW16[ii]})
tree_spec.update({'EW_%s_50'%(ew_label[ii]): EW50[ii]})
tree_spec.update({'EW_%s_84'%(ew_label[ii]): EW84[ii]})
tree_spec.update({'EW_%s_e1'%(ew_label[ii]): EW50_er1[ii]})
tree_spec.update({'EW_%s_e2'%(ew_label[ii]): EW50_er2[ii]})
tree_spec.update({'cnt_%s_16'%(ew_label[ii]): cnt16[ii]})
tree_spec.update({'cnt_%s_50'%(ew_label[ii]): cnt50[ii]})
tree_spec.update({'cnt_%s_84'%(ew_label[ii]): cnt84[ii]})
tree_spec.update({'L_%s_16'%(ew_label[ii]): L16[ii]})
tree_spec.update({'L_%s_50'%(ew_label[ii]): L50[ii]})
tree_spec.update({'L_%s_84'%(ew_label[ii]): L84[ii]})
except:
pass
# Each component
# Stellar
tree_spec.update({'wave_model_stel': x0})
if f_sed_each:
for aa in range(len(age)):
tree_spec.update({'f_model_stel_%d'%aa: f_50_comp[aa,:]})
if f_dust:
# dust
tree_spec.update({'wave_model_dust': x1_dust})
tree_spec.update({'f_model_dust': ytmp_dust50})
# BB for dust
tree_spec.update({'wave_obs': xbb})
tree_spec.update({'f_obs': fybb[:] * c / np.square(xbb[:]) / d})
tree_spec.update({'e_obs': eybb[:] * c / np.square(xbb[:]) / d})
# grism:
if f_grsm:
tree_spec.update({'fg0_obs': fg0 * c/np.square(xg0)/d})
tree_spec.update({'eg0_obs': eg0 * c/np.square(xg0)/d})
tree_spec.update({'wg0_obs': xg0})
tree_spec.update({'fg1_obs': fg1 * c/np.square(xg1)/d})
tree_spec.update({'eg1_obs': eg1 * c/np.square(xg1)/d})
tree_spec.update({'wg1_obs': xg1})
af = asdf.AsdfFile(tree_spec)
af.write_to(MB.DIR_OUT + 'gsf_spec_%s.asdf'%(ID), all_array_compression='zlib')
#
# SED params in plot
#
if f_label:
fd = fits.open(MB.DIR_OUT + 'SFH_' + ID + '.fits')[0].header
if f_dust:
label = 'ID: %s\n$z_\mathrm{obs.}:%.2f$\n$\log M_*/M_\odot:%.2f$\n$\log M_\mathrm{dust}/M_\odot:%.2f$\n$\log Z_*/Z_\odot:%.2f$\n$\log T_0$/Gyr$:%.2f$\n$\log \\tau$/Gyr$:%.2f$\n$A_V$/mag$:%.2f$\n$\\chi^2/\\nu:%.2f$'\
%(ID, zbes, float(fd['Mstel_50']), MD50, float(fd['Z_MW_50']), float(fd['T_MW_50']), float(fd['TAU_50']), float(fd['AV_50']), fin_chi2)
ylabel = ymax*0.45
else:
label = 'ID: %s\n$z_\mathrm{obs.}:%.2f$\n$\log M_*/M_\odot:%.2f$\n$\log Z_*/Z_\odot:%.2f$\n$\log T_0$/Gyr$:%.2f$\n$\log \\tau$/Gyr$:%.2f$\n$A_V$/mag$:%.2f$\n$\\chi^2/\\nu:%.2f$'\
%(ID, zbes, float(fd['Mstel_50']), float(fd['Z_MW_50']), float(fd['T_MW_50']), float(fd['TAU_50']), float(fd['AV_50']), fin_chi2)
ylabel = ymax*0.32
ax1.text(2200, ylabel, label,\
fontsize=7, bbox=dict(facecolor='w', alpha=0.7), zorder=10)
#######################################
ax1.xaxis.labelpad = -3
if f_grsm:
conlim = (x0>10000) & (x0<25000)
xgmin, xgmax = np.min(x0[conlim]),np.max(x0[conlim]), #7500, 17000
ax2t.set_xlabel('')
ax2t.set_xlim(xgmin, xgmax)
conaa = (x0>xgmin-50) & (x0<xgmax+50)
ymaxzoom = np.max(ysum[conaa]*c/np.square(x0[conaa])/d) * 1.15
yminzoom = np.min(ysum[conaa]*c/np.square(x0[conaa])/d) / 1.15
ax2t.set_ylim(yminzoom, ymaxzoom)
ax2t.xaxis.labelpad = -2
if xgmax>20000:
ax2t.set_xticks([8000, 12000, 16000, 20000, 24000])
ax2t.set_xticklabels(['0.8', '1.2', '1.6', '2.0', '2.4'])
else:
ax2t.set_xticks([8000, 10000, 12000, 14000, 16000])
ax2t.set_xticklabels(['0.8', '1.0', '1.2', '1.4', '1.6'])
if f_dust:
try:
contmp = (x1_tot>10*1e4)
y3min, y3max = -.2*np.max((model_tot * c/ np.square(x1_tot) / d)[contmp]), np.max((model_tot * c/ np.square(x1_tot) / d)[contmp])*2.0
ax3t.set_ylim(y3min, y3max)
except:
if verbose:
print('y3 limit is not specified.')
pass
ax3t.set_xlim(1e5, 3e7)
ax3t.set_xscale('log')
ax3t.set_xticks([100000, 1000000, 10000000])
ax3t.set_xticklabels(['10', '100', '1000'])
###############
# Line name
###############
LN0 = ['Mg2', '$NeIV$', '[OII]', 'H$\theta$', 'H$\eta$', 'Ne3?', 'H$\delta$', 'H$\gamma$', 'H$\\beta$', 'O3', 'O3', 'Mgb', 'Halpha', 'S2L', 'S2H']
LW0 = [2800, 3347, 3727, 3799, 3836, 3869, 4102, 4341, 4861, 4959, 5007, 5175, 6563, 6717, 6731]
fsl = 9 # Fontsize for line
if f_grsm:
try:
for ii in range(len(LW)):
ll = np.argmin(np.abs(LW[ii]-LW0[:]))
if ll == 2 and FLW[ii] == 1:
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]-130, yyl[0]*1.28, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif (ll == 9 and FLW[ii] == 1):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
elif (ll == 10 and FLW[ii] == 1):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.5,yminzoom+(ymaxzoom-yminzoom)*0.65, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.75, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif FLW[ii] == 1 and (ll == 6 or ll == 7 or ll == 8):
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.2,yminzoom+(ymaxzoom-yminzoom)*0.35, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.95, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
elif ll == 6 or ll == 7 or ll == 8:
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.2,yminzoom+(ymaxzoom-yminzoom)*0.35, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color='gray', zorder=1, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*0.95, '%s'%(LN0[ll]), color='gray', fontsize=9, rotation=90)
elif FLW[ii] == 1:
yyl = np.arange(yminzoom+(ymaxzoom-yminzoom)*0.7,yminzoom+(ymaxzoom-yminzoom)*.95, 0.01)
xxl = yyl * 0 + LW0[ll]
ax2t.errorbar(xxl, yyl, lw=0.5, color=lcb, zorder=20, alpha=1., label='', capsize=0)
ax2t.text(xxl[0]+40, yyl[0]*1.25, '%s'%(LN0[ll]), color=lcb, fontsize=9, rotation=90)
except:
pass
# Filters
if f_plot_filter:
ax1 = plot_filter(MB, ax1, ymax, scl=scl_yaxis)
####################
## Save
####################
ax1.legend(loc=1, fontsize=11)
if figpdf:
fig.savefig(MB.DIR_OUT + 'SPEC_' + ID + '_spec.pdf', dpi=dpi)
else:
fig.savefig(MB.DIR_OUT + 'SPEC_' + ID + '_spec.png', dpi=dpi)
def plot_filter(MB, ax, ymax, scl=0.3, cmap='gist_rainbow', alp=0.4):
    '''
    Overlay the filter transmission curves at the bottom of a SED axis.

    Each filter in MB.filts is drawn as a thin black outline with a colored
    fill; the fill color is assigned by the filter's blue-edge wavelength
    rank, so colors run through the colormap in wavelength order.

    Parameters
    ----------
    MB : object
        gsf main-body object; provides `filts` and the `band` response dict.
    ax : matplotlib axis
        Axis to draw on (typically the SED panel).
    ymax : float
        Full y-range of the SED panel; curves are scaled relative to it.
    scl : float
        Fraction of `ymax` reserved below zero for the filter curves.
    cmap : str
        Matplotlib colormap name used for the fills.
    alp : float
        Alpha of the filled response curves.

    Returns
    -------
    ax : matplotlib axis
        The same axis, with the curves added.
    '''
    nfilt = len(MB.filts)
    colormap = plt.get_cmap(cmap)
    palette = [colormap(1 - 1. * k / nfilt) for k in range(nfilt)]

    # Characteristic (blue-edge) wavelength per filter: the shortest
    # wavelength at which the response exceeds 10% of its peak.
    edges = []
    for filt in MB.filts:
        lam = MB.band['%s_lam' % filt]
        res = MB.band['%s_res' % filt]
        above = (res / res.max() > 0.1)
        edges.append(np.min(lam[above]))
    edges = np.asarray(edges)
    edges_sorted = np.sort(edges)

    for jj, filt in enumerate(MB.filts):
        # Rank of this filter in blue-edge order picks its color.
        rank = np.argmin(np.abs(edges_sorted[:] - edges[jj]))
        shade = palette[rank]
        lam = MB.band['%s_lam' % filt]
        res = MB.band['%s_res' % filt]
        # Normalized curve, compressed into the band [-ymax*scl, -0.2*ymax*scl).
        curve = ((res / np.max(res)) * 0.8 - 1) * ymax * scl
        ax.plot(lam, curve, linestyle='-', color='k', lw=0.2)
        ax.fill_between(lam, (lam * 0 - ymax) * scl, curve, linestyle='-', lw=0, color=shade, alpha=alp)
    return ax
def plot_corner_physparam_summary(MB, fig=None, out_ind=0, DIR_OUT='./', mmax=300, TMIN=0.0001, tau_lim=0.01, f_plot_filter=True, scale=1e-19):
    '''
    Corner plot of the posterior physical parameters, plus SED / SFH / Z panels.

    Purpose
    -------
    For summary. In the same format as plot_corner_physparam_frame.
    Draws `mmax` random realizations from the MCMC chain, accumulates
    the derived quantities (log M*, log T*, Av, log Z*, and z when
    `MB.fzmc == 1`), and renders per-parameter histograms, pairwise
    scatter plots and KDE contours, with the observed+model SED, the
    SFH and the metallicity history shown in the left-hand panels.
    The figure is written to `MB.DIR_OUT + 'param_<ID>_corner.png'`.

    Parameters
    ----------
    MB : object
        gsf main-body object; provides fit results, templates and paths.
    fig : optional
        Present for interface compatibility; a new figure is created
        internally regardless.  # NOTE(review): confirm this is intended.
    out_ind : int
        If 1, save a png frame every 10th realization (for gif assembly).
    DIR_OUT : str
        Directory of the observed-spectrum ascii catalog.
    mmax : int
        Number of random posterior draws to visualize.
    TMIN : float
        Floor on lookback times (Gyr) for the redshift tick axis.
    tau_lim : float
        Minimum time-bin width in Gyr (fsps minimum tau).
    f_plot_filter : bool
        If True, overlay the filter response curves on the SED panel.
    scale : float
        Flux normalization applied to `MB.d`.

    Notes
    -----
    Tau model not supported.
    '''
    col = ['violet', 'indigo', 'b', 'lightblue', 'lightgreen', 'g', 'orange', 'coral', 'r', 'darkred']#, 'k']
    import matplotlib
    import matplotlib.cm as cm
    import scipy.stats as stats

    # Shorthands pulled off the main-body object.
    nage = MB.nage
    fnc = MB.fnc
    bfnc = MB.bfnc
    ID = MB.ID
    Z = MB.Zall
    age = MB.age
    d = MB.d * scale
    c = MB.c
    tau0 = MB.tau0
    dust_model = MB.dust_model
    DIR_TMP = MB.DIR_TMP
    Txmax = np.max(age) + 1.0  # upper bound of the age (Gyr) axes.

    ###########################
    # Open result file
    ###########################
    lib = fnc.open_spec_fits(fall=0)
    lib_all = fnc.open_spec_fits(fall=1)
    file = MB.DIR_OUT + 'summary_' + ID + '.fits'
    hdul = fits.open(file) # open a FITS file

    # Redshift MC; rows are the [16, 50, 84] percentiles.
    zp50 = hdul[1].data['zmc'][1]
    zp16 = hdul[1].data['zmc'][0]
    zp84 = hdul[1].data['zmc'][2]

    M50 = hdul[1].data['ms'][1]
    M16 = hdul[1].data['ms'][0]
    M84 = hdul[1].data['ms'][2]
    print('Total stellar mass is %.2e'%(M50))

    # Amplitude percentiles per age bin.
    A50 = np.zeros(len(age), dtype='float')
    A16 = np.zeros(len(age), dtype='float')
    A84 = np.zeros(len(age), dtype='float')
    for aa in range(len(age)):
        A50[aa] = hdul[1].data['A'+str(aa)][1]
        A16[aa] = hdul[1].data['A'+str(aa)][0]
        A84[aa] = hdul[1].data['A'+str(aa)][2]
    Asum = np.sum(A50)

    aa = 0
    Av16 = hdul[1].data['Av'+str(aa)][0]
    Av50 = hdul[1].data['Av'+str(aa)][1]
    Av84 = hdul[1].data['Av'+str(aa)][2]

    # Metallicity percentiles per age bin.
    Z50 = np.zeros(len(age), dtype='float')
    Z16 = np.zeros(len(age), dtype='float')
    Z84 = np.zeros(len(age), dtype='float')
    NZbest = np.zeros(len(age), dtype='int')
    for aa in range(len(age)):
        Z50[aa] = hdul[1].data['Z'+str(aa)][1]
        Z16[aa] = hdul[1].data['Z'+str(aa)][0]
        Z84[aa] = hdul[1].data['Z'+str(aa)][2]
        NZbest[aa]= bfnc.Z2NZ(Z50[aa])
    ZZ50 = np.sum(Z50*A50)/np.sum(A50) # Light weighted Z.

    chi = hdul[1].data['chi'][0]
    chin = hdul[1].data['chi'][1]
    fitc = chin
    Cz0 = hdul[0].header['Cz0']
    Cz1 = hdul[0].header['Cz1']
    zbes = hdul[0].header['z']
    zscl = (1.+zbes)

    # plot Configuration
    if MB.fzmc == 1:
        Par = ['$\log M_*/M_\odot$', '$\log T_*$/Gyr', '$A_V$/mag', '$\log Z_* / Z_\odot$', '$z$']
    else:
        Par = ['$\log M_*/M_\odot$', '$\log T_*$/Gyr', '$A_V$/mag', '$\log Z_* / Z_\odot$']
    K = len(Par) # No of params.
    factor = 2.0 # size of one side of one panel
    lbdim = 0.5 * factor # size of left/bottom margin
    trdim = 0.2 * factor # size of top/right margin
    whspace= 0.02 # w/hspace size
    plotdim= factor * K + factor * (K - 1.) * whspace
    dim = lbdim + plotdim + trdim
    sclfig = 0.7

    # Format the figure.
    fig, axes = plt.subplots(K, K, figsize=(dim*sclfig*2, dim*sclfig))
    lb = lbdim / dim
    tr = (lbdim + plotdim) / dim
    #fig.subplots_adjust(left=lb*1.06, bottom=lb*.9, right=tr, top=tr*.99,
    fig.subplots_adjust(left=0.5, bottom=lb*.9, right=tr, top=tr*.99,
        wspace=whspace, hspace=whspace)

    # For spec plot
    ax0 = fig.add_axes([0.05,0.73,0.37,0.23])
    ax1 = fig.add_axes([0.05,0.40,0.37,0.23])
    ax2 = fig.add_axes([0.05,0.07,0.37,0.23])

    if MB.f_dust:
        MB.dict = MB.read_data(MB.Cz0, MB.Cz1, MB.zgal, add_fir=True)
    else:
        MB.dict = MB.read_data(MB.Cz0, MB.Cz1, MB.zgal)

    # Get data points;
    NRbb = MB.dict['NRbb']
    xbb = MB.dict['xbb']
    fybb = MB.dict['fybb']
    eybb = MB.dict['eybb']
    exbb = MB.dict['exbb']
    snbb = fybb/eybb

    # Get spec data points;
    dat = np.loadtxt(DIR_TMP + 'spec_obs_' + ID + '.cat', comments='#')
    NR = dat[:, 0]
    x = dat[:, 1]
    fy00 = dat[:, 2]
    ey00 = dat[:, 3]

    # NR<1000: first grism; 1000<=NR<10000: second grism; NR>=10000: broadband.
    con0 = (NR<1000) #& (fy/ey>SNlim)
    xg0 = x[con0]
    fg0 = fy00[con0] * Cz0
    eg0 = ey00[con0] * Cz0
    con1 = (NR>=1000) & (NR<10000) #& (fy/ey>SNlim)
    xg1 = x[con1]
    fg1 = fy00[con1] * Cz1
    eg1 = ey00[con1] * Cz1
    con2 = (NR>=10000)#& (fy/ey>SNlim)
    xg2 = x[con2]
    fg2 = fy00[con2]
    eg2 = ey00[con2]
    fy01 = np.append(fg0,fg1)
    ey01 = np.append(eg0,eg1)
    fy = np.append(fy01,fg2)
    ey = np.append(ey01,eg2)
    wht = 1./np.square(ey)

    # BB photometry
    conspec = (NR<10000)
    sigma = 1.
    conbb = (fybb/eybb > sigma)
    ax0.errorbar(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, yerr=eybb[conbb]*c/np.square(xbb[conbb])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
    ax0.plot(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, '.r', ms=10, linestyle='', linewidth=0, zorder=4)
    # Upper limits for low-S/N photometry.
    conebb_ls = (fybb/eybb <= sigma)
    leng = np.max(fybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d) * 0.05
    ax0.errorbar(xbb[conebb_ls], eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, yerr=leng,\
        uplims=eybb[conebb_ls] * c / np.square(xbb[conebb_ls]) / d * sigma, linestyle='',color='r', marker='', ms=4, label='', zorder=4, capsize=3)

    ####################
    # MCMC corner plot.
    ####################
    file = MB.DIR_OUT + 'chain_' + ID + '_corner.cpkl'
    niter = 0  # NOTE(review): unused; kept for compatibility.
    data = loadcpkl(file)
    try:
        ndim = data['ndim'] # By default, use ndim and burnin values contained in the cpkl file, if present.
        burnin = data['burnin']
        nmc = data['niter']
        nwalk = data['nwalkers']
        Nburn = burnin #*20
        samples = data['chain'][:]
    except:
        # NOTE(review): `verbose` is not defined in this function, and no
        # fallback for nmc/samples is set here — presumably module-level
        # globals are expected; verify before relying on this path.
        if verbose: print(' = > NO keys of ndim and burnin found in cpkl, use input keyword values')

    af = MB.af
    sedpar = af['ML']
    getcmap = matplotlib.cm.get_cmap('jet')
    nc = np.arange(0, nmc, 1)
    col = getcmap((nc-0)/(nmc-0))  # rebinds `col` to per-template cmap colors.

    #for kk in range(0,nmc,1):
    Ntmp = np.zeros(mmax, dtype='float')
    lmtmp= np.zeros(mmax, dtype='float')
    Avtmp= np.zeros(mmax, dtype='float')
    Ztmp = np.zeros(mmax, dtype='float')
    Ttmp = np.zeros(mmax, dtype='float')
    ACtmp= np.zeros(mmax, dtype='float')
    redshifttmp = np.zeros(mmax, dtype='float')

    # Time bin
    Tuni = MB.cosmo.age(zbes).value
    Tuni0 = (Tuni - age[:])
    delT = np.zeros(len(age),dtype='float')
    delTl = np.zeros(len(age),dtype='float')
    delTu = np.zeros(len(age),dtype='float')

    if len(age) == 1:
        for aa in range(len(age)):
            try:
                # NOTE(review): `inputs` is not defined in this scope; unless
                # a module-level `inputs` exists, this always falls back to
                # tau_lim via the except below. Verify intended source.
                tau_ssp = float(inputs['TAU_SSP'])
            except:
                tau_ssp = tau_lim
            delTl[aa] = tau_ssp/2
            delTu[aa] = tau_ssp/2
            delT[aa] = delTu[aa] + delTl[aa]
    else:
        for aa in range(len(age)):
            if aa == 0:
                delTl[aa] = age[aa]
                delTu[aa] = (age[aa+1]-age[aa])/2.
                delT[aa] = delTu[aa] + delTl[aa]
            elif Tuni < age[aa]:
                delTl[aa] = (age[aa]-age[aa-1])/2.
                delTu[aa] = 10.
                delT[aa] = delTu[aa] + delTl[aa]
            elif aa == len(age)-1:
                delTl[aa] = (age[aa]-age[aa-1])/2.
                delTu[aa] = Tuni - age[aa]
                delT[aa] = delTu[aa] + delTl[aa]
            else:
                delTl[aa] = (age[aa]-age[aa-1])/2.
                delTu[aa] = (age[aa+1]-age[aa])/2.
                delT[aa] = delTu[aa] + delTl[aa]
            if delT[aa] < tau_lim:
                # This is because fsps has the minimum tau = tau_lim
                delT[aa] = tau_lim

    delT[:] *= 1e9 # Gyr to yr
    delTl[:] *= 1e9 # Gyr to yr
    delTu[:] *= 1e9 # Gyr to yr

    files = [] # For gif animation
    SFmax = 0
    Tsmin = 0
    Tsmax = 0
    Zsmin = 0
    Zsmax = 0
    AMtmp = 0
    AMtmp16 = 0
    AMtmp84 = 0

    # Rough parameter ranges from extreme draws of each amplitude.
    for ii in range(len(age)):
        if ii == 0 or MB.ZEVOL:
            # FIX: the original tested the stale loop variable `aa` (left over
            # from the percentile loop above) instead of `ii`. Using `ii` is
            # the intended index; the outcome is identical in all reachable
            # cases since Z50[ii] == Z50[0] whenever ii == 0.
            ZZ_tmp = Z50[ii]
            ZZ_tmp16 = Z16[ii]
            ZZ_tmp84 = Z84[ii]
        else:
            ZZ_tmp = Z50[0]
            ZZ_tmp16 = Z16[0]
            ZZ_tmp84 = Z84[0]
        try:
            AA_tmp = 10**np.max(samples['A'+str(ii)][:])
            AA_tmp84 = 10**np.percentile(samples['A'+str(ii)][:],95)
            AA_tmp16 = 10**np.percentile(samples['A'+str(ii)][:],5)
        except:
            AA_tmp = 0
            AA_tmp84 = 0
            AA_tmp16 = 0
        nZtmp = bfnc.Z2NZ(ZZ_tmp)
        mslist = sedpar['ML_'+str(nZtmp)][ii]
        AMtmp16 += mslist*AA_tmp16
        AMtmp84 += mslist*AA_tmp84
        Tsmax += age[ii] * AA_tmp84 * mslist
        Tsmin += age[ii] * AA_tmp16 * mslist
        Zsmax += 10**ZZ_tmp84 * AA_tmp84 * mslist
        Zsmin += 10**ZZ_tmp16 * AA_tmp16 * mslist
        SFtmp = AA_tmp * mslist / delT[ii]
        if SFtmp > SFmax:
            SFmax = SFtmp
    if SFmax > 0.5e4:
        SFmax = 0.5e4

    delM = np.log10(M84) - np.log10(M16)
    if MB.fzmc == 1:
        NPARmin = [np.log10(M16)-.1, np.log10(Tsmin/AMtmp16)-0.1, Av16-0.1, np.log10(Zsmin/AMtmp16)-0.2, np.percentile(samples['zmc'],1)-0.1]
        NPARmax = [np.log10(M84)+.1, np.log10(Tsmax/AMtmp84)+0.2, Av84+0.1, np.log10(Zsmax/AMtmp84)+0.2, np.percentile(samples['zmc'],99)+0.1]
    else:
        NPARmin = [np.log10(M16)-.1, np.log10(Tsmin/AMtmp16)-0.1, Av16-0.1, np.log10(Zsmin/AMtmp16)-0.2]
        NPARmax = [np.log10(M84)+.1, np.log10(Tsmax/AMtmp84)+0.2, Av84+0.1, np.log10(Zsmax/AMtmp84)+0.2]

    # For redshift tick marks on the twin age axes.
    if zbes<2:
        zred = [zbes, 2, 3, 6]
        zredl = ['$z_\mathrm{obs.}$', 2, 3, 6]
    elif zbes<2.5:
        zred = [zbes, 2.5, 3, 6]
        zredl = ['$z_\mathrm{obs.}$', 2.5, 3, 6]
    elif zbes<3.:
        zred = [zbes, 3, 6]
        zredl = ['$z_\mathrm{obs.}$', 3, 6]
    elif zbes<6:
        zred = [zbes, 6]
        zredl = ['$z_\mathrm{obs.}$', 6]
    else:
        zred = [zbes, 12]
        zredl = ['$z_\mathrm{obs.}$', 12]

    Tzz = np.zeros(len(zred), dtype='float')
    for zz in range(len(zred)):
        Tzz[zz] = (Tuni - MB.cosmo.age(zred[zz]).value) #/ cc.Gyr_s
        if Tzz[zz] < TMIN:
            Tzz[zz] = TMIN

    def density_estimation(m1, m2):
        '''Gaussian-KDE density of the (m1, m2) sample on a 100x100 grid.'''
        xmin, xmax = np.min(m1), np.max(m1)
        ymin, ymax = np.min(m2), np.max(m2)
        X, Y = np.mgrid[xmin:xmax:100j, ymin:ymax:100j]
        positions = np.vstack([X.ravel(), Y.ravel()])
        values = np.vstack([m1, m2])
        kernel = stats.gaussian_kde(values)
        Z = np.reshape(kernel(positions).T, X.shape)
        return X, Y, Z

    for kk in range(0,mmax,1):
        nr = np.random.randint(len(samples))
        try:
            Avtmp[kk] = samples['Av'][nr]
        except:
            Avtmp[kk] = MB.AVFIX

        ZMM = np.zeros((len(age)), dtype='float') # Mass weighted Z.
        ZM = np.zeros((len(age)), dtype='float') # Light weighted T.
        ZC = np.zeros((len(age)), dtype='float') # Light weighted T.
        SF = np.zeros((len(age)), dtype='float') # SFR
        AM = np.zeros((len(age)), dtype='float') # Light weighted T.
        II0 = nage
        for ss in range(len(age)):
            ii = int(len(II0) - ss - 1) # from old to young templates.
            try:
                AA_tmp = 10**samples['A'+str(ii)][nr]
            except:
                AA_tmp = 0
                pass
            try:
                ZZ_tmp = samples['Z'+str(ii)][nr]
            except:
                try:
                    ZZ_tmp = samples['Z0'][nr]
                except:
                    ZZ_tmp = MB.ZFIX
            nZtmp = bfnc.Z2NZ(ZZ_tmp)
            mslist = sedpar['ML_'+str(nZtmp)][ii]
            lmtmp[kk] += AA_tmp * mslist
            Ztmp[kk] += (10 ** ZZ_tmp) * AA_tmp * mslist
            Ttmp[kk] += age[ii] * AA_tmp * mslist
            ACtmp[kk] += AA_tmp * mslist
            if MB.fzmc == 1:
                redshifttmp[kk] = samples['zmc'][nr]

            AM[ii] = AA_tmp * mslist
            SF[ii] = AA_tmp * mslist / delT[ii]
            ZM[ii] = ZZ_tmp
            ZMM[ii]= (10 ** ZZ_tmp) * AA_tmp * mslist

            # SED
            flim = 0.05
            if ss == 0:
                y0, x0 = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all)
                y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib)
                ysump = y0p #* 1e18
                ysum = y0 #* 1e18
                if AA_tmp/Asum > flim:
                    ax0.plot(x0, y0 * c/ np.square(x0) / d, '--', lw=.1, color=col[ii], zorder=-1, label='', alpha=0.1)
            else:
                y0_r, x0_tmp = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all)
                y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib)
                ysump += y0p #* 1e18
                ysum += y0_r #* 1e18
                if AA_tmp/Asum > flim:
                    ax0.plot(x0, y0_r * c/ np.square(x0) / d, '--', lw=.1, color=col[ii], zorder=-1, label='', alpha=0.1)

        # Cumulative mass-weighted metallicity, from old to young.
        for ss in range(len(age)):
            ii = ss # from old to young templates.
            AC = np.sum(AM[ss:])
            if AC > 0:
                ZC[ss] = np.log10(np.sum(ZMM[ss:])/AC)
            else:
                ZC[ss] = -99

        # Plot Total
        ax0.plot(x0, ysum * c/ np.square(x0) / d, '-', lw=.1, color='gray', zorder=-1, label='', alpha=0.1)
        if len(age)==1:
            ax1.plot(age[:], SF[:], marker='.', linestyle='-', lw=.1, color='k', zorder=-1, label='', alpha=0.01)
            ax2.plot(age[:], ZC[:], marker='.', linestyle='-', lw=.1, color='k', zorder=-1, label='', alpha=0.01)
        else:
            ax1.plot(age[:], SF[:], marker='', linestyle='-', lw=.1, color='k', zorder=-1, label='', alpha=0.1)
            ax2.plot(age[:], ZC[:], marker='', linestyle='-', lw=.1, color='k', zorder=-1, label='', alpha=0.1)

        # Get ymax
        if f_plot_filter:
            scl_yaxis = 0.2
        else:
            scl_yaxis = 0
        ymax_bb = np.max(fybb[conbb] * c / np.square(xbb[conbb]) / d) * 1.10
        ymax_temp = np.max(ysum * c/ np.square(x0) / d) * 1.10
        ymax = np.max([ymax_bb, ymax_temp])

        # Convert into log
        Ztmp[kk] /= ACtmp[kk]
        Ttmp[kk] /= ACtmp[kk]
        Ntmp[kk] = kk
        lmtmp[kk] = np.log10(lmtmp[kk])
        Ztmp[kk] = np.log10(Ztmp[kk])
        Ttmp[kk] = np.log10(Ttmp[kk])
        if MB.fzmc == 1:
            NPAR = [lmtmp[:kk+1], Ttmp[:kk+1], Avtmp[:kk+1], Ztmp[:kk+1], redshifttmp[:kk+1]]
        else:
            NPAR = [lmtmp[:kk+1], Ttmp[:kk+1], Avtmp[:kk+1], Ztmp[:kk+1]]

        if kk == mmax-1:
            # Histogram
            for i, x in enumerate(Par):
                ax = axes[i, i]
                x1min, x1max = NPARmin[i], NPARmax[i]
                nbin = 50
                binwidth1 = (x1max-x1min)/nbin
                bins1 = np.arange(x1min, x1max + binwidth1, binwidth1)
                n, bins, patches = ax.hist(NPAR[i], bins=bins1, orientation='vertical', color='b', histtype='stepfilled', alpha=0.6)
                yy = np.arange(0,np.max(n)*1.3,1)
                try:
                    ax.plot(yy*0+np.percentile(NPAR[i],16), yy, linestyle='--', color='gray', lw=1)
                    ax.plot(yy*0+np.percentile(NPAR[i],84), yy, linestyle='--', color='gray', lw=1)
                    ax.plot(yy*0+np.percentile(NPAR[i],50), yy, linestyle='-', color='gray', lw=1)
                    ax.text(np.percentile(NPAR[i],16), np.max(yy)*1.02, '%.2f'%(np.percentile(NPAR[i],16)), fontsize=9)
                    ax.text(np.percentile(NPAR[i],50), np.max(yy)*1.02, '%.2f'%(np.percentile(NPAR[i],50)), fontsize=9)
                    ax.text(np.percentile(NPAR[i],84), np.max(yy)*1.02, '%.2f'%(np.percentile(NPAR[i],84)), fontsize=9)
                except:
                    print('Failed at i,x=',i,x)
                ax.set_xlim(x1min, x1max)
                ax.set_yticklabels([])
                if i == K-1:
                    ax.set_xlabel('%s'%(Par[i]), fontsize=12)
                if i < K-1:
                    ax.set_xticklabels([])

        # Scatter and contour
        for i, x in enumerate(Par):
            for j, y in enumerate(Par):
                if i > j:
                    ax = axes[i, j]
                    ax.scatter(NPAR[j], NPAR[i], c='b', s=1, marker='o', alpha=0.01)
                    ax.set_xlabel('%s'%(Par[j]), fontsize=12)
                    if kk == mmax-1:
                        # KDE contours only on the final draw.
                        try:
                            Xcont, Ycont, Zcont = density_estimation(NPAR[j], NPAR[i])
                            mZ = np.max(Zcont)
                            ax.contour(Xcont, Ycont, Zcont, levels=[0.68*mZ, 0.95*mZ, 0.99*mZ], linewidths=[0.8,0.5,0.3], colors='gray')
                        except:
                            print('Error occurs when density estimation. Maybe because some params are fixed.')
                            pass
                    x1min, x1max = NPARmin[j], NPARmax[j]
                    y1min, y1max = NPARmin[i], NPARmax[i]
                    ax.set_xlim(x1min, x1max)
                    ax.set_ylim(y1min, y1max)
                    if j==0:
                        ax.set_ylabel('%s'%(Par[i]), fontsize=12)
                    if j>0:
                        ax.set_yticklabels([])
                    if i<K-1:
                        ax.set_xticklabels([])
                    if i == 2:
                        ax.yaxis.labelpad = 5.
                if i < j:
                    # Hide the unused upper triangle.
                    ax = axes[i, j]
                    ax.set_xticklabels([])
                    ax.set_yticklabels([])
                    ax.set_frame_on(False)
                    ax.set_xticks([])
                    ax.set_yticks([])
                if i == j:
                    ax = axes[i, j]
                    ax.set_yticklabels([])
                    if i == K-1:
                        ax.set_xlabel('%s'%(Par[i]), fontsize=12)
                    if i < K-1:
                        ax.set_xticklabels([])

        if kk%10 == 0 and out_ind == 1:
            # FIX: the original read `MB.DIR_OUT + + '%d.png' % kk`; the stray
            # second `+` applies unary plus to a str and raises TypeError.
            fname = MB.DIR_OUT + '%d.png' % kk
            print('Saving frame', fname)
            plt.savefig(fname, dpi=200)
            files.append(fname)

    # For the last one
    ax0.plot(xg0, fg0 * c / np.square(xg0) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='royalblue', label='')
    ax0.plot(xg1, fg1 * c / np.square(xg1) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='#DF4E00', label='')
    ax0.set_xlim(2200, 88000)
    ax0.set_xscale('log')
    ax0.set_ylim(-ymax * scl_yaxis, ymax)
    ax0.set_xlabel('Observed wavelength ($\mathrm{\mu m}$)', fontsize=14)
    ax0.set_ylabel('Flux ($10^{%d}\mathrm{erg}/\mathrm{s}/\mathrm{cm}^{2}/\mathrm{\AA}$)'%(np.log10(scale)),fontsize=12,labelpad=-2)

    ax1.set_xlabel('$t$ (Gyr)', fontsize=12)
    ax1.set_ylabel('$\dot{M_*}/M_\odot$yr$^{-1}$', fontsize=12)
    ax1.set_xlim(np.min(age)*0.8, Txmax)
    ax1.set_ylim(0, SFmax)
    ax1.set_xscale('log')

    ax2.set_xlabel('$t$ (Gyr)', fontsize=12)
    ax2.set_ylabel('$\log Z_*/Z_\odot$', fontsize=12)
    ax2.set_xlim(np.min(age)*0.8, Txmax)
    if round(np.min(Z),2) == round(np.max(Z),2):
        ax2.set_ylim(-0.8, 0.5)
    else:
        ax2.set_ylim(np.min(Z)-0.05, np.max(Z)+0.05)
    ax2.set_xscale('log')
    #ax2.yaxis.labelpad = -5

    # Twin x-axes carrying the corresponding redshift ticks.
    ax1t = ax1.twiny()
    ax2t = ax2.twiny()
    ax1t.set_xlim(0.008, Txmax)
    ax1t.set_xscale('log')
    ax1t.set_xticklabels(zredl[:])
    ax1t.set_xticks(Tzz[:])
    ax1t.tick_params(axis='x', labelcolor='k')
    ax1t.xaxis.set_ticks_position('none')
    ax1.plot(Tzz, Tzz*0+SFmax, marker='|', color='k', ms=3, linestyle='None')
    ax2t.set_xlim(0.008, Txmax)
    ax2t.set_xscale('log')
    ax2t.set_xticklabels(zredl[:])
    ax2t.set_xticks(Tzz[:])
    ax2t.tick_params(axis='x', labelcolor='k')
    ax2t.xaxis.set_ticks_position('none')
    ax2.plot(Tzz, Tzz*0+0.5, marker='|', color='k', ms=3, linestyle='None')

    # Filters
    if f_plot_filter:
        ax0 = plot_filter(MB, ax0, ymax, scl=scl_yaxis)
    xx = np.arange(2200,100000,100)
    ax0.plot(xx, xx * 0, linestyle='--', lw=0.5, color='k')

    plt.savefig(MB.DIR_OUT + 'param_' + ID + '_corner.png', dpi=150)
def plot_corner_physparam_cumulative_frame(ID, Zall=np.arange(-1.2,0.4249,0.05), age=[0.01, 0.1, 0.3, 0.7, 1.0, 3.0], tau0=[0.1,0.2,0.3], fig=None, dust_model=0, out_ind=0, snlimbb=1.0, DIR_OUT='./'):
'''
Create a "cumulative" corner-plot png, intended as frames for a gif.

Draws the observed SED plus MCMC-sampled template SEDs in an inset axis,
and accumulates sampled physical parameters (logM*, logT, Av, logZ) into
a 4x4 corner plot; optionally saves a frame every 10 draws.

Parameters
----------
snlimbb : float
    S/N limit to show flux or upper limit in SED.

NOTE(review): mutable defaults (Zall/age/tau0) are shared across calls.
NOTE(review): `PA` and `verbose` are not defined in this scope -- presumably
module-level globals; confirm against the rest of the file.
'''
col = ['violet', 'indigo', 'b', 'lightblue', 'lightgreen', 'g', 'orange', 'coral', 'r', 'darkred']#, 'k']
nage = np.arange(0,len(age),1)
fnc = Func(ID, PA, Zall, age, dust_model=dust_model) # Set up the number of Age/ZZ
bfnc = Basic(Zall)
###########################
# Open result file
###########################
# Open ascii file and stock to array.
lib = fnc.open_spec_fits(ID, PA, fall=0)
lib_all = fnc.open_spec_fits(ID, PA, fall=1)
file = 'summary_' + ID + '.fits'
hdul = fits.open(file) # open a FITS file
# Redshift MC: [0]=16th, [1]=50th, [2]=84th percentile per the indexing below.
zp50 = hdul[1].data['zmc'][1]
zp16 = hdul[1].data['zmc'][0]
zp84 = hdul[1].data['zmc'][2]
M50 = hdul[1].data['ms'][1]
M16 = hdul[1].data['ms'][0]
M84 = hdul[1].data['ms'][2]
print('Total stellar mass is %.2e'%(M50))
# Amplitude percentiles per age bin.
A50 = np.zeros(len(age), dtype='float')
A16 = np.zeros(len(age), dtype='float')
A84 = np.zeros(len(age), dtype='float')
for aa in range(len(age)):
A50[aa] = hdul[1].data['A'+str(aa)][1]
A16[aa] = hdul[1].data['A'+str(aa)][0]
A84[aa] = hdul[1].data['A'+str(aa)][2]
Asum = np.sum(A50)
aa = 0
Av50 = hdul[1].data['Av'+str(aa)][1]
Av16 = hdul[1].data['Av'+str(aa)][0]
Av84 = hdul[1].data['Av'+str(aa)][2]
# Metallicity percentiles per age bin.
Z50 = np.zeros(len(age), dtype='float')
Z16 = np.zeros(len(age), dtype='float')
Z84 = np.zeros(len(age), dtype='float')
NZbest = np.zeros(len(age), dtype='int')
for aa in range(len(age)):
Z50[aa] = hdul[1].data['Z'+str(aa)][1]
Z16[aa] = hdul[1].data['Z'+str(aa)][0]
Z84[aa] = hdul[1].data['Z'+str(aa)][2]
NZbest[aa]= bfnc.Z2NZ(Z50[aa])
ZZ50 = np.sum(Z50*A50)/np.sum(A50) # Light weighted Z.
chi = hdul[1].data['chi'][0]
chin = hdul[1].data['chi'][1]
fitc = chin
Cz0 = hdul[0].header['Cz0']
Cz1 = hdul[0].header['Cz1']
zbes = hdul[0].header['z']
zscl = (1.+zbes)
# Repeat no. -- number of random MCMC draws plotted.
nplot = 1000
#DIR_OUT = '/astro/udfcen3/Takahiro/sedfitter/corner/' + ID + '_corner/'
try:
os.makedirs(DIR_OUT)
except:
# Best-effort: directory may already exist (os.makedirs(..., exist_ok=True) would be explicit).
pass
# plot Configuration
K = 4 # No of params.
Par = ['$\log M_*/M_\odot$', '$\log T$/Gyr', '$A_V$/mag', '$\log Z / Z_\odot$']
factor = 2.0 # size of one side of one panel
lbdim = 0.5 * factor # size of left/bottom margin
trdim = 0.2 * factor # size of top/right margin
whspace = 0.02 # w/hspace size
plotdim = factor * K + factor * (K - 1.) * whspace
dim = lbdim + plotdim + trdim
sclfig = 0.7
# Create a new figure if one wasn't provided.
if fig is None:
fig, axes = plt.subplots(K, K, figsize=(dim*sclfig, dim*sclfig))
else:
try:
axes = np.array(fig.axes).reshape((K, K))
except:
raise ValueError("Provided figure has {0} axes, but data has "
"dimensions K={1}".format(len(fig.axes), K))
# Format the figure.
lb = lbdim / dim
tr = (lbdim + plotdim) / dim
fig.subplots_adjust(left=lb* 1.06, bottom=lb*.9, right=tr, top=tr*.99,
wspace=whspace, hspace=whspace)
# For spec plot
ax0 = fig.add_axes([0.62,0.61,0.37,0.33])
###############################
# Data taken from
###############################
DIR_TMP = './templates/'
dat = np.loadtxt(DIR_TMP + 'spec_obs_' + ID + '.cat', comments='#')
NR = dat[:, 0]
x = dat[:, 1]
fy00 = dat[:, 2]
ey00 = dat[:, 3]
# NR bins select instrument/dataset: <1000 and 1000-9999 are spectra
# (rescaled by Cz0/Cz1), >=10000 is broadband -- inferred from use below.
con0 = (NR<1000) #& (fy/ey>SNlim)
xg0 = x[con0]
fg0 = fy00[con0] * Cz0
eg0 = ey00[con0] * Cz0
con1 = (NR>=1000) & (NR<10000) #& (fy/ey>SNlim)
xg1 = x[con1]
fg1 = fy00[con1] * Cz1
eg1 = ey00[con1] * Cz1
con2 = (NR>=10000)#& (fy/ey>SNlim)
xg2 = x[con2]
fg2 = fy00[con2]
eg2 = ey00[con2]
fy01 = np.append(fg0,fg1)
fy = np.append(fy01,fg2)
ey01 = np.append(eg0,eg1)
ey = np.append(ey01,eg2)
wht=1./np.square(ey)
dat = np.loadtxt(DIR_TMP + 'bb_obs_' + ID + '.cat', comments='#')
NRbb = dat[:, 0]
xbb = dat[:, 1]
fybb = dat[:, 2]
eybb = dat[:, 3]
exbb = dat[:, 4]
snbb = fybb/eybb
conspec = (NR<10000) #& (fy/ey>1)
#ax0.plot(xg0, fg0 * c / np.square(xg0) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='royalblue', label='')
#ax0.plot(xg1, fg1 * c / np.square(xg1) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='#DF4E00', label='')
# Detections above snlimbb as points with error bars; below as upper-limit triangles.
conbb = (fybb/eybb>snlimbb)
ax0.errorbar(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, yerr=eybb[conbb]*c/np.square(xbb[conbb])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax0.plot(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, '.r', ms=10, linestyle='', linewidth=0, zorder=4)
conbbe = (fybb/eybb<snlimbb)
ax0.plot(xbb[conbbe], eybb[conbbe] * c / np.square(xbb[conbbe]) / d, 'vr', ms=10, linestyle='', linewidth=0, zorder=4)
ymax = np.max(fybb[conbb] * c / np.square(xbb[conbb]) / d) * 1.10
ax0.set_xlabel('Observed wavelength ($\mathrm{\mu m}$)', fontsize=14)
ax0.set_ylabel('Flux ($\mathrm{erg}/\mathrm{s}/\mathrm{cm}^{2}/\mathrm{\AA}$)', fontsize=13)
ax0.set_xlim(2200, 88000)
#ax1.set_xlim(12500, 16000)
ax0.set_xscale('log')
ax0.set_ylim(-0.05, ymax)
DIR_TMP = './templates/'
####################
# MCMC corner plot.
####################
file = 'chain_' + ID + '_corner.cpkl'
niter = 0
data = loadcpkl(os.path.join('./'+file))
try:
ndim = data['ndim'] # By default, use ndim and burnin values contained in the cpkl file, if present.
burnin = data['burnin']
nmc = data['niter']
nwalk = data['nwalkers']
Nburn = burnin #*20
samples = data['chain'][:]
except:
# NOTE(review): on this path `nmc`/`samples` stay unbound and the code
# below will raise NameError; `verbose` is also not defined here.
if verbose: print(' = > NO keys of ndim and burnin found in cpkl, use input keyword values')
f0 = fits.open(DIR_TMP + 'ms_' + ID + '.fits')
sedpar = f0[1]
import matplotlib
import matplotlib.cm as cm
getcmap = matplotlib.cm.get_cmap('jet')
nc = np.arange(0, nmc, 1)
col = getcmap((nc-0)/(nmc-0))
#for kk in range(0,nmc,1):
# Accumulators for per-draw physical parameters.
Ntmp = np.zeros(nplot, dtype='float')
lmtmp= np.zeros(nplot, dtype='float')
Avtmp= np.zeros(nplot, dtype='float')
Ztmp = np.zeros(nplot, dtype='float')
Ttmp = np.zeros(nplot, dtype='float')
ACtmp= np.zeros(nplot, dtype='float')
files = [] # For movie
for kk in range(0,nplot,1):
#nr = kk # np.random.randint(len(samples))
nr = np.random.randint(len(samples))
Avtmp[kk] = samples['Av'][nr]
#Asum = 0
#for ss in range(len(age)):
#Asum += np.sum(samples['A'+str(ss)][nr])
II0 = nage #[0,1,2,3] # Number for templates
for ss in range(len(age)):
ii = int(len(II0) - ss - 1) # from old to young templates.
AA_tmp = samples['A'+str(ii)][nr]
try:
ZZ_tmp = samples['Z'+str(ii)][nr]
except:
# Fall back to the single-Z chain column when per-age Z was not sampled.
ZZ_tmp = samples['Z0'][nr]
nZtmp = bfnc.Z2NZ(ZZ_tmp)
mslist = sedpar.data['ML_'+str(nZtmp)][ii]
# Mass-weighted accumulation of M*, Z, T for this draw.
lmtmp[kk] += AA_tmp * mslist
Ztmp[kk] += (10 ** ZZ_tmp) * AA_tmp * mslist
Ttmp[kk] += age[ii] * AA_tmp * mslist
ACtmp[kk] += AA_tmp * mslist
# SED
flim = 0.05
if ss == 0:
y0, x0 = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all, tau0=tau0)
y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib, tau0=tau0)
ysump = y0p #* 1e18
ysum = y0 #* 1e18
if AA_tmp/Asum > flim:
ax0.plot(x0, y0 * c/ np.square(x0) / d, '--', lw=0.1, color=col[ii], zorder=-1, label='', alpha=0.1)
else:
y0_r, x0_tmp = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all, tau0=tau0)
y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib, tau0=tau0)
ysump += y0p #* 1e18
ysum += y0_r #* 1e18
if AA_tmp/Asum > flim:
ax0.plot(x0, y0_r * c/ np.square(x0) / d, '--', lw=0.1, color=col[ii], zorder=-1, label='', alpha=0.1)
# Total
ax0.plot(x0, ysum * c/ np.square(x0) / d, '-', lw=0.1, color='gray', zorder=-1, label='', alpha=0.1)
ax0.set_xlim(2200, 88000)
ax0.set_xscale('log')
ax0.set_ylim(0., ymax)
# Convert into log
Ztmp[kk] /= ACtmp[kk]
Ttmp[kk] /= ACtmp[kk]
Ntmp[kk] = kk
lmtmp[kk] = np.log10(lmtmp[kk])
Ztmp[kk] = np.log10(Ztmp[kk])
Ttmp[kk] = np.log10(Ttmp[kk])
# Cumulative samples so far (draws 0..kk) feed the corner panels.
NPAR = [lmtmp[:kk+1], Ttmp[:kk+1], Avtmp[:kk+1], Ztmp[:kk+1]]
#NPARmin = [np.log10(M16)-0.1, -0.4, 0, -0.6]
#NPARmax = [np.log10(M84)+0.1, 0.5, 2., 0.5]
NPARmin = [np.log10(M16)-0.1, -0.4, Av16-0.1, -0.5]
NPARmax = [np.log10(M84)+0.1, 0.5, Av84+0.1, 0.5]
#for kk in range(0,nplot,1):
if kk == nplot-1:
# Histogram
for i, x in enumerate(Par):
ax = axes[i, i]
x1min, x1max = NPARmin[i], NPARmax[i]
nbin = 50
binwidth1 = (x1max-x1min)/nbin
bins1 = np.arange(x1min, x1max + binwidth1, binwidth1)
ax.hist(NPAR[i], bins=bins1, orientation='vertical', color='b', histtype='stepfilled', alpha=0.6)
ax.set_xlim(x1min, x1max)
#print(x, x1min, x1max)
#ax2.scatter(np.log10(Ttmp), np.log10(Avtmp), c='r', s=1, marker='.', alpha=0.1)
#ax3.scatter(np.log10(Ztmp), np.log10(Avtmp), c='r', s=1, marker='.', alpha=0.1)
#ax.set_xlabel('$\log T_*$/Gyr', fontsize=12)
#ax.set_ylabel('$\log Z_*/Z_\odot$', fontsize=12)
ax.set_yticklabels([])
#ax.set_xticklabels([])
#ax.set_title('%s'%(Par[i]), fontsize=12)
if i == K-1:
ax.set_xlabel('%s'%(Par[i]), fontsize=12)
if i < K-1:
ax.set_xticklabels([])
# Scatter and contour
for i, x in enumerate(Par):
for j, y in enumerate(Par):
#print(i,j,Par[j], Par[i])
if i > j:
ax = axes[i, j]
ax.scatter(NPAR[j], NPAR[i], c='b', s=1, marker='o', alpha=0.01)
ax.set_xlabel('%s'%(Par[j]), fontsize=12)
#x1min, x1max = np.min(NPAR[j]), np.max(NPAR[j])
#y1min, y1max = np.min(NPAR[i]), np.max(NPAR[i])
x1min, x1max = NPARmin[j], NPARmax[j]
y1min, y1max = NPARmin[i], NPARmax[i]
ax.set_xlim(x1min, x1max)
ax.set_ylim(y1min, y1max)
if j==0:
ax.set_ylabel('%s'%(Par[i]), fontsize=12)
if j>0:
ax.set_yticklabels([])
if i<K-1:
ax.set_xticklabels([])
if i < j:
# Upper triangle: blank panels.
ax = axes[i, j]
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_frame_on(False)
ax.set_xticks([])
ax.set_yticks([])
if i == j:
ax = axes[i, j]
ax.set_yticklabels([])
if i == K-1:
ax.set_xlabel('%s'%(Par[i]), fontsize=12)
if i < K-1:
ax.set_xticklabels([])
# Save every 10th draw as a movie frame when out_ind is set.
if kk%10 == 0 and out_ind == 1:
fname = DIR_OUT + '%d.png' % kk
print('Saving frame', fname)
plt.savefig(fname, dpi=200)
files.append(fname)
#plt.savefig(DIR_OUT + '%d.pdf'%(kk))
plt.savefig(DIR_OUT + 'param_' + ID + '_corner.png', dpi=200)
plt.close()
def write_lines(ID, zbes, R_grs=45, dw=4, umag=1.0, DIR_OUT='./'):
'''
Fit Gaussians to emission-line excesses and write a line-measurement table.

Reads DIR_OUT/table_<ID>_lines.txt, fits a Gaussian at each line with
positive continuum (Fcont50 > 0), and writes fluxes, EWs, luminosities and
SFR to DIR_OUT/<ID>_lines_fit.txt. Magnification `umag` divides all outputs.

NOTE(review): this function depends on many names not defined in its scope
(`x`, `fy`, `ey`, `ysum_cut`, `FLW`, `zscl`, `Cons`, `f_grsm`, `ax2t`,
`gaus`, `simps`, `curve_fit`) -- presumably globals set by the caller;
confirm before reuse.
'''
dlw = R_grs * dw # Can affect the SFR.
ldw = 7
###################################
# To add lines in the plot,
# ,manually edit the following file
# so as Fcont50 have >0.
###################################
flw = open(DIR_OUT + ID + '_lines_fit.txt', 'w')
flw.write('# LW flux_line eflux_line flux_cont EW eEW L_line eL_line\n')
flw.write('# (AA) (Flam_1e-18) (Flam_1e-18) (Flam_1e-18) (AA) (AA) (erg/s) (erg/s)\n')
flw.write('# Error in EW is 1sigma, by pm eflux_line.\n')
flw.write('# If EW=-99, it means gaussian fit failed.\n')
flw.write('# and flux is the sum of excess at WL pm %.1f AA.\n'%(dlw))
flw.write('# Magnification is corrected; mu=%.3f\n'%(umag))
try:
fl = np.loadtxt(DIR_OUT + 'table_' + ID + '_lines.txt', comments='#')
LW = fl[:,2]
Fcont50 = fl[:,3]
Fline50 = fl[:,6]
for ii in range(len(LW)):
if Fcont50[ii] > 0:
# Observed-frame line center.
WL = LW[ii] * (1.+zbes)
if ii == 7:
# Line index 7 gets a wider window redward -- TODO confirm which line this is.
contmp = (x > WL - dlw) & (x < WL + dlw*1.5)
else:
contmp = (x > WL - dlw) & (x < WL + dlw)
FLW[ii] = 1
xx = x[contmp]
yy = (fy - ysum_cut)[contmp]
eyy = ey[contmp]
yy2 = (ysum_cut)[contmp]
# Sort the window by wavelength, keeping flux/error/model aligned.
xyzip = zip(xx,yy,eyy,yy2)
xyzip = sorted(xyzip)
xxs = np.array([p1 for p1,p2,p3,p4 in xyzip])
yys = np.array([p2 for p1,p2,p3,p4 in xyzip])
eyys = np.array([p3 for p1,p2,p3,p4 in xyzip])
yy2s = np.array([p4 for p1,p2,p3,p4 in xyzip])
flux = np.zeros(len(xxs), dtype='float')
efl = np.zeros(len(xxs), dtype='float')
for ff in range(len(xxs)):
flux[ff] = yy2s[ff]/np.square(xxs[ff]) * c/d
efl[ff] = np.square(eyys[ff]/np.square(xxs[ff]) * c/d)
fmed = np.median(flux) # Median of continuum, model flux
esum = np.sqrt(simps(efl, xxs))
try:
popt,pcov = curve_fit(gaus,xxs,yys,p0=[Fline50[ii],WL,10],sigma=eyys)
xxss = xxs/zscl
if ii == 7:
# Refit line 7 with the center shifted +20 AA.
popt,pcov = curve_fit(gaus,xxs,yys,p0=[Fline50[ii],WL+20,10],sigma=eyys)
xxss = xxs/zscl
if f_grsm:
ax2t.plot(xxs/zscl, (gaus(xxs,*popt)+yy2s) * c/np.square(xxs)/d, '#4682b4', linestyle='-', linewidth=1, alpha=0.8, zorder=20)
I1 = simps((gaus(xxs,*popt)) * c/np.square(xxs)/d, xxs)
# NOTE(review): I2 is I1 minus the identical integral, so it is always 0 and unused.
I2 = I1 - simps((gaus(xxs,*popt)) * c/np.square(xxs)/d, xxs)
fline = I1
Flum = fline*Cons*1e-18 # luminosity in erg/s.
elum = esum *Cons*1e-18 # luminosity in erg/s.
SFR = Flum * 6.58*1e-42
print('SFR is', SFR/umag)
EW_tmp = simps( ((gaus(xxs,*popt)) * c/np.square(xxs)/d)/yy2s, xxs)
EW_tmp_u = simps( ((gaus(xxs,*popt) + eyys/np.sqrt(len(xxs))) * c/np.square(xxs)/d)/yy2s, xxs)
if ii == 7:
# Line 7: restrict to rest-frame 4320-4380 AA and redo the fit/integrals.
contmp2 = (xxs/zscl>4320.) & (xxs/zscl<4380.)
popt,pcov = curve_fit(gaus,xxs[contmp2], yys[contmp2], p0=[Fline50[ii],WL,10], sigma=eyys[contmp2])
I1 = simps((gaus(xxs[contmp2],*popt)) * c/np.square(xxs[contmp2])/d, xxs[contmp2])
I2 = I1 - simps((gaus(xxs[contmp2],*popt)) * c/np.square(xxs[contmp2])/d, xxs[contmp2])
fline = I1
Flum = fline*Cons*1e-18 # luminosity in erg/s.
elum = esum *Cons*1e-18 # luminosity in erg/s.
SFR = Flum * 6.58*1e-42
print('SFR, update, is', SFR/umag)
EW_tmp = simps( ((gaus(xxs[contmp2],*popt)) * c/np.square(xxs[contmp2])/d)/yy2s[contmp2], xxs[contmp2])
EW_tmp_u = simps( ((gaus(xxs[contmp2],*popt) + eyys[contmp2]/np.sqrt(len(xxs[contmp2]))) * c/np.square(xxs[contmp2])/d)/yy2s[contmp2], xxs[contmp2])
flw.write('%d %.2f %.2f %.2f %.2f %.2f %.2e %.2e %.2f\n'%(LW[ii],fline/umag, esum/umag, fmed/umag, EW_tmp,(EW_tmp_u-EW_tmp), Flum*1e-18/umag, elum*1e-18/umag, SFR/umag))
except Exception:
# Gaussian fit failed: fall back to summing the raw excess and mark EW=-99.
fsum = np.zeros(len(xxs))
for ff in range(len(fsum)):
fsum[ff] = (yys[ff]+yy2s[ff])/np.square(xxs[ff])
fline = np.sum(fsum) / d*c
flw.write('%d %.2f %.2f %.2f %d %d %d %d %d\n'%(LW[ii],fline,esum,fmed, -99, 0, -99, 0, 0))
pass
except:
# NOTE(review): bare except silently skips the whole table (e.g. missing
# input file) -- consider catching OSError explicitly and logging.
pass
flw.close()
def plot_corner_physparam_frame(ID, PA, Zall=np.arange(-1.2,0.4249,0.05), age=[0.01, 0.1, 0.3, 0.7, 1.0, 3.0], tau0=[0.1,0.2,0.3], fig=None, dust_model=0):
'''
Create per-draw (non-cumulative) png frames for a gif.

Every 10th MCMC draw starts a fresh figure containing the observed+model
SED, the SFH, the metallicity history, and single-point corner panels.

NOTE(review): mutable defaults (Zall/age/tau0) are shared across calls.
NOTE(review): DIR_OUT is a hard-coded absolute path (user-specific);
`MB` and `verbose` are not defined in this scope -- confirm they are
module-level globals.
'''
col = ['violet', 'indigo', 'b', 'lightblue', 'lightgreen', 'g', 'orange', 'coral', 'r', 'darkred']#, 'k']
nage = np.arange(0,len(age),1)
fnc = Func(ID, PA, Zall, age, dust_model=dust_model) # Set up the number of Age/ZZ
bfnc = Basic(Zall)
###########################
# Open result file
###########################
# Open ascii file and stock to array.
lib = fnc.open_spec_fits(fall=0, tau0=tau0)
lib_all = fnc.open_spec_fits(fall=1, tau0=tau0)
file = 'summary_' + ID + '.fits'
hdul = fits.open(file) # open a FITS file
# Redshift MC: [0]=16th, [1]=50th, [2]=84th percentile per the indexing below.
zp50 = hdul[1].data['zmc'][1]
zp16 = hdul[1].data['zmc'][0]
zp84 = hdul[1].data['zmc'][2]
M50 = hdul[1].data['ms'][1]
M16 = hdul[1].data['ms'][0]
M84 = hdul[1].data['ms'][2]
print('Total stellar mass is %.2e'%(M50))
A50 = np.zeros(len(age), dtype='float')
A16 = np.zeros(len(age), dtype='float')
A84 = np.zeros(len(age), dtype='float')
for aa in range(len(age)):
A50[aa] = hdul[1].data['A'+str(aa)][1]
A16[aa] = hdul[1].data['A'+str(aa)][0]
A84[aa] = hdul[1].data['A'+str(aa)][2]
Asum = np.sum(A50)
aa = 0
Av50 = hdul[1].data['Av'+str(aa)][1]
Av16 = hdul[1].data['Av'+str(aa)][0]
Av84 = hdul[1].data['Av'+str(aa)][2]
Z50 = np.zeros(len(age), dtype='float')
Z16 = np.zeros(len(age), dtype='float')
Z84 = np.zeros(len(age), dtype='float')
NZbest = np.zeros(len(age), dtype='int')
for aa in range(len(age)):
Z50[aa] = hdul[1].data['Z'+str(aa)][1]
Z16[aa] = hdul[1].data['Z'+str(aa)][0]
Z84[aa] = hdul[1].data['Z'+str(aa)][2]
NZbest[aa]= bfnc.Z2NZ(Z50[aa])
ZZ50 = np.sum(Z50*A50)/np.sum(A50) # Light weighted Z.
chi = hdul[1].data['chi'][0]
chin = hdul[1].data['chi'][1]
fitc = chin
Cz0 = hdul[0].header['Cz0']
Cz1 = hdul[0].header['Cz1']
zbes = hdul[0].header['z']
zscl = (1.+zbes)
# Repeat no. -- number of random MCMC draws plotted.
nplot = 1000
DIR_OUT = '/astro/udfcen3/Takahiro/sedfitter/corner/' + ID + '_corner/'
try:
os.makedirs(DIR_OUT)
except:
# Best-effort: directory may already exist.
pass
# plot Configuration
K = 4 # No of params.
Par = ['$\log M_*/M_\odot$', '$\log T_*$/Gyr', '$A_V$/mag', '$\log Z_* / Z_\odot$']
factor = 2.0 # size of one side of one panel
lbdim = 0.5 * factor # size of left/bottom margin
trdim = 0.2 * factor # size of top/right margin
whspace = 0.02 # w/hspace size
plotdim = factor * K + factor * (K - 1.) * whspace
dim = lbdim + plotdim + trdim
sclfig = 0.7
# Create a new figure if one wasn't provided.
###############################
# Data taken from
###############################
DIR_TMP = './templates/'
dat = np.loadtxt(DIR_TMP + 'spec_obs_' + ID + '.cat', comments='#')
NR = dat[:, 0]
x = dat[:, 1]
fy00 = dat[:, 2]
ey00 = dat[:, 3]
con0 = (NR<1000) #& (fy/ey>SNlim)
xg0 = x[con0]
fg0 = fy00[con0] * Cz0
eg0 = ey00[con0] * Cz0
con1 = (NR>=1000) & (NR<10000) #& (fy/ey>SNlim)
xg1 = x[con1]
fg1 = fy00[con1] * Cz1
eg1 = ey00[con1] * Cz1
con2 = (NR>=10000)#& (fy/ey>SNlim)
xg2 = x[con2]
fg2 = fy00[con2]
eg2 = ey00[con2]
fy01 = np.append(fg0,fg1)
fy = np.append(fy01,fg2)
ey01 = np.append(eg0,eg1)
ey = np.append(ey01,eg2)
wht=1./np.square(ey)
dat = np.loadtxt(DIR_TMP + 'bb_obs_' + ID + '.cat', comments='#')
NRbb = dat[:, 0]
xbb = dat[:, 1]
fybb = dat[:, 2]
eybb = dat[:, 3]
exbb = dat[:, 4]
snbb = fybb/eybb
conspec = (NR<10000) #& (fy/ey>1)
#ax0.plot(xg0, fg0 * c / np.square(xg0) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='royalblue', label='')
#ax0.plot(xg1, fg1 * c / np.square(xg1) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='#DF4E00', label='')
conbb = (fybb/eybb>1)
DIR_TMP = './templates/'
####################
# MCMC corner plot.
####################
file = 'chain_' + ID + '_corner.cpkl'
niter = 0
data = loadcpkl(os.path.join('./'+file))
try:
ndim = data['ndim'] # By default, use ndim and burnin values contained in the cpkl file, if present.
burnin = data['burnin']
nmc = data['niter']
nwalk = data['nwalkers']
Nburn = burnin #*20
samples = data['chain'][:]
except:
# NOTE(review): on this path `nmc`/`samples` stay unbound and the code
# below will raise NameError; `verbose` is also not defined here.
if verbose: print(' = > NO keys of ndim and burnin found in cpkl, use input keyword values')
f0 = fits.open(DIR_TMP + 'ms_' + ID + '.fits')
sedpar = f0[1]
import matplotlib
import matplotlib.cm as cm
getcmap = matplotlib.cm.get_cmap('jet')
nc = np.arange(0, nmc, 1)
col = getcmap((nc-0)/(nmc-0))
#for kk in range(0,nmc,1):
Ntmp = np.zeros(nplot, dtype='float')
lmtmp= np.zeros(nplot, dtype='float')
Avtmp= np.zeros(nplot, dtype='float')
Ztmp = np.zeros(nplot, dtype='float')
Ttmp = np.zeros(nplot, dtype='float')
ACtmp= np.zeros(nplot, dtype='float')
# Time bin: half-widths of each age bin, capped by the age of the universe.
Txmax = 4 # Max x value
Tuni = MB.cosmo.age(zbes).value
Tuni0 = (Tuni - age[:])
delT = np.zeros(len(age),dtype='float')
delTl = np.zeros(len(age),dtype='float')
delTu = np.zeros(len(age),dtype='float')
for aa in range(len(age)):
if aa == 0:
delTl[aa] = age[aa]
delTu[aa] = (age[aa+1]-age[aa])/2.
delT[aa] = delTu[aa] + delTl[aa]
elif Tuni < age[aa]:
delTl[aa] = (age[aa]-age[aa-1])/2.
delTu[aa] = 10.
delT[aa] = delTu[aa] + delTl[aa]
elif aa == len(age)-1:
delTl[aa] = (age[aa]-age[aa-1])/2.
delTu[aa] = Tuni - age[aa]
delT[aa] = delTu[aa] + delTl[aa]
else:
delTl[aa] = (age[aa]-age[aa-1])/2.
delTu[aa] = (age[aa+1]-age[aa])/2.
delT[aa] = delTu[aa] + delTl[aa]
delT[:] *= 1e9 # Gyr to yr
delTl[:] *= 1e9 # Gyr to yr
delTu[:] *= 1e9 # Gyr to yr
######
files = [] # For movie
# Scan samples for plot ranges (max SFR, T/Z extremes).
SFmax = 0
Tsmin = 0
Tsmax = 0
Zsmin = 0
Zsmax = 0
AMtmp = 0
AMtmp16 = 0
AMtmp84 = 0
for ii in range(len(age)):
ZZ_tmp = Z50[ii] #samples['Z'+str(ii)][100]
ZZ_tmp16 = Z16[ii] #samples['Z'+str(ii)][100]
ZZ_tmp84 = Z84[ii] #samples['Z'+str(ii)][100]
AA_tmp = np.max(samples['A'+str(ii)][:])
AA_tmp84 = np.percentile(samples['A'+str(ii)][:],95)
AA_tmp16 = np.percentile(samples['A'+str(ii)][:],5)
#AA_tmp84 = A84[ii]
#AA_tmp16 = A16[ii]
nZtmp = bfnc.Z2NZ(ZZ_tmp)
mslist = sedpar.data['ML_'+str(nZtmp)][ii]
AMtmp16 += mslist*AA_tmp16
AMtmp84 += mslist*AA_tmp84
Tsmax += age[ii] * AA_tmp84 * mslist
Tsmin += age[ii] * AA_tmp16 * mslist
Zsmax += 10**ZZ_tmp84 * AA_tmp84 * mslist
Zsmin += 10**ZZ_tmp16 * AA_tmp16 * mslist
SFtmp = AA_tmp * mslist / delT[ii]
if SFtmp > SFmax:
SFmax = SFtmp
#NPARmin = [np.log10(M16)-0.1, -0.4, 0, -0.6]
#NPARmax = [np.log10(M84)+0.1, 0.5, 2., 0.5]
NPARmin = [np.log10(M16)-0.1, np.log10(Tsmin/AMtmp16)-0.1, Av16-0.1, np.log10(Zsmin/AMtmp16)-0.2]
NPARmax = [np.log10(M84)+0.1, np.log10(Tsmax/AMtmp84)+0.2, Av84+0.1, np.log10(Zsmax/AMtmp84)+0.2]
for kk in range(0,nplot,1):
if kk%10 == 0:
# Start a fresh figure every 10 draws (one gif frame per 10 draws).
#print('New plot; %d'%kk)
fig, axes = plt.subplots(K, K, figsize=(dim*sclfig*2, dim*sclfig))
# Format the figure.
lb = lbdim / dim
tr = (lbdim + plotdim) / dim
#fig.subplots_adjust(left=lb*1.06, bottom=lb*.9, right=tr, top=tr*.99,
fig.subplots_adjust(left=0.5, bottom=lb*.9, right=tr, top=tr*.99,
wspace=whspace, hspace=whspace)
# For spec plot: ax0=SED, ax1=SFH, ax2=Z history.
ax0 = fig.add_axes([0.05,0.73,0.37,0.23])
ax1 = fig.add_axes([0.05,0.40,0.37,0.23])
ax2 = fig.add_axes([0.05,0.07,0.37,0.23])
ax0.errorbar(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, yerr=eybb[conbb]*c/np.square(xbb[conbb])/d, color='k', linestyle='', linewidth=0.5, zorder=4)
ax0.plot(xbb[conbb], fybb[conbb] * c / np.square(xbb[conbb]) / d, '.r', ms=10, linestyle='', linewidth=0, zorder=4)#, label='Obs.(BB)')
ax0.plot(xg0, fg0 * c / np.square(xg0) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='royalblue', label='')
ax0.plot(xg1, fg1 * c / np.square(xg1) / d, marker='', linestyle='-', linewidth=0.5, ms=0.1, color='#DF4E00', label='')
#nr = kk # np.random.randint(len(samples))
nr = np.random.randint(len(samples))
Avtmp[kk] = samples['Av'][nr]
#Asum = 0
#for ss in range(len(age)):
#Asum += np.sum(samples['A'+str(ss)][nr])
ZMM = np.zeros((len(age)), dtype='float') # Mass weighted Z.
ZM = np.zeros((len(age)), dtype='float') # Light weighted T.
ZC = np.zeros((len(age)), dtype='float') # Light weighted T.
SF = np.zeros((len(age)), dtype='float') # SFR
AM = np.zeros((len(age)), dtype='float') # Light weighted T.
II0 = nage #[0,1,2,3] # Number for templates
for ss in range(len(age)):
ii = int(len(II0) - ss - 1) # from old to young templates.
AA_tmp = samples['A'+str(ii)][nr]
try:
ZZ_tmp = samples['Z'+str(ii)][nr]
except:
# Fall back to the single-Z chain column when per-age Z was not sampled.
ZZ_tmp = samples['Z0'][nr]
nZtmp = bfnc.Z2NZ(ZZ_tmp)
mslist = sedpar.data['ML_'+str(nZtmp)][ii]
lmtmp[kk] += AA_tmp * mslist
Ztmp[kk] += (10 ** ZZ_tmp) * AA_tmp * mslist
Ttmp[kk] += age[ii] * AA_tmp * mslist
ACtmp[kk] += AA_tmp * mslist
AM[ii] = AA_tmp * mslist
SF[ii] = AA_tmp * mslist / delT[ii]
ZM[ii] = ZZ_tmp # AAtmp[aa] * mslist[aa]
ZMM[ii]= (10 ** ZZ_tmp) * AA_tmp * mslist
# SED
flim = 0.05
if ss == 0:
y0, x0 = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all, tau0=tau0)
y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib, tau0=tau0)
ysump = y0p #* 1e18
ysum = y0 #* 1e18
if AA_tmp/Asum > flim:
ax0.plot(x0, y0 * c/ np.square(x0) / d, '--', lw=1, color=col[ii], zorder=-1, label='', alpha=0.5)
#ax1.plot(age[ii], SF[ii], marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
#ax1.errorbar(age[ii], SF[ii], xerr=[[delTl[ii]/1e9], [delTu[ii]/1e9]], ms=10, marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
xx1 = np.arange(age[ii]-delTl[ii]/1e9, age[ii]+delTu[ii]/1e9, 0.01)
ax1.fill_between(xx1, xx1*0, xx1*0+SF[ii], lw=1, facecolor=col[ii], zorder=1, label='', alpha=0.5)
#ax2.plot(age[ii], ZM[ii], marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
else:
y0_r, x0_tmp = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib_all, tau0=tau0)
y0p, x0p = fnc.tmp03(AA_tmp, Avtmp[kk], ii, ZZ_tmp, zbes, lib, tau0=tau0)
ysump += y0p #* 1e18
ysum += y0_r #* 1e18
if AA_tmp/Asum > flim:
ax0.plot(x0, y0_r * c/ np.square(x0) / d, '--', lw=1, color=col[ii], zorder=-1, label='', alpha=0.5)
#ax1.plot(age[ii], SF[ii], marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
#ax1.errorbar(age[ii], SF[ii], xerr=[[delTl[ii]/1e9], [delTu[ii]/1e9]], ms=10, marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
xx1 = np.arange(age[ii]-delTl[ii]/1e9, age[ii]+delTu[ii]/1e9, 0.01)
ax1.fill_between(xx1, xx1*0, xx1*0+SF[ii], lw=1, facecolor=col[ii], zorder=1, label='', alpha=0.5)
#ax2.plot(age[ii], ZM[ii], marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
for ss in range(len(age)):
#ii = int(len(II0) - ss - 1) # from old to young templates.
ii = ss # from old to young templates.
# Cumulative (mass-weighted) metallicity from this age bin onward.
AC = np.sum(AM[ss:])
ZC[ss] = np.log10(np.sum(ZMM[ss:])/AC)
#ax2.errorbar(age[ii], ZC[ii], xerr=[[delTl[ii]/1e9], [delTu[ii]/1e9]], ms=10*(SF[ii]/np.sum(SF[:]))+1, marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
ax2.errorbar(age[ii], ZC[ii], xerr=[[delTl[ii]/1e9], [delTu[ii]/1e9]], ms=10*(AC/np.sum(AM[:]))+1, marker='o', lw=1, color=col[ii], zorder=1, label='', alpha=0.5)
# Total
ymax = np.max(fybb[conbb] * c / np.square(xbb[conbb]) / d) * 1.10
ax0.plot(x0, ysum * c/ np.square(x0) / d, '-', lw=1., color='gray', zorder=-1, label='', alpha=0.8)
ax0.set_xlim(2200, 88000)
ax0.set_xscale('log')
ax0.set_ylim(0., ymax)
ax0.set_xlabel('Observed wavelength ($\mathrm{\mu m}$)', fontsize=14)
ax0.set_ylabel('Flux ($\mathrm{erg}/\mathrm{s}/\mathrm{cm}^{2}/\mathrm{\AA}$)', fontsize=13)
ax1.plot(age[:], SF[:], marker='', linestyle='-', lw=1, color='k', zorder=-1, label='', alpha=0.5)
ax1.set_xlabel('$t$ (Gyr)', fontsize=12)
ax1.set_ylabel('$\log \dot{M_*}/M_\odot$yr$^{-1}$', fontsize=12)
ax1.set_xlim(0.008, Txmax)
ax1.set_ylim(0, SFmax)
ax1.set_xscale('log')
ax2.plot(age[:], ZC[:], marker='', linestyle='-', lw=1, color='k', zorder=-1, label='', alpha=0.5)
ax2.set_xlabel('$t$ (Gyr)', fontsize=12)
ax2.set_ylabel('$\log Z_*/Z_\odot$', fontsize=12)
ax2.set_xlim(0.008, Txmax)
#ax2.set_ylim(NPARmin[3], NPARmax[3])
ax2.set_ylim(-0.6, 0.5)
ax2.set_xscale('log')
#ax2.yaxis.labelpad = -5
# For redshift
# NOTE(review): no branch covers zbes >= 6, leaving `zred`/`zredl` unbound.
if zbes<2:
zred = [zbes, 2, 3, 6]
#zredl = ['$z_\mathrm{obs.}$', 2, 3, 6]
zredl = ['$z_\mathrm{obs.}$', 2, 3, 6]
elif zbes<2.5:
zred = [zbes, 2.5, 3, 6]
zredl = ['$z_\mathrm{obs.}$', 2.5, 3, 6]
elif zbes<3.:
zred = [zbes, 3, 6]
zredl = ['$z_\mathrm{obs.}$', 3, 6]
elif zbes<6:
zred = [zbes, 6]
zredl = ['$z_\mathrm{obs.}$', 6]
Tzz = np.zeros(len(zred), dtype='float')
for zz in range(len(zred)):
# NOTE(review): uses age(zbes), not age(zred[zz]) -- every tick collapses
# to the same value; presumably should be MB.cosmo.age(zred[zz]).value.
Tzz[zz] = (Tuni - MB.cosmo.age(zbes).value)
if Tzz[zz] < 0.01:
Tzz[zz] = 0.01
ax1t = ax1.twiny()
ax2t = ax2.twiny()
ax1t.set_xlim(0.008, Txmax)
ax1t.set_xscale('log')
ax1t.set_xticklabels(zredl[:])
ax1t.set_xticks(Tzz[:])
ax1t.tick_params(axis='x', labelcolor='k')
ax1t.xaxis.set_ticks_position('none')
ax1.plot(Tzz, Tzz*0+SFmax, marker='|', color='k', ms=3, linestyle='None')
ax2t.set_xlim(0.008, Txmax)
ax2t.set_xscale('log')
ax2t.set_xticklabels(zredl[:])
ax2t.set_xticks(Tzz[:])
ax2t.tick_params(axis='x', labelcolor='k')
ax2t.xaxis.set_ticks_position('none')
ax2.plot(Tzz, Tzz*0+0.5, marker='|', color='k', ms=3, linestyle='None')
# Convert into log
Ztmp[kk] /= ACtmp[kk]
Ttmp[kk] /= ACtmp[kk]
Ntmp[kk] = kk
lmtmp[kk] = np.log10(lmtmp[kk])
Ztmp[kk] = np.log10(Ztmp[kk])
Ttmp[kk] = np.log10(Ttmp[kk])
# Single draw only (scalar per panel), unlike the cumulative variant.
NPAR = [lmtmp[kk], Ttmp[kk], Avtmp[kk], Ztmp[kk]]
# Scatter and contour
for i, x in enumerate(Par):
for j, y in enumerate(Par):
#print(i,j,Par[j], Par[i])
if i > j:
ax = axes[i, j]
ax.scatter(NPAR[j], NPAR[i], c='b', s=10, marker='o', alpha=0.5)
ax.set_xlabel('%s'%(Par[j]), fontsize=12)
#x1min, x1max = np.min(NPAR[j]), np.max(NPAR[j])
#y1min, y1max = np.min(NPAR[i]), np.max(NPAR[i])
x1min, x1max = NPARmin[j], NPARmax[j]
y1min, y1max = NPARmin[i], NPARmax[i]
ax.set_xlim(x1min, x1max)
ax.set_ylim(y1min, y1max)
if j==0:
ax.set_ylabel('%s'%(Par[i]), fontsize=12)
if j>0:
ax.set_yticklabels([])
if i<K-1:
ax.set_xticklabels([])
if i == 2:
ax.yaxis.labelpad = 5.
if i < j:
# Upper triangle: blank panels.
ax = axes[i, j]
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_frame_on(False)
ax.set_xticks([])
ax.set_yticks([])
if i == j:
# Diagonal panels are blanked too (no histograms in this variant).
ax = axes[i, j]
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_frame_on(False)
ax.set_xticks([])
ax.set_yticks([])
'''
ax.set_yticklabels([])
if i == K-1:
ax.set_xlabel('%s'%(Par[i]), fontsize=12)
if i < K-1:
ax.set_xticklabels([])
'''
if kk%10 == 0:
fname = DIR_OUT + '%d.png' % (kk)
#fname = DIR_OUT + '%d.png' % (kk+1)
print('Saving frame', fname)
plt.savefig(fname, dpi=150)
#files.append(fname)
plt.close()
| 38.206284
| 229
| 0.510775
| 21,621
| 148,355
| 3.409602
| 0.056103
| 0.021161
| 0.022097
| 0.012168
| 0.901884
| 0.891805
| 0.87843
| 0.863672
| 0.848641
| 0.839973
| 0
| 0.070185
| 0.295164
| 148,355
| 3,882
| 230
| 38.216126
| 0.634814
| 0.090074
| 0
| 0.848582
| 0
| 0.002837
| 0.073451
| 0.008992
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004255
| false
| 0.011348
| 0.019149
| 0.000709
| 0.025532
| 0.02234
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
871214a07795609f5a55aa6053fa6c232ac65a7e
| 8,949
|
py
|
Python
|
tests/test_engine/test_fieldwalker/test_fieldwalker_get.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 478
|
2019-07-31T00:48:11.000Z
|
2022-03-18T09:12:29.000Z
|
tests/test_engine/test_fieldwalker/test_fieldwalker_get.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 47
|
2019-07-28T10:12:22.000Z
|
2022-01-04T16:25:12.000Z
|
tests/test_engine/test_fieldwalker/test_fieldwalker_get.py
|
bobuk/montydb
|
9ee299e7f1d3a7236abb683e0dfe4f7817859b2c
|
[
"BSD-3-Clause"
] | 26
|
2019-08-09T14:28:29.000Z
|
2022-02-22T02:49:51.000Z
|
from montydb.engine.field_walker import FieldWalker
def test_fieldwalker_value_get_1():
    """A top-level scalar field is returned wrapped in a list."""
    document = {"a": 1}
    result = FieldWalker(document).go("a").get().value
    assert result == [1]
def test_fieldwalker_value_get_2():
    """An embedded document fetched by its top-level key comes back whole."""
    document = {"a": {"b": 1}}
    result = FieldWalker(document).go("a").get().value
    assert result == [{"b": 1}]
def test_fieldwalker_value_get_3():
    """Dotted path into an embedded document; compare via iter_full()."""
    document = {"a": {"b": 1}}
    walker_value = FieldWalker(document).go("a.b").get().value
    assert list(walker_value.iter_full()) == [1]
def test_fieldwalker_value_get_4():
    """An array field yields its elements followed by the array itself."""
    document = {"a": [1]}
    walker_value = FieldWalker(document).go("a").get().value
    assert list(walker_value.iter_full()) == [1, [1]]
def test_fieldwalker_value_get_5():
    """Two-element array: both elements, then the whole array."""
    document = {"a": [0, 1]}
    walker_value = FieldWalker(document).go("a").get().value
    assert list(walker_value.iter_full()) == [0, 1, [0, 1]]
def test_fieldwalker_value_get_6():
    """Nested arrays are not flattened; the full array is appended last."""
    document = {"a": [0, [1], 2]}
    walker_value = FieldWalker(document).go("a").get().value
    assert list(walker_value.iter_full()) == [0, [1], 2, [0, [1], 2]]
def test_fieldwalker_value_get_7():
    """Array reached through a dotted path behaves like a top-level array."""
    document = {"a": {"b": [1]}}
    walker_value = FieldWalker(document).go("a.b").get().value
    assert list(walker_value.iter_full()) == [1, [1]]
def test_fieldwalker_value_get_8():
    """Path through an array of documents collects elements + array per doc."""
    document = {"a": [{"b": [0, 1]}, {"b": [2, 3]}, {"b": [4, 5]}]}
    walker_value = FieldWalker(document).go("a.b").get().value
    assert list(walker_value.iter_full()) == [0, 1, [0, 1], 2, 3, [2, 3], 4, 5, [4, 5]]
def test_fieldwalker_value_get_9():
    """Positional index (array positioned) into a nested array."""
    document = {"a": {"b": [1]}}
    result = FieldWalker(document).go("a.b.0").get().value
    assert result == [1]
def test_fieldwalker_value_get_10():
    """Positional index on a top-level array."""
    document = {"a": [0, 1]}
    result = FieldWalker(document).go("a.1").get().value
    assert result == [1]
def test_fieldwalker_value_get_11():
    """Index 0 selects the first array element, a document here."""
    document = {"a": [{"1": 0}, 1]}
    result = FieldWalker(document).go("a.0").get().value
    assert result == [{"1": 0}]
def test_fieldwalker_value_get_12():
    """Chained positional indexes step into nested arrays."""
    document = {"a": [[0, 1], 1]}
    result = FieldWalker(document).go("a.0.1").get().value
    assert result == [1]
def test_fieldwalker_value_get_13():
    """After a positional step, a digit segment acts as a document key."""
    document = {"a": [{"1": 0}, 1]}
    result = FieldWalker(document).go("a.0.1").get().value
    assert result == [0]
def test_fieldwalker_value_get_14():
doc = {"a": [{"b": [0, 1]}, {"b": [2, 3]}, {"b": [4, 5]}]}
path = "a.b.1"
value = [1, 3, 5]
assert FieldWalker(doc).go(path).get().value == value
def test_fieldwalker_value_get_15():
# array opsitioned and digit-str field
doc = {"a": [{"1": 0}, 1]}
path = "a.1"
value = [0, 1]
assert FieldWalker(doc).go(path).get().value == value
def test_fieldwalker_value_get_16():
doc = {"a": [{"1": 0}, 1, {"1": 2}, 3, {"1": 4}]}
path = "a.1"
value = [0, 2, 4, 1] # notice that doc values are before array element
assert FieldWalker(doc).go(path).get().value == value
def test_fieldwalker_value_get_17():
    doc = {"a": [{"b": [0, 1, {"1": 99}]},
                 {"b": [2, 3]}]}
    path = "a.b.1"
    value = [99, 1, 3]  # doc value (99) are before array elements (1, 3)
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_18():
    # array positioned and embedded documents
    doc = {"a": [{"b": [0, {"c": 1}]},
                 {"b": [2, {"c": 3}]},
                 {"b": [4, {"c": 5}]}]}
    path = "a.b.1.c"
    value = [1, 3, 5]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_19():
    doc = {"a": [{"b": [0, {"c": [1, "x"]}]},
                 {"b": [2, {"c": [3, "y"]}]},
                 {"b": [4, {"c": [5, "z"]}]}]}
    path = "a.b.1.c"
    value = [1, "x", [1, "x"], 3, "y", [3, "y"], 5, "z", [5, "z"]]
    field_value = FieldWalker(doc).go(path).get().value
    assert list(field_value.iter_full()) == value


def test_fieldwalker_value_get_20():
    doc = {"a": [{"b": [{"c": [0, {"d": [1, "x"]}]},
                        {"c": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d"
    value = [
        1, "x", [1, "x"], 3, "y", [3, "y"], 5, "z", [5, "z"],
        11, "i", [11, "i"], 13, "j", [13, "j"], 15, "k", [15, "k"]
    ]
    field_value = FieldWalker(doc).go(path).get().value
    assert list(field_value.iter_full()) == value


def test_fieldwalker_value_get_21():
    doc = {"a": [{"b": [{"c": [0, {"d": [1, "x"]}]},
                        {"c": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d.1"
    value = ["x", "y", "z", "i", "j", "k"]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_22():
    # array positioned and embedded documents and digit-str field
    doc = {"a": [{"b": [0, {"c": 1}, {"1": 99}]},
                 {"b": [2, {"c": 3}]}]}
    path = "a.b.1.c"
    value = [1, 3]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_23():
    doc = {"a": [{"b": [{"c": [0, {"d": [1, "x"]}, {"1": 99}]},
                        {"c": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d.1"
    value = ["x", "y", "z", "i", "j", "k"]
    assert FieldWalker(doc).go(path).get().value == value
def test_fieldwalker_value_get_24():
    # with missing field
    doc = {"a": [{"X": [{"c": [0, {"d": [1, "x"]}]},
                        {"c": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d.1"
    value = ["i", "j", "k"]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_25():
    doc = {"a": [{"b": [{"c": [0, {"d": [1, "x"]}]},
                        {"X": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d.1"
    value = ["x", "z", "i", "j", "k"]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_26():
    # array element shortage
    doc = {"a": [{"b": [{"c": [0, {"d": [1]}]},
                        {"c": [2, {"d": [3, "y"]}]},
                        {"c": [4, {"d": [5, "z"]}]}]},
                 {"b": [{"c": [10, {"d": [11, "i"]}]},
                        {"c": [12, {"d": [13, "j"]}]},
                        {"c": [14, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.1.d.1"
    value = ["y", "z", "i", "j", "k"]
    assert FieldWalker(doc).go(path).get().value == value


def test_fieldwalker_value_get_27():
    doc = {"a": [{"b": [{"c": [{"0": [{"d": [0, 1]}]}, {"d": [1]}]},
                        {"c": [{"0": [{"d": [0, 2]}]}, {"d": [3, "y"]}]},
                        {"c": [{"d": [5, "z"]}, {"0": [{"d": [0, 3]}]}]}]},
                 {"b": [{"c": [{"0": [{"d": [0, 4]}]}, {"d": [11, "i"]}]},
                        {"c": [{"0": [{"d": [0, 5]}]}, {"d": [13, "j"]}]},
                        {"c": [{"0": [{"d": [0, 6]}]}, {"d": [15, "k"]}]}]}
                 ]}
    path = "a.b.c.0.d.1"
    value = [1, 2, 3, "z", 4, 5, 6]
    fw = FieldWalker(doc).go(path).get()
    assert fw.value == value


def test_fieldwalker_value_get_28():
    doc = {"a": [{"1": {"b": 5}}, 1]}
    path = "a.1.b"
    value = [5]
    fw = FieldWalker(doc).go(path).get()
    assert fw.value == value


def test_fieldwalker_value_get_29():
    doc = {"a": [True, True, {"2": True, "3": True}]}
    path = "a.3"
    value = [True]
    fw = FieldWalker(doc).go(path).get()
    assert fw.value == value
def test_fieldwalker_clean_result_1():
    # NOTE(review): indentation was mangled in this copy, so the extent of the
    # ``with`` block is ambiguous.  Reconstructed with both queries and the
    # assert inside the context manager, which matches the assertion ([5] —
    # only the *last* go().get() contributes, the "b" result is cleaned).
    # Confirm against the upstream test suite.
    doc = {"a": 5, "b": 8}
    fieldwalker = FieldWalker(doc)
    with fieldwalker:
        fieldwalker.go("b").get()
        fieldwalker.go("a").get()
        assert fieldwalker.value == [5]
| 31.181185
| 75
| 0.424293
| 1,235
| 8,949
| 2.957085
| 0.065587
| 0.017525
| 0.147864
| 0.18264
| 0.847207
| 0.795455
| 0.76287
| 0.748631
| 0.72207
| 0.714129
| 0
| 0.061215
| 0.299028
| 8,949
| 286
| 76
| 31.29021
| 0.520963
| 0.035311
| 0
| 0.576744
| 0
| 0
| 0.04964
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 1
| 0.139535
| false
| 0
| 0.004651
| 0
| 0.144186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
873d4dc552241d96e943e88194f5cd5684583309
| 2,064
|
py
|
Python
|
apps/vmware/migrations/0002_auto_20200602_1853.py
|
death-finger/get2unix
|
1ff6f729f076040d6493251471cc0ee9cdcdc661
|
[
"MIT"
] | null | null | null |
apps/vmware/migrations/0002_auto_20200602_1853.py
|
death-finger/get2unix
|
1ff6f729f076040d6493251471cc0ee9cdcdc661
|
[
"MIT"
] | null | null | null |
apps/vmware/migrations/0002_auto_20200602_1853.py
|
death-finger/get2unix
|
1ff6f729f076040d6493251471cc0ee9cdcdc661
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.6 on 2020-06-02 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make every listed ``DeployLists`` column nullable."""

    dependencies = [
        ('vmware', '0001_initial'),
    ]

    # One AlterField per (column, field) pair; the comprehension produces the
    # exact same operations, in the same order, as writing them out longhand.
    operations = [
        migrations.AlterField(
            model_name='deploylists',
            name=column,
            field=field,
        )
        for column, field in [
            ('cluster', models.CharField(max_length=100, null=True)),
            ('cpu', models.SmallIntegerField(null=True)),
            ('datacenter', models.CharField(max_length=50, null=True)),
            ('datastore', models.CharField(max_length=100, null=True)),
            ('gateway', models.GenericIPAddressField(null=True)),
            ('ip', models.GenericIPAddressField(null=True)),
            ('mask', models.CharField(max_length=30, null=True)),
            ('memory', models.SmallIntegerField(null=True)),
            ('profile', models.CharField(max_length=100, null=True)),
            ('vlan', models.CharField(max_length=50, null=True)),
            ('vm_name', models.CharField(max_length=100, null=True)),
        ]
    ]
| 29.913043
| 62
| 0.551357
| 181
| 2,064
| 6.176796
| 0.276243
| 0.19678
| 0.245975
| 0.285331
| 0.814848
| 0.788909
| 0.749553
| 0.749553
| 0.667263
| 0.667263
| 0
| 0.02689
| 0.333333
| 2,064
| 68
| 63
| 30.352941
| 0.78561
| 0.021802
| 0
| 0.693548
| 1
| 0
| 0.101636
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
875d7431018c5024726d8593a8cda3c1ea20647b
| 963
|
py
|
Python
|
paa191t1/dijkstra/datastructs/heap/__init__.py
|
dmmoura/PAA-2021
|
435005f6494ece76f03807fb524e0d4a3e1d7222
|
[
"Apache-2.0"
] | null | null | null |
paa191t1/dijkstra/datastructs/heap/__init__.py
|
dmmoura/PAA-2021
|
435005f6494ece76f03807fb524e0d4a3e1d7222
|
[
"Apache-2.0"
] | null | null | null |
paa191t1/dijkstra/datastructs/heap/__init__.py
|
dmmoura/PAA-2021
|
435005f6494ece76f03807fb524e0d4a3e1d7222
|
[
"Apache-2.0"
] | null | null | null |
from paa191t1.dijkstra.datastructs.tree import DistanceNode
class MinHeapNode(DistanceNode):
    """Comparison wrapper for a node of a min-heap.

    Nodes are ordered primarily by ``distance``; ties are broken by
    ``vertex`` so that the ordering is total and deterministic.  Any
    comparison against ``None`` returns ``False`` (preserved from the
    original behavior).
    """

    def __gt__(self, other):
        if other is not None:
            return (self.distance > other.distance) or (
                self.distance == other.distance and self.vertex > other.vertex
            )
        return False

    def __ge__(self, other):
        # BUG FIX: the original first clause used ``self.distance >=
        # other.distance``, which is True whenever distances are equal, so the
        # vertex tie-break clause was dead code and the ordering was
        # inconsistent with __lt__/__gt__ (both ``a < b`` and ``a >= b`` could
        # hold for equal distances).  Mirror __gt__ with ``>=`` on the vertex.
        if other is not None:
            return (self.distance > other.distance) or (
                self.distance == other.distance and self.vertex >= other.vertex
            )
        return False

    def __lt__(self, other):
        if other is not None:
            return (self.distance < other.distance) or (
                self.distance == other.distance and self.vertex < other.vertex
            )
        return False

    def __le__(self, other):
        # BUG FIX: mirrored from __lt__ with a ``<=`` vertex tie-break; see
        # the note on __ge__ for why the original clause was inconsistent.
        if other is not None:
            return (self.distance < other.distance) or (
                self.distance == other.distance and self.vertex <= other.vertex
            )
        return False
| 37.038462
| 120
| 0.654206
| 124
| 963
| 4.951613
| 0.274194
| 0.156352
| 0.221498
| 0.325733
| 0.783388
| 0.783388
| 0.783388
| 0.783388
| 0.783388
| 0.783388
| 0
| 0.005548
| 0.251298
| 963
| 25
| 121
| 38.52
| 0.846047
| 0.05296
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.055556
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5ecb2f13861f299734373602bd961857467cbc8a
| 14,695
|
py
|
Python
|
tests/test_utils.py
|
jplusplus/siris_scraper
|
7d4f2e6c5daad70689278ef08d60bb3be172787b
|
[
"MIT"
] | 1
|
2020-05-20T22:21:10.000Z
|
2020-05-20T22:21:10.000Z
|
tests/test_utils.py
|
jplusplus/siris_scraper
|
7d4f2e6c5daad70689278ef08d60bb3be172787b
|
[
"MIT"
] | 1
|
2021-09-20T11:54:24.000Z
|
2021-09-24T13:34:24.000Z
|
tests/test_utils.py
|
jplusplus/siris_scraper
|
7d4f2e6c5daad70689278ef08d60bb3be172787b
|
[
"MIT"
] | 1
|
2020-01-09T03:58:11.000Z
|
2020-01-09T03:58:11.000Z
|
# encoding: utf-8
"""Test utility functions."""
from unittest import TestCase
import os
from siris.utils import get_data_from_xml, iter_options, parse_value, parse_period
from requests.exceptions import HTTPError
DATA_DIR = "tests/data"
class TestUtils(TestCase):
    def setUp(self):
        # No shared fixtures are needed; kept to satisfy the TestCase template.
        pass
def get_data_from_xml(self):
file_path = os.path.join(DATA_DIR, "exp_kostnader_kommun_fklass_2016.xml")
with open(file_path) as f:
content = f.read()
data = [x for x in get_data_from_xml(content)]
assert len(data) == 1740
assert data[0]["niva"] == "skola"
def test_get_data_from_xml_with_uttag_dimension(self):
file_path = os.path.join(DATA_DIR, "exp_pers_amne_gr_skola_2014_sample.xml")
with open(file_path) as f:
content = f.read()
data = [x for x in get_data_from_xml(content)]
assert data[0]["uttag"] == "2015-08-17"
def test_get_data_from_xml_with_amne_dimension(self):
file_path = os.path.join(DATA_DIR, "exp_personal_alder_gr_kommun_2017_sample.xml")
with open(file_path) as f:
content = f.read()
data = [x for x in get_data_from_xml(content)]
assert "amne" in data[0]
assert data[0]["amne"] == u"Samtliga lärare"
def test_get_data_at_different_levels(self):
file_path = os.path.join(DATA_DIR, "exp_pers_amne_gy_kommun_amne_2018.xml")
with open(file_path) as f:
content = f.read()
data = [x for x in get_data_from_xml(content)]
assert data[0]["niva"] == "kommun"
    def test_iter_options(self):
        """iter_options yields one item per <option> in a <select> blob."""
        # NOTE(review): the internal whitespace of these triple-quoted HTML
        # fixtures was lost in a formatting-mangled copy; reproduced flush-left.
        # Confirm against the original file if exact whitespace matters.
        select_elem = """
<select name="psAr" onchange="reload(this.form);">\n
<option selected="" value="2016">2016/17\n
<option value="2015">2015/16\n
<option value="2014">2014/15\n
<option value="2013">2013/14\n
<option value="2012">2012/13\n
<option value="2011">2011/12\n
<option value="2010">2010/11\n
<option value="2009">2009/10\n
<option value="2008">2008/09\n
<option value="2007">2007/08\n
<option value="2006">2006/07\n
<option value="2005">2005/06\n
<option value="2004">2004/05\n
<option value="2003">2003/04\n
<option value="2002">2002/03\n
<option value="2001">2001/02\n
<option value="2000">2000/01\n
<option value="1999">1999/00\n
<option value="1998">1998/99\n
<option value="1997">1997/98\n
</option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></select>
"""
        options = [x for x in iter_options(select_elem)]
        assert len(options) == 20
        select_elem = """
<select name="psOmgang" onchange="reload(this.form);" title="Uttag ur l\xe4rarlegitimationsregistret">\n<option value="2">2015-08-17\n<option selected="" value="1">2015-02-04\n</option></option></select>
"""
        options = [x for x in iter_options(select_elem)]
        assert len(options) == 2
def test_iter_options_complex(self):
select_elem = '<select name="pnExport" onchange="reload(this.form);">\n<option value="23">Antal elever per \xc3\xa5rskurs\n<option value="6">Antal elever per \xc3\xa5rskurs\n<option value="146">Antal elever per \xc3\xa5rskurs\n<option value="53">Beh\xc3\xb6righet till gymnasieskolan, fr.o.m. 2011\n<option value="155">Beh\xc3\xb6righet till gymnasieskolan, fr.o.m. 2011\n<option value="5">Beh\xc3\xb6righet till gymnasieskolan, fr.o.m. 2011\n<option value="30">Beh\xc3\xb6righet till gymnasieskolan, t.o.m. 2010\n<option value="8">Beh\xc3\xb6righet till gymnasieskolan, t.o.m. 2010\n<option value="202">Beslutade anm\xc3\xa4lningar\n<option value="204">Beslutade anm\xc3\xa4lningar\n<option value="71">Betyg per \xc3\xa4mne \xc3\xa5rskurs 6\n<option value="72">Betyg per \xc3\xa4mne \xc3\xa5rskurs 6\n<option value="173">Betyg per \xc3\xa4mne \xc3\xa5rskurs 6\n<option value="145">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), med/utan nyinvandrade och ok\xc3\xa4nd bakgrund\n<option value="142">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), med/utan nyinvandrade och ok\xc3\xa4nd bakgrund\n<option value="175">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), med/utan nyinvandrade och ok\xc3\xa4nd bakgrund\n<option value="176">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), per k\xc3\xb6n\n<option value="144">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), per k\xc3\xb6n\n<option value="94">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), per k\xc3\xb6n\n<option value="143">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), samtliga elever\n<option value="87">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), samtliga elever\n<option value="174">Betyg \xc3\xa5rskurs 6, andel som uppn\xc3\xa5tt kunskapskraven (A-E), samtliga elever\n<option value="203">Inkomna anm\xc3\xa4lningar\n<option 
value="201">Inkomna anm\xc3\xa4lningar\n<option value="32">Kostnader per kommun\n<option value="235">Kostnader per l\xc3\xa4n\n<option value="60">Pendling mellan hem- och skolkommun per typ av huvudman\n<option value="265">Personalstatistik\n<option value="81">Personalstatistik\n<option value="154">Personalstatistik\n<option value="16">Personalstatistik\n<option value="101">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne\n<option value="177">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne\n<option value="99">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne\n<option value="254">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne\n<option value="281">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, j\xc3\xa4mf\xc3\xb6rt med f\xc3\xb6reg\xc3\xa5ende \xc3\xa5r, antal heltidstj\xc3\xa4nster\n<option value="278">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, j\xc3\xa4mf\xc3\xb6rt med f\xc3\xb6reg\xc3\xa5ende \xc3\xa5r, antal heltidstj\xc3\xa4nster\n<option value="280">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, j\xc3\xa4mf\xc3\xb6rt med f\xc3\xb6reg\xc3\xa5ende \xc3\xa5r, antal l\xc3\xa4rare\n<option value="276">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, j\xc3\xa4mf\xc3\xb6rt med f\xc3\xb6reg\xc3\xa5ende \xc3\xa5r, antal l\xc3\xa4rare\n<option value="285">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 1-3\n<option value="163">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 1-3\n<option value="164">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 
4-6\n<option value="286">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 4-6\n<option value="284">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 7-9\n<option value="111">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet i minst ett \xc3\xa4mne, \xc3\xa5k 7-9\n<option value="102">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne\n<option value="100">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne\n<option value="267">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne\n<option value="179">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne\n<option value="161">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 1-3\n<option value="183">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 1-3\n<option value="182">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 4-6\n<option value="162">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 4-6\n<option value="112">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 7-9\n<option value="181">Personalstatistik med l\xc3\xa4rarlegitimation och beh\xc3\xb6righet per \xc3\xa4mne, \xc3\xa5k 7-9\n<option value="168">Personalstatistik, \xc3\xa5ldersf\xc3\xb6rdelning\n<option value="274">Personalstatistik, \xc3\xa5ldersf\xc3\xb6rdelning med leg.\n<option value="171">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, per huvudman, svenska/svenska som andraspr\xc3\xa5k, matematik och engelska\n<option value="11">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, per kommun, svenska/svenska som andraspr\xc3\xa5k, 
matematik och engelska\n<option value="2">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, per k\xc3\xb6n, svenska/svenska som andraspr\xc3\xa5k, matematik och engelska\n<option value="273">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, per l\xc3\xa4n, svenska/svenska som andraspr\xc3\xa5k, matematik och engelska\n<option value="1">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, svenska/svenska som andraspr\xc3\xa5k, matematik och engelska\n<option value="4">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, totalt och per k\xc3\xb6n, biologi, fysik och kemi\n<option value="123">Relationen mellan nationella prov och slutbetyg \xc3\xa5rskurs 9, totalt och per k\xc3\xb6n, geografi, historia, religionskunskap och samh\xc3\xa4llskunskap\n<option value="84">Relationen mellan nationella prov och terminsbetyg \xc3\xa5rskurs 6\n<option value="192">Relationen mellan nationella prov och terminsbetyg \xc3\xa5rskurs 6\n<option value="83">Relationen mellan nationella prov och terminsbetyg \xc3\xa5rskurs 6\n<option value="12">Resultat nationella prov \xc3\xa5rskurs 3\n<option value="52">Resultat nationella prov \xc3\xa5rskurs 3\n<option value="191">Resultat nationella prov \xc3\xa5rskurs 3\n<option value="190">Resultat nationella prov \xc3\xa5rskurs 6\n<option value="51">Resultat nationella prov \xc3\xa5rskurs 6\n<option value="49">Resultat nationella prov \xc3\xa5rskurs 6\n<option value="194">Resultat nationella prov \xc3\xa5rskurs 9, per delprov och k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="193">Resultat nationella prov \xc3\xa5rskurs 9, per delprov och k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="195">Resultat nationella prov \xc3\xa5rskurs 9, per delprov och k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="199">Resultat nationella prov \xc3\xa5rskurs 9, provbetyg per 
k\xc3\xb6n - Biologi, Fysik och Kemi\n<option value="196">Resultat nationella prov \xc3\xa5rskurs 9, provbetyg per k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="198">Resultat nationella prov \xc3\xa5rskurs 9, provbetyg per k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="197">Resultat nationella prov \xc3\xa5rskurs 9, provbetyg per k\xc3\xb6n - Engelska, Matematik och Svenska/svenska som andraspr\xc3\xa5k\n<option value="200">Resultat nationella prov \xc3\xa5rskurs 9, provbetyg per k\xc3\xb6n - Geografi, Historia, Religionskunskap, Samh\xc3\xa4llskunskap\n<option value="95">Salsa, skolenheters resultat av slutbetygen i \xc3\xa5rskurs 9 med h\xc3\xa4nsyn till elevsammans\xc3\xa4ttningen\n<option value="148">Slutbetyg per \xc3\xa4mne \xc3\xa5rskurs 9, fr.o.m. 2013\n<option value="92">Slutbetyg per \xc3\xa4mne \xc3\xa5rskurs 9, fr.o.m. 2013\n<option value="93">Slutbetyg per \xc3\xa4mne \xc3\xa5rskurs 9, fr.o.m. 
2013\n<option value="138">Slutbetyg \xc3\xa5rskurs 9, samtliga elever\n<option value="150">Slutbetyg \xc3\xa5rskurs 9, samtliga elever\n<option value="139">Slutbetyg \xc3\xa5rskurs 9, samtliga elever\n<option value="26">Slutbetyg \xc3\xa5rskurs 9, uppdelat per f\xc3\xb6r\xc3\xa4ldrarnas h\xc3\xb6gsta utbildningsniv\xc3\xa5\n<option value="29">Slutbetyg \xc3\xa5rskurs 9, uppdelat per f\xc3\xb6r\xc3\xa4ldrarnas h\xc3\xb6gsta utbildningsniv\xc3\xa5\n<option value="140">Slutbetyg \xc3\xa5rskurs 9, uppdelat per k\xc3\xb6n\n<option value="141">Slutbetyg \xc3\xa5rskurs 9, uppdelat per k\xc3\xb6n\n<option value="109">Slutbetyg \xc3\xa5rskurs 9, uppdelat per nyinvandrade och exklusive nyinvandrade\n<option value="110">Slutbetyg \xc3\xa5rskurs 9, uppdelat per nyinvandrade och exklusive nyinvandrade\n<option value="25">Slutbetyg \xc3\xa5rskurs 9, uppdelat per svensk och utl\xc3\xa4ndsk bakgrund\n<option value="28">Slutbetyg \xc3\xa5rskurs 9, uppdelat per svensk och utl\xc3\xa4ndsk bakgrund\n</option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></option></select>'
options = [x for x in iter_options(select_elem)]
assert len(options) == 96
def test_parse_value(self):
assert parse_value(".") == (None, "missing or 0")
assert parse_value("..") == (None, "too few")
assert parse_value("5") == (5.0, None)
assert parse_value("1 234") == (1234.0, None)
def test_parse_period(self):
assert parse_period(u"Valt år: 2016 Endast kommunal huvudman") == (u"2016", u"år")
assert parse_period(u"Valt läsår: 2016/17 ") == (u"2016/17", u"läsår")
assert parse_period(u"Vald termin: HT12") == (u"HT12", "termin")
| 151.494845
| 10,853
| 0.737462
| 2,281
| 14,695
| 4.707146
| 0.148619
| 0.078886
| 0.129645
| 0.245879
| 0.812331
| 0.795753
| 0.78439
| 0.760361
| 0.749744
| 0.701965
| 0
| 0.086762
| 0.118408
| 14,695
| 96
| 10,854
| 153.072917
| 0.74203
| 0.002722
| 0
| 0.240506
| 0
| 0.037975
| 0.851048
| 0.329715
| 0
| 0
| 0
| 0
| 0.202532
| 1
| 0.113924
| false
| 0.012658
| 0.050633
| 0
| 0.177215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0d60f25b30e5555c36ecb812f7ee466a0a60b6dc
| 39,887
|
py
|
Python
|
Python/windwardrestapi/Api/WindwardClient.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | null | null | null |
Python/windwardrestapi/Api/WindwardClient.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | 1
|
2020-10-12T20:32:05.000Z
|
2020-10-12T20:38:04.000Z
|
Python/windwardrestapi/Api/WindwardClient.py
|
windward-studios/Windward-REST-version-2-Clients
|
8fd467e6f4ece6fcc435609ffb23448d07af3131
|
[
"MIT"
] | null | null | null |
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x08\x00\x55\x0d\x0d\x0a\x04\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xaa\x26\x00\x00\x00\x00\x00\x10\x0f\x10\x3c\xe5\x7a\xba\x56\x5e\xd7\x15\x7a\x99\x3d\x75\x00\x48\x00\x00\x00\x00\x00\x00\x00\x00\xc6\xae\x2f\xf3\xe7\x85\xae\x36\x5a\xf2\xf8\x52\x2a\x7b\x0c\xeb\x92\xff\x62\x7c\xcd\x78\xd9\xba\x58\xe0\x1f\xe7\xb7\x4f\x9f\x7d\xb2\xea\x19\x0a\x37\x2f\xa0\x8e\x3b\x39\x9b\x73\xcd\x3e\xa1\x08\xbc\xb4\xc2\xbc\xb5\x8c\x83\x4a\xa0\xa7\x43\x43\xd9\xb5\x65\x26\x72\xae\x79\xe7\xab\x4f\xda\x70\x2b\xd9\xda\xd4\x14\x56\x19\xd4\x9d\x8a\xa5\x78\x2a\xb0\xfe\x07\xe6\x2e\x46\x0b\xe7\xdb\xb4\x9f\x0f\x51\x4d\x11\x8d\x99\x4b\xd6\xbc\x73\xdc\xc7\x1c\x63\x90\x68\x26\x85\xe3\x45\x90\x91\x57\xd8\xc7\x85\xc0\x04\x9e\x72\x08\xca\x62\xe4\x59\x71\x76\xbe\xd7\x56\x93\x7b\x0b\x2b\xf8\x24\x01\xda\x55\x24\x26\x95\x55\xe3\x23\xc5\x59\x5d\x50\x85\x58\x4f\x05\x95\xbc\x87\x17\xb2\xe5\x65\x48\xc4\x39\x7c\x15\xd1\xbd\xff\x86\x4b\x57\xd2\xfe\x4e\x57\xe9\x18\xc7\x23\x45\x51\xe5\x15\x0f\x47\x44\xb1\x6e\xfb\x16\x96\x4b\xc8\xd1\xb0\xbf\x34\x84\xf9\xdf\xcd\x70\x7d\x46\x49\xe6\x35\xdd\xe5\xfa\x35\xa8\xf5\x88\x1e\xb0\x5c\xcb\x5f\xfd\xd0\xac\xbd\xbb\xac\xd1\x57\xe2\xd9\xda\xa2\x90\xdf\xb7\x74\xe9\x23\x08\x4b\x65\xc8\x03\x91\xc8\x5f\x8a\x57\xd8\x57\xd8\xeb\xde\x46\xc6\xc2\x2f\x31\x0d\x1c\x35\x2d\xd9\x5d\x31\x39\xb5\x10\xbc\xe7\x57\xa2\x52\xf4\x5a\xc2\x1b\x7f\xf2\xeb\x00\xcd\xb7\xc3\x1d\xea\xf2\x58\x38\x12\x4e\xb5\xb7\xb4\x63\x52\x45\x7b\xc8\x7a\xc7\x40\x15\xa0\x5b\x45\x2b\x8a\xd3\x4c\x3e\x48\x53\x99\x5f\x18\x4d\xc6\x15\x59\xf8\xb8\xd8\x54\x2f\xe6\x6f\x55\xaa\x93\x3c\x82\xb1\x39\xa1\x80\xec\x0d\x03\x14\xac\x20\x73\x83\x0e\x69\xab\x40\xcc\x74\x2f\xae\xa7\x35\xfd\x0d\x94\xf3\x8d\xdf\x8d\xca\x86\x06\x25\xb4\xbe\x0c\x4f\xa5\xb2\x0d\x29\x30\x1c\x6a\x75\xc1\x0e\xca\x79\x97\x15\xda\x49\x2a\x05\xd4\x71\xd0\xca\x9a\x0a\x59\x08\x49\xbe\xe6\x24\x90\x48\x2e\x6c\xa4\xef\x92\x7f\x86\xaa\x64\xaf\xc2\x70\x47\x5c\xf0\x57\x17\x5c\x18\x9b\xdd\x
61\xed\xa0\xc1\xf2\x91\x23\x5e\x08\xfa\xb6\x2e\xa1\x51\x9f\x9d\xfe\x9e\xdb\x6c\xa2\xeb\xf0\x85\x5a\xd8\x8a\xf0\x65\xe2\x9f\x60\x02\xc7\x30\xaf\x48\x8c\xd7\x1d\x73\xd6\xb0\x53\x67\x9c\xb2\x12\x77\x73\xf5\x0d\x65\xcd\xa0\x80\x92\x0f\x84\x01\x2c\xfc\x68\x34\xf1\x6d\x01\x39\xd6\xd8\x76\x90\x38\xd8\x62\x5c\x4a\x89\x1f\x6a\xe4\x08\x54\x0c\x1c\x62\x73\x6a\xaa\xac\x47\x47\xcc\xd6\xb3\xe1\xe1\x38\x3d\xf4\x4b\xbc\x13\xbb\x77\x11\x5d\xd8\x88\x4d\xe2\x00\x21\x6d\xb1\x31\x64\x77\x42\x4a\x1c\x6a\xce\x53\xcf\xdf\xb8\x2d\xbb\x07\x2d\xea\x8a\xc3\x9b\x2a\xe5\xba\xb8\x10\x85\xd0\xa4\x61\xb3\xe0\x7c\x69\x6f\x74\x91\x2d\xe4\x3c\x0a\x23\x41\xaa\x83\xff\xe4\x9f\x74\x51\xf9\xc4\x3a\x8b\x21\x2d\x49\x20\xa4\x15\xd9\x5d\x31\x16\x5b\x8b\x63\x5b\x47\x97\x89\xd6\xdc\xb7\xf9\x2a\xa6\x3f\x3b\x6c\xa8\x69\x4f\x29\xab\x67\x73\xd3\xdc\x59\xa5\xa1\x3e\xe0\xdb\x5b\xb1\xaa\x66\x9e\xa1\x2a\xa5\xf7\xf3\x6f\x20\x6e\xfd\xe8\x67\xdd\x76\xa8\x87\xdd\x26\x58\x90\x4d\x41\x4d\x51\xc6\x0a\x26\x3a\x15\xf3\x80\x70\x67\xd8\xb2\xf8\xd4\x84\xa7\x84\x6a\xa6\xd5\xa0\xc2\x30\x1a\x8f\xeb\xda\x8b\x1c\x99\x48\x77\x5b\xf0\x31\x39\x84\x19\x7c\xce\x9d\x0f\x92\xa6\x73\x5f\xac\xf2\x2c\x19\x18\x49\x1b\x79\xee\x92\xe0\xb6\xa7\x52\x3b\xb2\x9c\x79\x48\x81\xdf\x8a\xf9\xfe\x83\xfc\x1b\xac\x37\xd3\xda\x8b\xf0\x20\x59\x77\xfd\xf6\x12\x75\xec\xa6\x0b\x27\x09\xd9\xa5\x96\xda\xe9\x8c\xf9\x4a\xd3\x5b\x77\xe4\x6f\x20\xdc\x8a\xbe\x3e\xb4\x62\x78\x43\x6f\x73\x41\xc7\xe0\xde\x5e\x1e\x5e\xa3\xa1\xc7\x3e\x6e\xfe\x96\x3f\xf7\x85\x41\x49\xed\x83\xc3\x84\xfb\x73\x6c\x20\x69\x84\xd9\x71\x52\xa4\x7d\xe5\x43\x34\xcb\xa1\xeb\xc5\xf6\xe9\x5b\xba\xb7\xba\x4c\x34\xf2\x12\xd7\xaa\x8b\x2e\x66\x55\x84\x51\x67\x64\xa9\xa0\x0f\x1b\x8d\x45\x62\x26\xc8\x1f\x68\x44\x78\x04\x92\xe7\x63\xed\x5a\x49\x8a\x62\xeb\x5d\xae\x7d\xdd\x2c\x12\xe6\x77\xbb\x18\xe4\x68\x2a\x1d\x8e\xdb\x15\xf2\xe9\x8a\xa3\x18\x27\x03\x6f\xe8\xbb\x07\x50\x67\xdf\x55\xcc\xf4\xb9\xb3\x2b\x69\x91\x93\x52\xa7\x44\x8c\xbf\x8a\x92\x1f\xad\x82\xdf\x12\x85\x68\x52\xa5\x3c\xb7\x0d\x70\xb7\x7e\x0a\x1a\x8f\x
f4\xf5\x68\x54\x5a\xde\x46\x5c\xe1\x6a\xaa\xa0\xa9\x20\xc9\x4d\x71\x18\x5b\x0c\xe0\x19\x55\x60\xc3\xed\xbc\x7d\x0a\x09\xb8\xe9\x51\x54\xee\xc4\xf2\xdf\x6c\x6f\x52\x35\x31\x31\x5c\x6d\x9a\xf7\xba\x22\x1b\x25\xf8\x6f\x4c\x37\xb1\xd9\xc9\xe2\x94\x1a\x7b\xae\x96\xda\x72\xcc\x64\x8d\x32\xf7\x22\xfa\xed\x5a\x1d\xbb\x80\xba\x98\x81\x83\xd1\xdb\x0c\xc5\xba\x18\x54\xb1\xf6\xcd\xc7\x5b\x52\x4b\x7a\x96\xf5\x2d\xbf\x55\xb1\x47\x1c\xbf\xac\x5e\xb8\x9f\x3b\xdc\xb2\xcb\x22\x7b\xa8\x26\x43\x08\x2e\xfb\xab\xdf\x43\x59\xdf\x7e\x54\x23\x4c\x3c\x2e\x18\x43\x11\x6e\xa3\xd3\x1a\x71\x00\xb4\xb3\x5e\xef\x65\xab\xbf\xe7\x54\xdb\xe7\xbb\x71\xcd\x6c\x0a\x12\x97\xec\x20\x83\x5b\x25\x69\x36\xff\x4a\x4b\x5d\xdd\xa9\x80\x49\x33\x95\x66\x3a\x72\x56\xd8\x8b\x0e\x53\x3b\x4d\x0f\xf5\x38\xa0\x90\x04\x95\xe5\x05\xa3\x0e\x8e\x62\xed\x81\x44\x17\x69\xcb\x6c\xf1\xae\x38\xe4\xa0\xf2\xae\x42\xb6\xe5\x94\xea\xc5\xaa\x84\xfa\x68\x5b\x67\x19\x69\x00\xcc\x09\x5e\x62\x3f\x79\xc5\x77\x30\xc1\xf5\xb4\x04\x2a\x76\x22\x53\x0d\xa6\xda\xb0\x6e\x3f\x41\x47\xfe\xac\xfb\x24\xde\x5c\xa5\xe6\xcf\x63\x50\xd1\xea\x4c\xae\x95\xae\x4b\x47\xa5\xe8\xb5\xf8\x5e\x08\x36\xb7\x32\xee\x44\xac\x72\x8e\xb1\xe2\x4c\xa9\x96\xea\x6c\xa7\x4d\x44\x85\x6f\x25\x18\x3c\x33\xbe\x1b\x15\x62\x86\x6f\x7f\x54\x8f\xef\x9e\xde\x17\x92\xed\xfd\xd0\xd7\x86\x5d\x4d\x6e\x36\xb5\xdc\x94\x37\x04\x46\x48\x98\xa9\xb3\x5c\x2b\x4b\x5e\x21\xab\x37\x2e\xfd\xbe\xea\xc2\xfe\x37\x0e\x0c\x5f\x35\x54\xec\x0e\x3e\x69\x2d\x1c\xfb\x75\x94\x5e\x4f\x23\xf1\x85\x58\xe4\x7c\x08\x3f\x60\xa6\x10\x49\x6e\xdc\xcd\x01\x5a\x14\x6e\x1c\xf5\xb7\x2e\xe8\xe0\xa3\x67\x55\xe3\xb0\xf0\x83\x43\xf9\x1a\xaa\x0d\xfd\x20\x30\x92\x56\x33\x34\x6a\xdf\xb4\x28\xba\x39\xc8\x86\xcc\xa2\xfb\x40\x68\xf9\x30\x78\xf8\x5a\x13\x1b\x1a\x93\xa2\xe6\x20\xe7\x8f\x7a\x82\x33\x7b\x48\x86\x16\xb5\x0f\xdb\x04\xaf\x98\x41\xb6\x22\x1d\xc1\x56\x66\x4f\xca\xcf\x19\x70\x38\xea\xc8\x7b\x3f\x44\x0f\x3a\x95\x2d\xae\x27\x08\x5f\xa7\x6b\x34\x11\x15\xf9\xf2\x69\x35\x8c\xdc\x25\xc1\x07\x11\x1d\xa1\xc6\x8c\x3d\x67\xf2\xb7\x
67\x32\x63\x95\xd9\xa5\x32\x13\xac\xdf\xac\xca\x1d\x38\x6f\x17\x4b\xee\xac\xf4\x0a\x3c\x89\x91\x65\x84\x1e\x51\xe1\xaa\x8d\x71\xb7\xf2\xff\xc2\xe0\x98\xd6\x5b\x02\xa3\xbb\x01\x1e\x30\x04\x2e\x27\x2b\xe3\x0c\x90\xdf\x23\x77\x05\x7e\x9a\x64\x25\x14\x72\x45\x58\xfb\x7e\xf1\x40\x7b\x03\xc3\x3a\xd3\x6a\x68\xfd\xb1\x7b\xe8\xfa\x6f\x52\xc9\x81\xbb\xe1\xac\x2e\xe4\xc2\xc8\x4c\x37\x07\x8e\x24\x9a\x2a\x71\xcb\xfd\x02\xf3\xcc\xa1\xf4\x1a\xe6\x95\x0b\xfd\x19\x1c\x76\xe1\xec\xdb\x84\x59\x1f\xd5\xd9\x3b\xcc\xc9\x15\xcc\xbc\xea\xac\xf2\x6a\xec\xe5\x0c\x20\x3b\xc9\xc4\x8f\x4d\x66\x9c\x91\x90\x6e\x5b\x81\x45\xcb\x18\xac\xeb\x13\xd2\x1a\x78\xfd\x73\x0d\x88\x49\x11\xf4\x56\xa8\xa6\xbe\x81\xb0\xd3\x9c\x1d\xe2\xf1\x68\x97\x6d\xd4\xeb\x83\x0c\xbc\x0f\x7b\xb7\x1b\x45\x1e\x12\x16\x9f\x06\xce\x5c\x83\x00\x8e\x1e\x37\x42\x6a\xab\x13\xa9\x71\xb7\x89\x59\x2e\x7a\xf6\x83\xc5\x8d\x7e\x65\x44\x96\x6b\x2d\x8d\x13\x7e\xd5\x60\x8d\x2e\x50\xb6\xb9\x81\xe3\x74\xdd\x6d\x2c\x7c\x0d\x22\x3d\x90\xd6\xe8\x75\x9d\xb1\xda\x50\xbe\x87\xb1\x58\xbc\x43\x27\x33\x22\x74\x35\xe8\xe5\xe5\x1c\x92\x18\xf5\x7a\xcb\x51\xfe\xa5\xea\x88\xa3\x83\x5f\x48\x73\x28\x3f\x29\x5f\xd0\x67\xae\xd9\x76\x7a\x39\xec\xe1\x4a\x10\x1b\xad\x4e\xc8\xe2\x98\xe0\xf0\xcc\x52\xc9\x02\x1a\xa2\x71\x70\x75\x37\x04\x02\xe3\x93\x98\x1c\xbe\x55\x91\x78\x88\x97\xa1\x72\xfa\x05\x01\x8c\xbb\x00\xbb\x4b\xba\xae\x5b\x49\x4f\xd7\x6a\x68\x02\x3b\xaa\xda\x1c\x19\x0f\x90\xe5\x57\x58\x31\xa3\x45\xc7\x16\x61\xe8\xe9\x98\x53\xc7\x53\xa4\x8e\xd1\x54\x67\xfb\x79\xac\x9e\x21\xc0\x7f\x87\x71\x91\x32\x5b\x40\x19\xf8\xee\xb5\x25\x54\xcf\x0c\x6c\xc7\x98\x6d\x4e\x50\x8c\x02\x5e\xf8\x52\x09\xeb\xba\x7f\x3e\x03\x94\x49\x75\x0f\xfc\x2b\x24\xfc\x7b\x05\xf7\xc2\x7b\xa8\xdc\x16\x6b\xac\xee\x0b\x51\x7e\x71\xff\x3a\xf2\xd8\xc1\xcd\xe2\x03\xf9\x1a\x28\xfd\x12\x30\x2b\x1f\x93\xef\x56\x41\xaf\xad\x33\x22\x79\x6a\x3f\x9d\x22\xcb\xdf\xad\xd2\x5f\xc7\x5f\x5c\x85\xc8\xa4\x36\xc5\xb6\x00\x93\xbf\x1f\x5b\x90\x1e\xc7\x28\xad\x53\x93\xfc\xf4\xc6\x8a\xaa\x95\xc6\x63\x93\xe4\x78\x35\xe0\x
e9\x18\xcf\x67\x15\xd3\x37\xa0\xc4\x87\x7b\x92\x1b\xcc\x87\x7b\x8e\x0b\x42\xbc\x51\x55\x2f\x1b\x17\xec\xea\x22\xd1\x16\xbc\x7a\x48\x91\xb0\x79\x86\xff\x33\x75\x6d\x75\xd4\x72\xab\xcc\x05\x95\xe5\x91\xc2\x16\xc0\x6f\x59\x3c\xb9\xf4\x50\x75\x36\x08\x33\x68\x60\xb2\x6a\xd2\x28\x6d\x29\x79\x93\x4e\x63\x1d\xdc\xe2\x44\x61\xc5\x8f\x05\xa8\xca\xb8\x04\xfe\x8d\xa9\x89\x3f\x53\x2b\x7c\xe8\x2e\x34\x55\x70\xe6\x81\x82\x19\xf6\xe0\x6c\xb5\x81\xc2\x29\x84\x2e\x75\x1d\x01\xc7\x88\xd4\x0d\xb9\xde\xa6\xc8\x88\x93\xd2\x0d\x9b\xb5\x21\x9d\x28\x5a\x93\x3f\xe3\xf5\x55\x67\xae\x59\xb5\x57\x64\x75\x8d\xbc\x18\x04\xf7\x39\xbb\x80\x9f\x98\x72\xc5\xc1\x2d\x4d\xa9\x70\x69\x5e\x88\x30\xbc\x3d\x06\x2f\x92\x2f\x5d\xdd\x9a\x2a\x82\x8a\x3f\x33\x50\x64\x50\x61\xff\x8b\xb6\xde\xea\xe5\x25\x81\xac\x2e\xf8\xcc\x6e\xe1\xeb\xbb\xc6\x7d\xc0\xc4\x1e\x62\xaf\x65\xe0\x2d\x3d\xf7\xc4\x96\xce\x53\x2a\x8c\xb8\x73\x76\x5e\xef\xee\x3c\xf6\xbe\x10\x52\xd4\x02\xaf\xe2\x92\x37\x8a\x97\xc1\xd8\xff\xb8\x63\x69\x8a\xe9\x49\x3c\x77\x7c\xe7\xab\x30\x9e\xa3\x32\x82\xc8\x6a\xcc\x40\x07\x9f\xbd\xa7\x46\xed\xab\xe7\xd0\x09\x20\x3e\x11\xd5\x5d\x65\x15\xfb\xdd\x6c\x47\x6f\x3d\x20\x25\xd5\x46\x6c\x04\x74\xe5\xa7\xc0\x3b\x4e\x2d\x15\x6e\x20\x45\x9c\x9e\x91\x74\x97\x58\x55\x1d\x98\xdd\x90\x42\x8b\xd0\x6f\x23\x38\x20\x4a\x55\xb6\xeb\x18\x08\x0b\x5f\xa3\x14\x04\xe1\x96\xbc\x6a\xa9\x34\xc8\x36\x4b\x81\x9f\xd2\x78\x10\x1c\x68\xed\x01\x5d\xc2\x98\xa4\x9a\xc5\x43\xec\xb2\xcf\xb5\xf5\xf5\xa8\x6f\x71\xc3\x81\xec\xf3\x98\xa7\xe1\x44\x33\xa2\xc8\x3c\x74\x54\x5b\x95\x00\xe1\xd8\x8c\xa6\xbb\xae\x3a\xe8\x02\x2d\x21\xee\x75\x43\xea\x70\x40\x91\x61\xd4\x05\x90\x8a\xab\x79\xf8\x70\xd2\xdf\x6b\x7f\xd8\xbd\x48\x13\xb2\xc8\x22\x94\xce\x53\x33\x01\x9e\xcd\x20\xe5\xbf\xb8\x97\xde\xb7\x4c\x00\xfc\x95\x99\xa6\x17\xc4\x8a\xab\xd6\x80\xd6\x87\xac\x55\x7a\x9c\xeb\x5f\x18\x57\x00\xe4\xd5\x47\x6e\x75\xa9\xc1\x63\x61\x5b\xeb\xa1\xce\xf9\xd4\xb1\x3a\xc5\x33\xd8\x54\x01\xce\x09\x06\x6b\x03\x81\x89\x8d\x5d\x07\x83\x77\x93\xa7\x92\x6a\x26\x0c\x7f\x19\xbd\x30\x
93\xf3\x5e\xf4\xb9\xc1\x87\x96\xad\xca\x82\x60\x3f\x24\x55\x0a\x51\x37\x1f\x6a\x8f\xbd\x94\x9d\xe9\x99\xb0\x8b\x95\x46\x95\x0e\xe0\xba\xe3\xe4\xe0\x86\x1b\xf5\x16\xd2\xd6\x6c\x9a\x6b\x5d\x06\x28\xb8\x0e\xab\xd4\x32\x6d\x96\x23\x6f\x50\x81\x93\x5d\xc9\xa4\xd9\x72\x6c\x8d\xdc\xaf\xb1\x19\x11\x50\x57\x98\xa7\x0d\x63\xa6\x72\xa7\xf7\xea\x96\xad\xdb\x3b\x6d\x08\x4b\xce\x0d\xc3\xe3\x21\x84\x05\xe4\x01\x4d\xfd\x47\xf4\xe8\x07\x3a\xaf\x59\x9f\x82\x82\x06\xf3\xad\xb0\xbd\xf6\x1d\xc6\xb0\x3d\xac\x43\xa3\x59\x18\x86\xbd\x58\xbe\x8c\xa7\x09\x03\xba\xd6\xc0\x5f\x19\xc0\xce\x2e\x7f\x00\x73\x76\xb6\xc3\x7a\x51\x80\xf2\xdd\xbb\xd8\xfb\x2c\x68\x38\x64\xd9\xfc\xfe\x92\xff\xf1\x58\x9e\x3f\x4f\xd8\x50\x98\x52\x23\x75\xfe\xd1\xc9\x43\xa9\x34\xcc\x4d\x69\xb6\x50\xda\x31\x44\x8d\xd2\x44\x5c\x2c\xcd\xb4\xce\xaa\xcd\x0f\xf2\x81\xb5\xae\x0e\x00\x8e\xaf\xb1\x61\xf3\xec\x58\xac\xa2\x3d\xab\xb6\x76\xfe\x51\x91\xb4\xa2\xde\x2f\x74\x08\x44\xb6\x61\x95\x8d\x8b\xa1\x4c\x6c\x50\x16\x15\x68\x5f\xe8\x4d\x96\xf9\x17\x69\x9e\xea\xa7\x6c\xab\xef\x70\x39\x90\xba\x13\x4d\xed\x17\x7d\x18\x92\x72\x21\x26\x63\x5a\x1a\xca\x5c\xee\xe7\xc9\x6d\x22\xc4\x4e\xd6\xd5\xf4\x13\x24\xb9\x68\xc5\xe4\x3d\x47\x8a\x7e\x74\x2e\xa0\x61\xea\xe2\x40\x4c\x8c\x71\xd3\x45\x7d\x23\x2d\xaa\xe7\x0c\xc3\xd0\x4d\xf1\x1e\x66\xf2\xe4\xbd\x99\x56\x8f\xb2\x36\xe8\xe7\x2e\x38\xdf\x6a\x30\x8e\x88\x03\x03\x81\xb1\xf4\xef\x3a\x35\xb6\xc5\xf2\x67\x35\x19\xe7\x11\x15\x6c\x6d\x3a\x6c\x52\xc1\x09\x7a\x86\xe0\xeb\x3b\x8b\x35\x92\x52\xe0\x33\x32\xfc\x20\x8e\x39\xac\x1f\x9c\xd1\x91\x9d\x51\x90\xa5\x39\xfc\xac\x5d\x95\xad\x32\xc8\x9f\x23\x63\xde\xcd\x24\x53\xb8\x0e\x91\xc4\x98\xe2\xa7\x7d\x37\x31\xa5\x31\x22\x3e\x22\x91\x2e\x61\x0e\x38\x04\xc8\x81\x29\x05\xa4\x08\x27\x0b\xff\x95\x82\x93\x40\x58\x33\xb5\xf0\xdb\xf0\x3c\xeb\xae\x4b\x83\xb3\xd7\x5c\xfc\xa4\x80\xa1\xe1\x8f\x5c\x72\x2c\xc5\x01\x51\x63\xd9\x77\x6a\xab\x01\x67\xad\x9a\x50\x6d\x53\xe6\x48\xa5\xa2\x0d\xb6\xd8\xb4\x70\xa3\x3d\x31\xe4\x94\x45\x0c\xbf\xc5\xd5\x1a\x8f\x1d\xc0\x7b\x89\x92\x82\x12\x
c0\x9e\x78\x48\x46\x83\x3f\xb9\xe1\xea\x03\x4d\x86\x48\x19\x61\x8a\x06\xda\x23\xbc\xc6\xd3\xfd\x26\x9a\xc0\x75\xf0\x8a\x51\xbf\x84\xf5\xab\x5a\xb3\x62\xd5\xde\x72\x6f\x5a\x0c\xd6\xfb\x28\xbb\x9d\x8c\x2a\xb4\xaf\x37\xc2\xcb\xcd\x93\x9f\x77\x94\xb9\x77\x8d\xce\x6b\x67\xdd\xb6\x1f\xea\xb5\x0b\x87\xc9\xdc\x82\x6b\x58\xd1\xd3\x9f\x5b\x45\xb8\x31\x94\x6f\x5a\x4a\x96\x15\x45\x3c\x50\x61\x28\xe9\x98\x80\xef\xd7\x2f\x2d\x2a\xe3\xef\x7a\xd0\x3b\x3d\xcf\xa4\xaa\x1d\xa4\x2f\xaa\x9d\xec\x43\xb1\x64\xb0\x5b\x71\x84\x03\x8c\xdc\x71\x01\x15\xe0\x7d\x29\x08\x6f\x07\x9b\xa7\x51\x44\xca\x53\x7a\x55\xe4\x40\x0b\x35\xf2\x9e\xdf\x0b\xee\xa8\x7e\x5d\x53\xd2\xc2\x42\xe3\x6d\x97\x32\x63\xe8\x5c\x41\x2a\x61\x93\xce\x0b\x9d\xd7\x63\xf3\x97\x8e\x5f\x67\xcb\x07\x2d\x53\xcf\x57\xa8\x62\xb4\xec\x94\xe0\xc5\xd1\x0d\x94\x01\x79\x9b\x7e\xfa\xa8\xdc\xd7\x18\xbd\x3f\xcb\xbb\xab\x7e\x66\x03\x09\x02\xec\xc1\xf6\xb6\xaf\x6a\xef\x8d\x0a\x1c\x95\x95\xdb\x41\xf8\x18\x83\xa6\x29\xda\xa0\x27\x47\x9d\x33\x05\xd7\x5b\x8b\x2d\x86\x26\xa4\x7a\x3d\x08\x4b\xa4\x35\xbc\xa5\x0c\xcc\xd4\x70\x89\x89\xd2\xae\x07\xf4\xac\x06\xf8\x74\xed\x29\x81\x66\x8a\x93\xd7\xae\x80\x27\x3e\xe0\xab\xf2\x4a\xf4\x11\xe8\xa7\x75\x8d\xc1\x2d\xfd\xf0\xb6\x3f\xb3\x05\xd0\xba\x3b\x69\xdc\x73\x76\x48\xe0\x3a\x6b\x08\x65\x1e\xfa\x4c\x02\xc7\xf3\x95\xb8\xfa\x92\xd6\x06\x4d\x20\x8c\x15\x5e\x89\x6d\x24\x29\xbf\x61\xd8\x1e\x15\xc2\xcb\x72\x36\x5f\xcd\x58\xfe\xe0\xe7\x06\xa2\x6d\x2c\x7f\x92\xb3\x1a\xd9\xdd\x3e\x88\x13\x1f\x3a\x88\x23\xe0\xe9\xfb\x06\x9c\x9f\xfa\x31\x0f\x3e\x27\x7c\x5b\xa6\xf4\x2d\x7d\x69\xcb\x6a\x02\xc1\x06\x0c\x6b\x09\x9e\xa1\x7a\x38\xd6\x20\xb7\x85\x7e\x0f\x83\x28\x5e\x44\x16\x06\x5b\x81\x5b\x54\x72\x13\xb2\x7c\xab\x80\xdc\xd4\xd5\x5d\xa6\x76\xbc\xd2\x39\x9e\x3f\x91\x35\x39\xc8\x25\xfa\xc5\x3d\x3b\x8a\x88\xc3\xc0\x53\xa0\x9a\x7a\xd2\xe2\xfa\x52\x4b\x82\x6e\x91\x19\x54\x1a\xa8\xe8\x2a\x1c\xb8\xae\x93\x0b\x41\xa4\x35\x7f\x70\x46\x5f\xbc\x77\x34\xd5\xb7\x06\xcc\xb7\x49\xcd\xf8\xc0\x8e\x16\xe9\xdc\x71\x4d\xd5\x74\x58\xcb\xd8\x0a\x9a\x
f3\x76\x93\xfc\x74\x5e\x45\x3e\x2c\xc4\x2e\xfe\x87\x43\x3c\x8f\x70\xa7\x5a\xc2\x96\xfc\x43\x64\xad\x4d\xcb\x9d\xc7\x9a\xa7\x63\x86\x9b\x33\xd4\xe7\x75\x3a\xfc\x09\x13\xc2\xa6\xf2\x96\x24\x28\x37\x50\x40\x20\x02\xad\x3e\xd2\x98\x0b\x51\xe7\x42\xd9\xa5\x44\x13\x4b\x84\xb3\x16\xdc\xae\x27\x69\xee\xb3\x44\x63\x52\xe6\xd0\xac\x2b\x78\x4b\x0e\xc6\xf5\x48\x9c\x0d\x2e\xc6\x7b\xda\x86\xc5\x9f\x07\x30\x7a\xc8\x0c\xe5\x9c\x2f\x26\x51\x1d\x9c\x8b\xc2\x01\x94\x8e\xdc\xe2\x82\xb7\x2d\xac\xdd\xf8\x0e\x16\x22\xaa\xd0\x59\x13\x4a\x58\x94\x44\x7d\x22\xaa\x76\xaf\xdd\x38\x38\x66\x2f\x1e\x8f\x09\x5f\x98\x06\xd2\x40\x83\x93\x0e\x5c\xe8\xa0\x51\xd4\xda\x89\xbf\x26\xc7\x9d\x37\x16\xb3\x0c\x84\x32\xb3\xef\xcd\x0b\xa6\xaf\xec\x3a\xd0\x50\xb9\xf2\x3b\xa3\x0e\xe1\x48\xd6\x82\xb2\xcb\xb8\x3a\x13\x90\x76\x27\x6e\x69\xa9\x02\xdc\x1f\xee\x8a\xe9\x53\x6a\xb0\x2c\x05\x42\x91\x1b\x55\x04\x26\xbd\xa8\x52\x4b\x5e\x9a\xd6\x27\x75\x28\x5f\xbd\xf8\x1b\xf7\x0c\x0e\xe2\x5b\x2b\xc5\x0b\xdd\x96\xbd\x46\xa3\xe8\xb5\x1a\x1e\xf5\x53\xb1\x52\xae\x52\xd0\xb9\x6f\x5e\x28\x4f\x0e\xd6\xcd\x62\x21\xa6\xf5\x4e\x43\x75\x5c\x15\x20\x3f\x8d\xde\x6c\x19\x52\x0f\x93\xa3\x9c\x27\x3d\xf0\x70\xb3\x8e\x9b\x53\xe7\x19\x29\x7b\x71\x7f\x83\x89\x13\xcc\x1e\xfc\xec\x8a\xd8\x15\x80\xc3\x2a\xdc\x9e\x41\xc3\x44\xf0\x8c\xbd\xd1\x1f\xbc\x13\x30\xf9\x7e\x70\x99\x3b\x93\x45\x1f\xaa\x12\x30\x06\x2f\xfb\xa8\x04\x5f\x6d\xa1\xbe\xfc\x2b\x5e\x07\x4a\xf4\x49\xa8\x28\x00\x64\x35\x56\x1f\x8a\x6c\x77\x60\x08\x5c\x58\x15\xb6\xf4\x68\xa1\x16\xc6\x77\x67\x7a\xe1\xd9\xf8\x74\x4a\x49\x9e\x8f\x87\x45\x24\x3c\x35\x0d\x3f\x21\xfb\xf9\x16\xbe\xce\xb0\x0b\xed\x3b\x2d\x74\x1c\x48\x49\x13\xac\x8b\xcf\x76\xf4\x82\xd7\xe4\xaa\x31\x25\x2c\x48\x0a\x04\x0d\xe7\xf1\x75\xdf\x32\x1a\x00\x1f\xdb\x47\xd3\x54\x33\x02\x62\xf4\xe1\x19\xe3\xd4\xdf\xc7\x5f\x58\x53\xaf\x0c\xc0\xae\x07\x6d\x4d\xe7\xfb\x77\x34\xfd\x42\x5f\x8f\x53\x42\x9d\x35\x8b\x10\x8d\x68\x3e\x44\x40\xd3\xb6\x9b\x7a\xfb\x28\x41\x17\x03\xd8\x11\x15\xe2\x56\x4c\x4a\x34\x2b\x57\x03\x53\x23\xf1\x26\x26\x0d\x67\x
a8\xa6\xe8\xa1\x11\x6b\xce\xb6\x5b\x91\x77\xbc\x37\xf2\x66\xd6\x63\x34\x87\xda\xdc\x47\x9d\x58\x3c\xf5\xc6\x42\xc1\x24\x65\x33\x12\x58\xc5\x42\x44\x1f\x00\xbc\xd4\x9a\x87\x83\xa2\x75\xad\xc5\x62\x19\xf8\x65\x86\xf4\x24\xca\x03\xb8\x01\xa5\xbf\x08\xe8\xd2\x01\x3b\xf9\x9b\xfb\x9b\xda\x90\xa9\x22\xa4\xec\x7a\x1f\x02\x32\x5d\x48\xf9\x28\xa6\x4f\x6a\xf3\x45\x36\x16\xa9\x0d\x93\x0a\xa8\x14\xd5\x0d\xe2\x1d\xdc\x33\x03\x65\x62\x6a\x2d\x2e\x6a\x1d\x7e\x83\x04\x2f\xe9\x97\xb1\xc5\xe4\xb7\xfb\x97\x0e\x06\x2c\xc2\xa0\x42\x36\x30\x4d\x46\x9e\x27\x0a\x64\xa2\xc0\xdf\x1a\xd1\x6b\xcc\xa2\x40\xbf\x4d\xd3\xcf\x11\x37\x94\x71\xa8\x9f\x7b\x68\xb3\xa5\xd2\xe4\x48\x81\x21\x2a\x3f\x3e\xfe\xd5\xae\x78\x75\x9a\x24\x82\xda\xfd\x82\xbb\x28\x69\xda\x89\xb7\x17\xc5\xd2\x7a\xd9\xbb\x61\x80\x97\x0d\x91\xc6\x3a\x6e\x82\x6a\x5a\x08\x40\x64\x0f\x45\xa5\x1a\x9a\x00\xdf\x51\x46\x0e\x20\xad\x35\x9d\x68\xa2\x31\x9b\xa4\xc6\x5f\xa7\x62\xfe\x9f\x54\xb4\xfd\x21\xab\x20\xfa\x1c\xc9\x4c\xbc\x1a\xf8\xfd\xa0\xdf\xe0\x1d\x4b\x3b\x13\xd3\x5f\xdc\x0c\x76\xf9\xf3\xf3\x11\x58\xae\x6f\x15\xcb\x6e\x61\x28\x18\xdf\x15\x75\x44\x69\x44\x19\xca\xc3\x84\x5f\x79\x1a\x03\xe0\xaa\x21\xbb\x9c\x74\xe3\x88\x06\x5e\xcd\x90\xa7\x03\x6c\xe3\xc3\xc8\xa2\xf4\xb9\xdd\xb3\xf3\x61\xb4\x40\xb9\xb3\xa3\x1e\x96\x98\x63\x4d\xc9\x9e\x94\xbf\x68\x9c\x60\xfb\x45\x4d\xb7\xf5\x00\xc6\x82\xf5\x24\xa0\x83\x36\x34\xeb\x99\x80\x8b\xd6\xe6\x42\xce\x6c\xa1\x89\xc6\x80\xa9\xe0\xe5\xd2\xab\x18\x68\x97\x33\x35\x48\xf6\x1d\xe1\xe4\xe9\xd0\x1f\x92\x60\x73\x54\xca\xd9\xcf\x1b\x60\x2c\x91\x61\xf0\x4e\x5f\xe8\xcd\x9a\xfd\x51\xb7\x11\x2e\x37\xea\xdc\xcd\xd2\x92\x05\x9c\x28\x01\xd2\xc7\x2c\xa4\x29\x29\x40\xc1\xd7\xcc\x54\xd1\xf0\x01\x9e\xe5\x38\x29\x96\x1f\x78\x22\x05\xf3\x8a\xb6\x8a\x42\x68\x82\xf4\x78\x53\xe9\x7a\xef\xc9\x28\x07\x0c\xbb\xfa\x30\x8b\x4f\x9d\x8e\xc9\xcf\x38\xda\x8c\xf0\x07\xfd\xd0\x0e\xe1\x1d\x99\x18\x65\xc9\x86\x74\xbb\x34\xca\x73\xd8\xde\x05\x2f\xdd\x69\xf9\xcf\x77\xf5\x0a\x79\xdf\x88\x17\xfb\xf0\x1c\xc3\xba\xd0\xc0\xde\x8e\x99\x6d\xae\xa5\x
91\xc6\xb9\x61\xed\x12\xb0\xdc\xf7\x66\xa2\xe7\x3b\x3f\x19\xa9\xc0\x9b\xb2\xaa\x7e\xb1\x24\x10\x31\xe4\x53\xdb\x62\x0d\x99\x53\x6b\x98\x89\x08\xd5\xb2\x55\xc5\x26\xad\xf2\xa8\x5c\x81\xe8\x1c\x87\x39\x66\xa8\x00\x2d\x27\x4c\xb9\x28\xc4\x82\xa5\x73\x4d\x25\x19\x35\x95\x89\x6f\x61\x0c\x02\xb6\xa1\xe6\x08\x0d\x94\x2f\x70\x60\xc9\x47\x8c\xa7\xb3\x71\xe1\x05\xf6\x3a\x3e\xdb\xab\x52\x04\xb6\xae\x8e\xf5\xa1\xa3\x39\xad\x0f\x76\xd6\x85\x34\xe8\x72\xb6\x71\xcd\xf6\xed\xd3\xc1\x7b\x00\xac\x01\x75\xfa\xff\x69\x4f\x1a\x75\xab\x09\x8b\x30\x0c\xbc\xe2\x89\x11\x06\xb8\x6a\x07\x9d\x68\x40\xe5\x93\x3f\xa4\x25\x87\xe9\x82\x79\xc2\xd4\xe0\x0c\xa4\xec\x3a\x3a\xe5\x2e\x89\x9f\xf9\x54\x58\x45\xc9\x1b\xec\xa7\x9a\xf4\x8c\xbf\x7f\x06\x8c\xca\x5d\xf5\x67\xfd\x1a\x72\xdf\x7a\x02\x87\x93\xdf\x87\x9c\x6b\x15\xf2\x1d\x60\xb9\x4f\x54\x87\xcb\xbc\x86\x7d\xeb\xac\x64\x5e\xcc\xba\xbe\xd0\x11\x0b\xd3\x18\x57\xae\x84\x61\xcd\x36\xb5\x3d\xbf\x90\xcf\xfa\x8f\x67\x67\x30\xeb\x28\xd3\x2a\x5f\x23\xd9\x18\xd4\x39\xce\x49\x36\x91\x8f\xe9\x49\x97\xfd\x11\xff\xc4\x0b\xca\xca\xed\x53\xa2\x84\x2c\x59\x2d\xe4\xf0\x95\x83\x98\xeb\x7b\xe8\x62\x94\xea\xd0\x7c\x11\x0d\xa6\xe9\x1a\x6e\x63\xf3\xcb\x3e\x95\xf8\x52\xf7\x4b\x62\xba\x09\xb4\xff\x65\xa3\xde\x3c\x5e\xee\x4e\x14\x5b\xc2\x65\xa4\x20\xbc\xa0\x7f\x3b\x0b\x78\xab\x7d\xaa\xfb\x67\x51\x77\xf4\x5b\x1b\xe5\xe3\x0d\x20\x5a\x47\x38\xab\xb8\x08\x7f\xee\x82\x13\xea\x33\x3d\x11\xf9\x3c\x43\xcd\x27\x82\xda\xee\x72\x62\xaa\x7b\x9d\x08\x69\xba\x58\xca\x8c\x18\xe6\x75\x89\xb3\xe1\x41\xf1\xa8\x42\x7d\xb3\x67\x55\xd5\x4e\x3f\x1c\x4c\x8b\x73\x65\x07\xad\xc6\x6f\x24\xe9\x64\x1f\xcf\xbc\x6a\x8f\x52\xa0\x2e\x56\x33\x2c\x5b\xbf\xf1\x5b\x7f\xc1\xc3\x67\x92\x7f\x1c\x7a\x42\x63\x43\xd8\x84\xda\x90\x2d\x1c\x0c\x0f\x90\xd8\xc0\x33\xc1\x98\x6b\x95\xfa\xdc\x15\x8b\x8b\xf9\xc2\xf3\x3b\x82\x4c\xce\x1c\xff\x5a\x92\xf0\x6c\x31\xd4\x24\xeb\x74\xb0\x49\xd9\x21\x7f\xdf\x73\x54\x99\x87\xca\x2d\xed\x78\x83\xab\x30\xa4\xfa\xa3\x2a\xc3\xe6\x12\xcd\xf2\x6c\x48\xc9\xb2\x91\x35\xbd\xa9\xe5\x4a\xdc\xd7\x
3e\xbf\xe5\xff\x3e\xc5\xb4\x91\x3f\x2a\xce\x46\x8c\xf8\x6c\xc2\x7e\x9d\x5a\x11\x3a\x23\x2b\xed\x0b\x55\xcd\x65\x2f\x1f\x56\x58\x07\x6f\x94\x94\x26\x61\x88\x9e\x5b\x08\x76\x2a\x06\x18\xd9\x71\x3e\x18\x64\xd1\x1a\x84\x7c\x59\x1b\x65\x8a\x64\x51\x69\x63\x4f\x37\x61\x4e\x23\xad\xfc\x34\x4f\x11\x4f\x08\xc1\xa4\xdc\x61\xf7\x6e\xdb\x6f\x9d\x81\xba\x61\x5b\x12\xcd\xff\xe8\x3d\xe9\xd9\x68\x7f\xfe\xd6\x5e\xf5\xe7\x40\x53\xe2\x55\x17\x34\xae\x97\xe6\x29\xaf\xea\xb3\xe8\x74\xa7\x22\xb4\x83\xb3\x7a\x49\x6b\x85\x79\xae\xca\xfc\x12\xf4\xf0\xd5\x48\xe4\xc7\xe1\xa8\x65\xab\xf3\xf7\xb3\x55\x2a\x6f\xac\x5e\x0f\x08\xed\xb8\xa5\x82\xe6\x5b\x73\x08\xcd\xa3\xeb\xc1\x81\x07\x00\xc8\x1d\x9c\xaf\x9f\xc8\x90\x99\x7d\xe6\x97\xf9\xa2\xdc\x2c\x13\xa2\xf8\x1a\x99\x65\xa6\xd2\x5a\xdf\x65\xc9\x5a\x90\x9c\xa4\x7d\x7f\x6b\xaa\x89\xb8\x10\xb3\xd9\xfb\x50\x57\xd4\x36\x36\x0d\x69\x99\x81\xe7\xb3\x7a\x0a\xbf\x10\xb0\xf6\xa7\x92\x78\x3d\x45\x68\x84\x27\xda\xfa\x7f\x3f\x5f\x95\x36\xc1\x3f\x32\xdd\xca\x43\x3e\x7c\x83\xb5\x7d\xdb\x91\xf0\xa5\x17\xd9\x9b\xd1\x27\x9b\x56\xd0\x06\x4f\x75\x71\x43\x43\x8a\x06\x63\x05\xc2\x72\xd8\x60\x9c\x41\x62\x5d\x6b\xb7\x09\x91\xe3\xe1\xca\x00\xe7\xa9\x1b\x90\x9a\xe7\x85\x0d\x67\x0f\xff\xc0\x6c\x86\x99\x88\x86\x46\x5a\xc8\x3c\x58\x25\xb3\xa3\xe0\xdf\x9e\xab\x8c\x6e\x2b\x62\x12\x3f\xfb\x2c\xd9\x47\xd3\x7b\x2c\x8c\x2d\x74\xff\x46\xbd\x56\x16\x03\xb0\x6d\x4c\x7a\x43\x33\xd7\x14\x5a\xbe\x2a\x60\xb1\x9e\xb1\xa2\x81\xa7\xd0\xda\x20\x5a\x36\xa7\x4b\x8e\xfd\x58\xdb\xd6\x20\xf9\x7d\x4f\xf4\x91\xc3\xc5\x61\x61\x18\xf9\x64\x99\x36\x33\x85\xd7\xe4\xe8\xfd\x5c\x6c\xc4\xe7\x0e\xeb\xeb\xe9\x0b\xb6\x4f\xd8\xea\x03\x49\x17\xe8\x35\xe3\x2b\x61\x94\x62\xe9\xbc\x16\x2d\x81\x32\x5d\x7b\x0a\x0e\xa7\x7b\x48\x18\x69\x84\x4d\x03\x77\x41\x74\x62\x0d\x45\xde\xf2\x05\x6f\x3f\x54\xa3\xfb\xb8\xd9\x9f\xbd\x6d\x15\xb3\xec\x9a\x2b\x8e\x81\x3e\xd8\xf9\xf3\xc0\x8e\xa1\x68\x90\x0f\x1e\x0a\x9a\xc3\x28\xeb\xd1\x8e\xe9\xab\xba\xdf\xd1\x34\x2e\x4f\xb7\xfe\xad\x99\x04\x99\x2d\x33\x2e\x64\x2a\x52\x7b\xd6\x16\x3c\x7f\x
00\xfa\xbe\xe4\x2b\x4c\x4d\xfa\x11\xff\x5e\x6d\x81\x66\xee\xba\x24\x42\x09\x66\x0f\xd8\xe7\xa3\xdf\x11\xd5\xe8\x28\x3a\xf9\x8e\xc1\x5c\xe0\x5a\xa8\x22\x51\xe5\x0e\x4e\xf5\x38\x4e\xbe\x91\x96\xa8\x89\x87\x88\xba\xeb\xec\x44\xc1\x20\xc7\x99\xcc\x8b\x33\x0a\xbd\x04\xc7\xcc\xa2\x8b\x10\x70\x1d\x8c\x00\xbf\x99\x99\xe4\x40\x00\xcf\x0f\x41\xd3\x08\x9b\x19\xe5\x99\xbd\x53\x99\x27\xba\x5c\x5a\x50\x7d\xac\xf7\xab\xcd\xa8\x23\x40\x0a\xf8\xfd\x00\x97\xd2\x12\xd8\x5d\x37\x9d\xfc\xdb\x64\x21\xb2\x7f\x59\xf0\xc5\xaa\x20\xe7\xf2\x50\xae\x0c\xa2\x6d\x10\x94\x07\x35\x3c\xa9\xe7\x87\xa5\x50\x45\x46\xcd\xa6\x21\xbf\x5b\xfe\xec\x34\xb3\xe3\x47\xad\x3f\x7f\x0f\xa8\xcc\x87\x39\x3a\x0a\xbc\x9c\x0c\x9f\xe2\x33\xeb\xaf\xf5\xf8\xf3\x41\xb3\xc8\x47\xbe\xab\x9b\x66\xc1\x6e\xfd\x6d\xb1\xfd\x4f\xc9\xa4\x5b\x09\x39\x14\xf3\x23\xcb\x68\x52\xbd\xb7\xcf\xff\x91\x78\xdd\x06\xbe\x60\xa7\x91\xdc\xc8\x50\x73\xfa\xfa\x42\x9a\x46\x5c\x6a\xd3\x0a\x20\xf6\xc3\x84\x18\x10\x1b\x77\xd1\xe0\xe7\x19\x74\x52\xf6\xa3\x09\xcc\xb7\x93\xd2\x27\x27\x69\x7a\xb0\x8c\xb7\xdc\x60\x94\xf0\x1c\xda\x23\x75\x40\x26\x7d\x3b\xb8\x98\xee\x8e\x71\x8d\xc3\x3b\x34\x2e\xce\x92\xed\xd9\xd9\xc4\x9b\x53\x19\x4e\x7b\xad\xa2\x79\xca\x74\xe3\x5b\xb6\x04\xe5\xbd\x30\xe5\xd8\x84\x27\xda\x3f\x65\x2c\x3b\x75\x76\xb1\xfb\x2a\xd2\xfc\x6e\x75\x5e\xe3\x31\x61\x48\x3c\x55\xeb\xb4\xcd\x20\xfc\xa1\xac\x39\xd6\xfd\x0b\x29\xc5\xb2\x24\x55\xc1\xd3\x87\x90\x72\x14\x9d\xa3\x63\x4e\xe1\xc7\x63\xe6\x9e\x5e\x58\x8b\xda\xd9\x3e\x4c\x9f\x99\x65\xd3\xde\x80\xfa\x27\x8c\xe8\x41\xba\xe1\xf0\xc2\x18\xc5\xfb\x88\xd4\x19\xe5\xad\x36\xa8\x6c\x2d\x42\x0a\x68\xe2\x8a\x0c\x23\xf1\xf2\xec\xbe\x29\xae\x56\x13\x46\xd5\x5a\x62\x57\xd1\x62\x84\xcc\x42\x04\x02\x3b\xff\x54\xb8\x5a\x9c\x1f\x3d\xf0\xa4\xc3\xcd\x14\x70\xe7\xd4\xc4\x7e\x8a\x7a\xd7\xfc\xfb\x35\xa7\xc5\x7a\xc0\x1d\x5a\x3f\xe7\x9e\x78\x2b\xad\x1f\xb7\x27\x16\xc3\xd3\x22\xec\x4e\x7c\x6d\x6b\xe3\xc2\xdf\x11\x86\x49\x1a\x0c\x05\xc0\xec\x54\xd0\x6b\x78\x3a\x8d\x9f\xa6\xc6\xc0\x10\xc9\x40\x59\xdf\x80\x08\x98\x04\x6c\x01\x
cf\xcb\x5d\x0c\x9a\x29\xe9\x56\xf1\x40\xac\x9c\x5e\xa3\xc2\xf7\xc5\xf3\xa9\x47\xe1\x2e\xf4\x21\x02\x00\x66\x07\xf4\xd3\x10\x04\xa5\x52\x8b\xb2\xe8\x43\xed\x89\xe3\x70\xec\x06\x65\x5d\x66\xfe\x32\xad\xa3\x46\x1c\x61\x18\x97\xd9\x72\x36\x2a\x71\x1a\xa3\xde\x1b\x1a\xa6\x1d\x51\xbd\x8c\xfa\x41\xe0\x3c\x5c\x87\x41\x55\x6f\x42\xf6\xb5\x29\x05\x52\xa7\x81\x1b\x69\x13\x54\xb3\x72\x15\xeb\x20\xfd\xf7\x34\x50\xd8\xc3\x1b\x5a\x5e\xf1\x11\xac\xb6\x59\xb1\xbf\x77\xb2\xc0\x4c\x4c\x85\x09\xbd\xf0\x9e\xa9\x0e\xb9\xb3\x9b\x72\x44\xeb\x30\x0d\x97\x4f\x35\x3d\x4a\xac\xfc\x8d\x09\x79\xa9\x72\xff\x52\x38\xeb\x91\xb9\x57\xf9\x12\x97\x34\x52\x3a\x88\xa7\x50\x19\xb8\xe6\x56\x36\xee\x2f\xdd\xb4\x7b\xf3\x3a\x38\x51\x01\x3a\x5c\xa2\x38\x7a\x54\xc8\x03\xcc\x94\x68\x8a\x86\xa6\x79\xfb\xcb\x69\xc3\x92\xf5\x54\x77\xe2\x3e\xf6\x84\x69\xea\xe8\x65\x2b\x1a\x71\xa7\x6b\x6c\xe4\x96\x70\x03\xe8\x94\xc7\xc1\x1d\xda\xd9\x17\x7f\xdb\xa5\x0e\xe3\xa1\xa2\x1a\x84\x0a\x74\x09\xa7\xf2\x75\x54\x12\x1b\xf3\x17\x3d\x86\x9c\x81\xdf\x5a\x00\x46\xc2\x93\xb9\x2e\x29\xd1\x91\xd3\x42\x20\x52\x38\xf8\x38\xcc\xa2\x24\x01\xaa\x07\x86\x78\x43\x6e\x96\x4a\xb4\x53\x2c\xa8\xcb\x0a\x5b\x87\xd4\xdb\x77\x94\x0e\x96\x30\x92\x05\xeb\x79\x88\x73\xb5\x89\x00\xbf\xee\x97\x04\xcf\x66\x53\x51\x67\x4b\xd6\x51\xc6\x67\xb3\xe1\x2b\x6a\x00\x51\x9b\xdb\x2e\xd4\x2f\x2a\x88\x78\x77\x5a\x88\x7c\xfd\x9e\xff\xc0\x79\xfc\xf8\x45\x98\xfb\xd7\x47\x24\x06\x3b\x03\x80\x13\x8c\x2e\x1a\x60\x8c\xa6\xeb\xe2\x61\x41\x74\x5e\x3d\xf3\x68\x5b\x75\xfe\xa7\x3a\xcc\xa3\x2f\x7e\x5b\x9b\xc4\xaa\x3a\xd9\xe7\x0b\x18\xd4\xe5\x37\xb0\x0a\x76\x62\x46\xaa\x10\x25\xe1\xeb\x3d\xcb\x73\x30\x2f\x8b\xd1\x23\xbe\xf5\x53\x5e\x78\x7b\x68\xbb\x03\xe7\x39\x06\x3b\x44\x6b\xcf\x0e\xe3\x03\x43\x17\xf8\xda\x5a\xaa\xd5\x46\xae\xbb\xb8\x8f\xe9\x4a\x37\x84\x9a\xa0\xec\x5a\xa1\x70\x07\x61\xff\x79\x1c\xb2\x06\x4e\x02\x36\xe4\xc9\x21\xf4\x85\xb0\xe7\xa5\x34\x2a\xa0\x49\x20\x4c\x39\xba\x55\xcf\x9b\x02\x45\xd4\xd6\x33\x2e\xb1\xbe\xdd\xe3\x56\xf7\x9a\x4a\x42\x4a\x88\x7b\xd5\xc2\x3b\x1c\xfd\x
fb\x1d\x56\x43\x64\xa6\xe4\x9a\xc7\x3e\x58\x35\xe0\xa3\x00\x36\xbf\x06\xf8\xaa\x6c\x1b\x4d\xd7\x35\x89\x02\x53\x5e\x4b\x8a\x20\x06\xb8\x97\x48\xd0\x0b\x7f\x39\x4d\x52\x4b\x5a\xe9\x06\x6e\x08\x6a\x17\xd7\x99\x4e\xb8\xb1\x01\xe4\x6e\x02\xb5\x04\x18\x64\xf9\xaa\x2e\xdb\xed\x0e\x90\x4f\x13\x02\x6d\x73\x2f\x29\x9e\x41\x87\x15\xf4\x66\x9f\x9c\x06\x0e\xb8\x94\x7d\xf7\x6b\x9f\x68\x28\xf0\x83\x91\x8b\x8f\x09\x36\xda\xe9\x06\x4f\x3d\x79\x30\xfc\xf8\xb0\x8c\xde\xce\xbf\x78\xb4\xa8\x84\xa4\x39\x5c\x41\x0a\xe1\x37\xe4\xad\xab\x9f\x03\x2a\x83\xb7\xf7\x71\xd1\xdb\x32\xf6\xbe\x7a\x63\xff\x6b\x7a\x23\x0b\x50\x16\x46\x50\x41\x03\x4a\x28\xe8\xdc\xd7\x26\x17\x77\x56\xee\x3d\xa8\x56\xc7\x62\xf0\x4e\x19\x56\x07\x18\xa7\xd3\xd4\xa6\xfd\x91\x11\x2d\x7e\x1e\xc1\x09\xbe\x03\x28\xcb\xcc\xfc\x16\x6c\xe3\x6a\x66\xc6\xca\x7b\x31\x19\x47\x3e\x42\x56\x92\xc3\x82\x3f\xdf\x97\x98\xb9\xa3\x29\x40\x9e\xc6\xa7\x84\xf4\x98\x28\xc2\x50\x98\x54\x05\x27\xc0\xd1\x56\xcf\x18\x30\xfd\xcf\x86\xdc\xfc\xae\xc3\x23\x2d\x07\x74\x2c\x77\xb7\x8e\x8b\x68\x6c\xa5\x93\xde\xc2\xe8\xbd\x2e\x3d\x19\x98\xe1\xe8\x78\xbd\x1f\xf8\x16\xc5\x46\x12\xfe\xc9\x11\x64\x77\x35\x3a\x28\x30\x8f\xb2\x26\xea\x7e\xdd\x1e\xbe\x4d\x34\xf2\xbd\x2e\xbc\xf4\xd3\x76\x71\xac\x42\xb5\xcd\xce\x6b\xf0\x66\x58\x15\x08\xf9\x2d\x19\x8f\xe4\xa2\x6a\x4f\x64\x60\x59\x37\x1c\xc6\xf6\xb2\x3e\x22\xf1\x16\xae\x2f\x57\x23\xbe\xcd\xa7\x51\xa3\xfc\x12\xf7\x78\x9c\xe2\x9e\x9d\x8b\x01\x8b\x9d\x28\xfa\x22\xa7\xa8\x2c\x18\x35\x9a\x18\x59\x05\x1d\x7b\x2b\xe8\x32\x86\x15\x4f\x59\xa8\x05\x4b\xd9\x7f\xd5\x1a\x42\x88\x89\x10\xdd\xba\x6b\xfb\x73\x82\x9c\xc5\x73\xdb\x40\xd3\x20\x71\xe5\xb0\x87\x61\x37\x41\xb3\xef\x3c\x4e\x05\xcc\x46\xad\x97\x1f\xdf\x0d\x3f\x00\x90\x68\x57\xa3\x74\xbe\x17\x2f\xbf\x6c\x45\x50\x68\xc3\x37\xbf\x8f\x32\xea\xb8\xb2\xdf\xc4\x82\xe2\xb4\x71\x84\xa5\xb2\x34\x47\x37\x73\x86\xf6\x10\x5a\x64\xb7\x2b\x16\x33\x2e\x80\x25\x13\x15\xf9\x0d\x6f\x20\xcb\x10\x8f\x9d\x54\xa8\x05\xb2\xfb\xc2\x7c\x51\xfb\xc5\x29\x73\xbc\xef\xe8\x56\xb0\x7e\x83\xc2\x0b\xc8\x48\x
56\x4b\x23\x54\xd6\xfa\x9f\x44\xef\x0e\xd8\x88\x14\xcf\x76\x69\xef\x79\xa6\x63\x47\x39\x65\x49\x89\x0b\x3f\x6b\x8e\x34\x28\xe1\xc0\x14\x9d\x64\x13\x09\x3c\x1b\x53\xe4\x44\xfd\x2f\x02\x62\xf3\x3c\x3f\x08\xf5\xc1\x67\x90\x76\x60\x67\xef\x08\x63\x69\xfe\xa1\x70\x7c\x59\x3c\x37\x95\xfd\x36\x7e\xed\x1f\x9f\x3b\xaf\xd2\x78\x99\xcf\x8e\x91\xae\xd1\xe2\x20\xaa\xe9\xa4\x7a\xcd\xb4\x8a\x0b\x16\x66\xa0\x33\x0c\xee\xaf\xfe\x75\x3c\x67\x29\x07\x0e\x7e\xfc\x5a\x8d\x03\x4f\xfa\xa4\x69\xdd\x65\xc2\x18\x2c\xea\x4a\xc8\x70\xe8\xc2\x92\xc8\x47\x88\x72\xa8\xef\x1a\x32\x11\x14\xbe\xb2\x39\x17\x2f\xda\xb1\x00\x4f\x86\xbf\xb3\x51\x2c\xc9\x22\x0d\xf8\x37\x15\x3f\xbb\x7f\x49\x82\xd8\x3a\xb9\xde\xd0\xda\x62\x6e\x0f\x74\xed\x7e\x34\x7e\x68\x9d\x46\x08\x30\x56\xd5\x6b\x67\x2e\x69\x52\xc6\x81\x2e\x87\x07\xa0\x01\x3b\xc4\xee\x14\xba\xf5\x55\x65\x0b\xd3\xfa\x5a\x81\xc0\x4a\x0f\x57\x6c\xcf\x59\xa6\x88\x09\xa4\x62\xb2\xc7\x5a\xa5\x47\xb2\x52\x47\xaf\x88\x61\xe0\xb4\xd7\xbb\x75\x9b\x6c\xd8\x62\x17\x79\x53\xf8\x5d\x7d\x81\x3b\x86\x2d\x69\x88\x61\x0b\xdc\x16\xe2\x3d\xb6\x34\x45\x2e\x97\x4a\x74\x12\x2e\x43\x60\x74\x9d\xad\x02\x70\xae\xfd\x86\x1f\xe1\x68\x8f\x75\xa5\xe1\xb8\x6b\x3a\xbe\x52\xf2\x80\x78\x61\x9d\x64\x52\x62\x5c\xdf\x10\x12\xe6\x54\xf7\x46\x2e\x27\xf1\x6a\x01\x8f\x5f\xa7\xd4\x35\x96\xae\xe1\x5b\xa6\xa7\xf2\xba\x10\x7b\xf0\x1a\x71\x08\x7a\x7c\x75\x93\x88\x99\x7d\x35\xa9\x44\x12\xde\xb5\xb9\xde\xfb\x29\x6e\xce\x6c\x01\x6c\x37\xa2\x36\xed\x1b\x84\x75\xc2\x60\xd7\xd8\x4f\xca\xbe\xca\xf4\xa7\x31\xbc\xce\x73\x6b\x52\x3f\x93\xd9\x3b\x28\x85\x4c\xf4\x90\x2e\x6e\xa5\x18\x78\x7d\xe8\xf7\xd7\x5b\x6e\x5b\x6f\xfc\xbe\x0b\xc2\x48\xc5\x3b\xa4\x18\x0a\x2b\xb0\x6e\xae\x23\x26\x6f\x5e\x5e\xd1\x56\xc3\xb1\xaa\x3c\xa1\xc9\xca\x8c\x79\x0b\xdf\x83\x31\x06\xca\x8a\xf3\xce\x0f\x94\x9b\x06\x22\xa8\xda\xc1\x42\x75\x04\xf0\x9d\xba\x91\xe4\x7f\xbd\xa1\xf4\xe5\xcb\xdc\x55\xbb\xee\x1b\xf8\x8e\x04\x51\xd5\x62\xf0\x23\x05\x72\x74\x73\x7c\xdc\x77\xde\x0f\xa4\x92\x8c\xf8\xb1\x65\x5c\xaf\x48\x35\x59\x21\x53\x91\x17\x7d\xf1\x
86\x80\xa0\xe3\x7a\x6c\x3c\x1a\x6f\x2e\x67\xbe\x88\x6e\x0a\x21\xbd\xa3\x18\xee\xa3\x32\xac\x8d\xf0\x61\x70\xcc\x7b\xd4\xac\x5d\x19\x0e\x4a\x26\xa8\x6d\x43\xd9\x09\xb1\x14\xce\xdc\x34\x2d\x4b\x2d\x41\x44\x52\xfe\xb1\x98\xe5\xe9\x3a\x95\x18\xb4\x6d\x5b\x8f\x27\xf0\x79\xce\x1c\xe1\xaa\xe7\x11\xa3\x32\xd5\x9d\x2e\x1f\x15\xd6\xb8\x26\x52\x4a\x92\xb2\x93\x60\xa6\x8e\xa8\xd9\x0c\x3c\x77\x71\xbb\x2d\x86\x6c\xbe\x5d\x1e\x15\xbf\x9c\x29\xa0\x53\xfe\xde\xa3\x2e\xb4\xbf\xc8\x94\xe4\xd6\x1d\xaa\x46\x97\xdd\x3c\x92\x13\x0b\xdb\x9b\x82\x21\xc4\xc9\x40\x79\xf0\xe2\x5d\x96\x33\x11\x6b\x0f\x76\x12\x1e\x68\x74\xd3\x21\xad\xfe\x33\xf5\x12\x17\x3b\xe6\xfd\xb1\xe6\xb7\xf2\xaf\x50\x4e\xd3\xb0\x88\x2c\xff\x22\x51\x54\x39\x76\xee\xc5\x21\x46\x36\x01\xfb\xd3\x5d\xc1\x99\x5d\x5d\x10\x40\xc6\x96\x44\x59\xdd\x5d\x5d\xf3\xde\x68\x93\x79\x92\xfb\x6f\x48\xa9\x93\x22\xc6\x6c\x4f\xe1\x90\x4b\x93\x64\xd9\x03\xde\x58\xba\x3c\x49\xab\xd7\xb7\x8e\xe1\x2f\xeb\x7c\x21\xeb\x57\xd5\xec\xd1\x98\x94\x9a\x2d\x78\xbc\x11\x2f\xf5\xae\x1c\xc0\xd6\x1d\xb3\x39\xd0\x2b\xb0\x0b\xee\xd0\x93\x8d\x9c\xc4\x85\xc7\x8f\xf9\xd4\x69\x4b\x1a\xc7\x2e\xd2\xfb\x06\x0c\x47\xb0\xef\xc6\xe4\x2b\x89\x3a\x06\x97\xf6\x9d\x61\x8c\xee\xd5\xf6\xe9\x32\xa3\x98\x8e\x0b\x58\x9c\x52\xe8\x34\x01\xc9\x63\x1a\x3f\xca\xb7\x3a\x5a\xd2\xed\xd4\x34\xb9\x82\x98\x13\x6f\x65\xe5\x62\x8f\x82\x55\xba\x12\x6a\x54\xbe\x33\xc0\x31\x22\x15\xfb\x4a\x30\xa7\xd6\x06\x54\x88\x74\xa7\xb3\xb4\xb4\xf8\x95\x7f\x9c\xba\x40\x01\x29\x3b\xae\x33\xf5\x97\xc0\xcc\x51\x4f\x5d\x81\xe6\xab\x67\x49\x0c\x3c\xd6\x0d\xb9\x0c\xa1\xef\x9f\x07\x68\x27\xa9\x5f\xc1\x63\x8c\x63\xb4\x42\x82\xa4\x53\xb6\xa3\x5d\x47\xc2\xda\x85\x43\x19\xcd\x28\x9c\x4d\x74\xc3\x81\x03\x54\xd3\xd2\x08\x13\xa6\xa8\x6b\x77\x44\x9f\xd4\x34\xe6\x7e\x10\xb9\x9c\x54\x0d\x27\x8a\x5d\x6f\xb9\x7a\x3c\x4d\x9c\x1f\xc1\xbf\x31\x32\x6e\xdf\x4c\xa2\xa1\xed\x92\xee\x46\x4d\x0f\x1a\x22\x20\xd8\xb9\xcc\x8a\x7b\x30\x95\xdd\xa6\x28\x69\xab\xf2\x39\xcc\xa3\x35\x8a\x6e\x66\xa2\xe9\xd2\xd3\xd9\x75\xe2\x33\x2b\xfa\xd4\xfc\x44\x
d2\x1d\x3c\x69\x2e\x94\x26\x35\x58\x4e\x8d\x1d\x3b\xeb\x85\xd2\x17\x24\x0d\xd4\x33\x9e\x0f\xd6\xc2\xc3\x98\x55\x89\x7c\x92\xd3\xfa\x09\x25\xff\xa1\x24\xab\xa1\x7f\x00\x95\xfb\x86\x8a\xc2\xd5\x7e\x90\x10\x89\xe6\x2e\x92\x26\xaf\x0d\x5f\xc4\x91\x58\x12\x79\xc9\xa5\x28\x62\xda\x40\xe7\xc7\xd3\x2f\x19\x74\xc1\x4a\xaf\xf7\x7e\xea\xbf\xbd\x26\x97\x5e\xb2\x59\x9c\x92\x1d\xa0\x81\x71\x34\x11\x25\x8f\x48\x1b\xa0\x05\xce\x2c\x90\x2d\x77\x0d\xd8\x85\xd7\xf7\xc3\x19\x0e\x5d\xac\xca\x26\x7e\xe8\x94\xad\x43\x49\x96\xcd\x24\x32\x65\x2a\xc9\x94\xeb\x0b\x02\x75\xed\x92\x49\x1f\x46\x21\xf2\xd0\xec\x63\x91\x7a\x65\x7f\x1c\xcf\x62\x5d\xff\xea\x07\xff\xb3\x80\x8c\xf3\x54\xfc\x71\x2a\x94\xbb\x10\x81\x67\x1c\xb0\x82\x0d\x8b\x75\xa7\xf4\xa0\xba\x38\xdc\x1f\x4f\x2a\xf8\xbc\xe9\x7d\xc1\xd9\x55\xaf\x2a\xef\xd4\x64\x39\x66\x1f\x85\xc2\x02\xe0\x37\x7d\x2d\x9c\xc3\x4b\xe4\xec\xf0\xe6\x9f\x81\x7d\xa1\x69\x3b\x48\x20\xe4\xe9\xfc\x9c\xae\xa3\x4f\x11\xe0\x32\x7b\xce\x85\x1f\xb5\x55\x9c\x3e\x19\x92\x2a\x02\xc6\xf9\xff\x86\x09\x6a\x5f\x60\x83\x8a\xb8\x00\xbb\x93\xf5\x84\x42\x6e\x87\xbd\x0c\x48\xb2\x04\x80\xa7\x83\x63\x2d\x3f\x82\x4c\x08\xd2\xd6\x68\x2e\x39\x5a\x90\x0d\x71\x71\xde\x08\xb6\xdb\xd3\xdb\xbb\x72\x4d\x4c\x2c\x99\xc6\x30\xcb\xec\x15\x0d\x81\xce\x39\x3b\xdc\xbf\x70\xad\x3d\x14\x42\xdc\xe6\xc0\x6e\x1d\x82\xa4\x86\x1f\xa4\xd4\x74\x37\x3d\x23\xb7\xa0\xaf\x77\xf2\xae\x09\x8b\x88\x3e\xfc\x59\x6a\xbb\x48\x63\x07\xb6\x55\x94\x97\x5d\x75\x0c\x59\xa9\x33\x09\xf7\xca\xf0\x14\xf6\x9e\x41\xcc\x17\x04\xdf\x5b\xd4\xeb\xda\x48\x4a\xfd\x1b\x90\xdb\x2a\xa9\xd6\x8d\x0f\x4c\xf3\x31\xd1\x06\x4c\x31\xdc\x5c\x89\xd3\xc8\xe0\xcd\xb5\xe5\xaf\xa8\xba\xd0\xdc\x12\x35\x9d\xff\x9c\x5b\x39\x55\x08\x4f\x8b\x1e\xd5\x7a\x35\xc1\x01\xd7\x39\x35\x1e\x3d\xc1\x17\x29\xf2\xb0\x44\x1f\x21\x72\x95\x5d\xff\x32\x0a\x63\x1c\x56\xe0\x54\xf9\xb6\x9d\x80\xe8\xe2\x69\x65\x35\x76\x1d\x50\x56\x05\x48\xcc\x66\xb6\x7d\x36\xc8\xd6\x7e\xc8\xc1\x00\x18\x3a\x0c\x61\x49\x96\x60\x72\x4f\x5c\x7c\xda\x7d\xf9\x5c\x3d\xaa\x49\xb2\x2e\x45\x82\xf1\x55\x
79\x8a\xfe\x7d\x1d\x25\x3a\x3e\xab\xd9\x3b\xae\x94\x97\xed\x68\xeb\xc2\x87\x52\xe0\x81\x9c\xc6\x46\x2e\x74\xf1\x59\xf3\xc0\xc4\x95\xaf\x7f\x43\xd7\xe7\x63\x21\xfe\x76\x67\xa4\x91\x3f\x6c\x74\x5a\x54\xe0\x1f\xaf\xb1\xcf\x79\x99\x10\xfb\x49\x98\xb4\x80\x4e\x36\x07\x91\x29\x33\xa3\x85\x38\x91\xa7\x92\xc3\x1f\x03\xe0\x3e\xa9\x16\x3c\xa7\xa5\x53\x59\x17\x9a\xbc\x8d\x2c\x41\x9a\x9d\xa5\x8a\x9c\x20\xdd\xf4\xb6\x58\x41\x80\x5a\x41\xfb\xa6\x3c\xb6\x68\xfe\x65\xd8\xf2\x24\xb8\x36\x7c\x7a\x06\x38\x7d\x6d\x18\xa2\xd0\x9e\x6c\x84\x21\xce\xe0\x69\xb8\x7e\xc9\xab\x4f\x74\x5b\x38\x5c\x51\x59\x31\x93\x51\xea\xac\x6b\x69\x2c\x7f\xdf\x64\x55\x60\xf5\x40\xcc\x91\x1f\xc6\xa8\xdb\x5e\x6f\xc7\xf4\x66\x17\xfb\x2b\x14\x81\x3d\x11\x16\x11\xd7\x01\x68\x6c\xd9\xc9\xbb\xa1\x1b\x70\x3d\x06\x23\x38\xf4\x46\xec\xf4\x57\x4d\x55\x21\x7f\x3d\xc8\xe1\xa0\xe9\x68\x2e\x8e\x28\x71\x0d\x4f\xaa\xc7\x9c\x87\x2d\xe7\x01\x3e\x24\x3c\x16\xf8\xd7\xea\xfe\x7f\x42\x49\x16\x3a\x17\xc7\x20\x62\x5d\x98\x2a\x5f\x2a\x33\x9d\x17\x55\x16\x2a\x9f\xa1\xfa\x4a\x85\x94\xdb\x19\xfb\xed\x10\xc3\x80\x03\xe8\xae\x11\x85\xa5\xc5\xdd\x15\x10\x81\x03\x5f\x7e\x35\x41\x28\xbc\x12\x73\x99\xff\xb6\xb9\xd4\x63\xd5\x30\x38\x98\x72\xa9\x15\x54\x7e\x09\x09\x09\x16\x31\x32\xc3\x05\x39\x4e\x63\xbe\xb3\x58\x45\xee\xc3\xc0\xd9\xa9\x7c\xf8\x90\x11\xc8\xd2\xf5\x8d\x3f\xd3\xda\x36\xb4\xa3\xaa\xfc\xca\x72\x44\x45\x08\x22\xce\xa3\x5b\x43\x16\x21\xfe\xa4\x31\xcc\x15\x17\x1d\xe8\xf2\x7e\x0f\x55\x08\x15\x60\xaf\x71\x78\x17\x65\xaa\x3d\x2a\x60\x70\xa3\xe2\xb9\xf5\xc3\xf3\xe1\x06\xf9\x7b\x99\x3e\x38\xe3\xeb\x71\x8f\x4c\x30\x67\x05\x96\xbe\x0b\x80\xf4\x04\x60\x9b\x7a\x6d\x87\x98\xbe\x59\x2a\xbe\x5a\xfb\x42\x4e\xfd\x53\xe3\x83\x00\x47\xb1\x0f\x24\xe1\x75\x6c\xa5\x65\xc8\xc1\x8e\xa8\x2c\xe2\x5f\xe5\x04\x3e\xef\xa2\xfa\x6f\x88\x23\x6b\x10\x90\x65\xd1\x2b\x71\xc6\x51\x32\xc8\x58\x39\x86\xac\xae\x6e\x31\x48\xd8\x8c\x63\x72\xb1\xc4\x09\x5e\xf8\x05\x9c\x6c\x55\xd2\xe3\xdc\xc4\xd5\x72\x2a\x95\xa5\xd5\x84\x30\xbe\xfb\xde\xcf\x27\xa5\x25\x59\x15\xf6\x1b\xdb\x3a\x
78\xd2\x7c\x6b\x53\x7b\xa0\x16\xd2\x0b\x17\x6d\x1a\x29\x40\x6b\xb8\xb8\x6e\x6a\xad\x2d\x44\x6b\xe1\xca\x17\xa1\xd7\x74\x4c\xfb\x45\xe7\xef\x93\x3b\x22\x42\x2d\x06\x64\x20\xf0\x25\x03\x96\x0f\x65\x40\xa1\x23\x5e\xd6\x52\x76\x0f\x30\x24\x56\x2b\xf8\xd0\x32\x8c\xd2\x3e\xc0\xc4\x51\x17\xb1\xe7\xcb\x71\xd1\x6e\x26\x3b\xbc\x85\xfe\x82\x78\xb8\x2d\x6b\x3d\x92\x12\x34\xcb\xfd\x75\x17\x45\xd9\x3b\x7f\xe4\xb5\xdd\x93\x2e\x0a\xe8\x6b\x82\xf8\x38\x55\x84\x42\xae\x68\xca\x13\x5f\x9b\x0f\x35\x29\xfd\x6c\x9e\x2f\xce\x97\xf2\x20\x0f\xc2\x34\xe1\xea\x8b\x7e\x1b\xb1\x35\xe7\x78\x23\x10\x7c\x4d\x1c\x12\xb6\x6f\xe8\x86\x7b\x03\xb2\xbb\xfb\xde\x4d\xb3\x3b\x2a\x14\xb4\xb3\xe4\x51\x0e\x05\x94\x64\x99\x30\xd6\x21\x77\x94\xf4\xb5\x13\xaa\xc3\x6f\xfc\x7b\x80\x5f\x35\x33\xbf\x91\xba\xa5\x50\x71\xd5\xe3\x12\xab\xa6\x2b\x40\x62\x6c\x8b\x0f\x60\x66\xfc\x94\x2b\x59\xb9\xce\xf0\xe1\x05\x2a\xea\x37\xf6\x66\xc8\x6f\x91\xe7\x09\x58\x72\x78\x4a\xff\x08\x58\x7d\xa4\xa3\x0e\x47\xd6\x45\x5b\x3e\xe2\x4a\x19\x64\xfe\xaa\x9f\xf3\xb8\x28\xa6\x68\xa8\xe3\x3e\xdd\x37\x7c\x8e\xa1\x29\xe9\xce\x34\xf9\x2f\xe8\x34\x70\xa3\x8f\x48\x65\x97\x30\x67\x7e\x55\x1f\x6b\xca\xae\x41\xd8\x6a\x9d\xf7\xc7\x81\xd7\x0f\x3a\x45\x98\x54\xca\xa0\x1c\xd2\xe4\x06\xbe\x0f\xec\xc3\x79\xf9\x2a\xd1\x32\xec\x36\xc9\x01\xc4\xf4\x6d\xf5\x5d\xbe\xb7\x52\x0f\xf4\x60\x81\x58\x9b\xb0\xf3\xb8\x3e\x86\xc7\x1d\xe0\xf6\xa9\x6e\xa0\xcc\x77\xf8\xe6\xc1\x6d\xf8\xb8\xfd\x10\x99\x28\xc3\x9b\xb4\x22\x67\x9c\x3f\x6d\xcd\x2b\x9e\x91\x9f\x65\xa1\xe9\xc3\xc1\x2b\xbc\xcb\x1e\x26\xfd\x2b\x44\x2e\xe5\xc1\x85\xb8\x62\x7d\x48\xc6\xe0\x0e\xc5\xee\xe4\x7a\x26\x64\xde\x93\xc0\x14\xd7\xd5\xd4\x34\x01\x4d\x2d\x99\xe4\x71\x38\x7b\x5c\xfa\x1e\x08\x4f\xbe\x3d\x68\xcd\x62\xb9\x8f\x0b\x81\x0a\x60\x86\x97\x23\x50\x44\xc9\x18\x5b\xc6\xa0\x72\x32\xb5\x21\x4b\xea\x46\xfc\xb1\x1d\x01\xd0\x8e\x79\x03\x9d\x85\xc3\x2c\xa0\xa0\xc1\x9e\x9c\xa0\x75\x4f\x92\x7e\xfa\x70\x40\x3a\x14\xbb\x35\x1c\x14\x97\xe6\xde\xb1\xa1\xb8\xa6\x61\x68\x37\x45\x59\x15\xb6\xd0\xf2\xbd\x79\xd2\x23\x83\xee\x
c6\x4f\x96\x1b\x54\xd7\xbd\x57\x8c\xaf\x3a\x48\xce\x26\xe2\x25\x3d\x4e\x84\x61\x85\x26\xeb\x8d\x35\x9c\xa3\xad\x6b\x67\xa8\x7e\x77\xcf\xe9\x16\xfc\x0a\xe4\xd2\x3a\x5b\x60\x51\xb8\xe1\xbd\x01\x42\xaf\xea\x65\x10\x10\xe4\xb4\x3a\xe9\x00\x8c\x22\x5e\xc1\xe0\xd7\xcc\xc0\x82\x0c\x5f\x4d\x70\xdd\x8c\xad\xe7\x02\xcf\x2b\x4f\xf2\x15\x9d\x62\x3e\x9b\x24\xfe\xba\xd0\xa8\x8d\x59\x6a\x86\xd2\x32\x62\x40\x59\x0c\x67\x78\xea\x61\x8f\xbe\xb4\xbb\x8d\x36\x16\xef\xf0\xea\x02\x1a\xd2\x92\x83\x38\x46\xb2\x5c\x28\x5d\xc0\xe7\x01\x04\xa3\x19\x41\x16\x19\xdd\xc9\x21\xd0\xff\x1b\x58\x37\xee\xd9\x0d\x42\x88\xbd\x54\x29\xb8\x68\x16\xf9\xad\x1e\xc7\x74\xc6\xb3\x7a\xe8\x7c\x1e\xfe\x92\x99\x3f\xed\xdb\x94\xa2\x65\x06\x7d\xe4\x8e\xa4\xa4\x1c\x54\xda\xb0\x2d\xa4\x3e\x19\x49\x1c\x04\x17\xb5\xf3\xb8\x36\x96\x76\x74\x66\xdb\xa8\xe7\x20\x99\x09\xbe\xa6\xeb\xea\x85\xe2\x1e\x32\xf1\xf6\x87\xc1\x5a\x64\xdd\x15\xd8\xf9\x67\xaa\x5d\x99\xf5\xe7\xb2\xe0\x92\xd3\x30\x80\x93\xda\x2d\x24\xc1\x69\xa4\x00\xa1\x62\x3b\x62\x9a\x80\xe3\x60\x7e\x67\xcf\x03\xe8\x99\x95\x43\x65\x4b\x86\xce\x91\x44\xdc\x81\x9e\xba\x72\x30\xec\xe7\x33\x49\xcb\x5f\x48\x47\x36\x59\xf7\x6d\x6f\x2b\x6d\x29\xc2\xcc\xbd\xdc\x5c\xae\xd2\xbe\xae\x8e\xe5\xee\x57\x49\x7b\x3d\x68\x20\x72\xc9\x34\xf4\xa4\xf8\xfb\x9d\xee\xfe\xdc\x53\x43\x48\x6c\xa0\x69\xf1\xea\x4e\xb9\x5b\xd2\x63\x65\xdd\x29\x3f\xe7\xea\x8f\x0a\xc7\xc9\x0b\x82\x83\xc4\xd4\xf0\x41\xeb\xc5\x1f\x33\x9a\xc8\x1d\x29\x6a\xda\xfc\x17\x7b\x1e\xe5\x5c\xaf\xce\x2c\x71\xc8\xf7\xd1\x5a\xf4\x15\x1e\x98\x0b\x14\x15\xf8\xf0\x62\x77\x74\x37\x61\x59\xdf\x4d\xdf\xb7\x6b\x6a\x28\xf2\x52\xf0\x2d\xdd\x0c\x75\x04\x37\x85\x11\x34\x47\x84\x4b\x7e\xe5\x7f\xca\x72\x0c\x51\x94\x43\x1c\x27\x9d\x96\xc6\x6d\xe7\xbd\x96\x90\x74\x0a\x72\x1b\x3f\xcf\x4f\x63\xd1\x56\x1b\x1e\x6c\x10\x4e\x90\xed\xad\x48\xe7\xd4\x3b\x83\x89\x82\xcc\x05\x29\xe4\x32\x67\x6b\xaf\x1e\xac\x69\xad\xb1\xd0\xa7\x25\xbe\x13\xe7', 2)
| 39,887
| 39,887
| 0.749994
| 9,967
| 39,887
| 3.000201
| 0.026186
| 0.004414
| 0.004816
| 0.004414
| 0.002006
| 0.001505
| 0.001505
| 0
| 0
| 0
| 0
| 0.312431
| 0.000075
| 39,887
| 1
| 39,887
| 39,887
| 0.437318
| 0
| 0
| 0
| 0
| 1
| 0.998997
| 0.998997
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0dac526211b637630dabbaf983c54b223bc8abd2
| 23,360
|
py
|
Python
|
sdk/python/pulumi_equinix_metal/port_vlan_attachment.py
|
pulumi/pulumi-equinix-metal
|
79213497bddc7ae806d3b27c3f349fdff935a19f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-01-08T21:57:33.000Z
|
2021-01-08T21:57:33.000Z
|
sdk/python/pulumi_equinix_metal/port_vlan_attachment.py
|
pulumi/pulumi-equinix-metal
|
79213497bddc7ae806d3b27c3f349fdff935a19f
|
[
"ECL-2.0",
"Apache-2.0"
] | 33
|
2020-12-23T21:37:39.000Z
|
2022-03-25T19:23:17.000Z
|
sdk/python/pulumi_equinix_metal/port_vlan_attachment.py
|
pulumi/pulumi-equinix-metal
|
79213497bddc7ae806d3b27c3f349fdff935a19f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-01-08T21:24:44.000Z
|
2021-01-08T21:24:44.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['PortVlanAttachmentArgs', 'PortVlanAttachment']
@pulumi.input_type
class PortVlanAttachmentArgs:
    """Typed input bag for constructing a ``PortVlanAttachment`` resource.

    Required inputs: ``device_id``, ``port_name``, ``vlan_vnid``.
    Optional inputs: ``force_bond``, ``native`` (recorded only when provided).
    """

    def __init__(__self__, *,
                 device_id: pulumi.Input[str],
                 port_name: pulumi.Input[str],
                 vlan_vnid: pulumi.Input[int],
                 force_bond: Optional[pulumi.Input[bool]] = None,
                 native: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a PortVlanAttachment resource.

        :param pulumi.Input[str] device_id: ID of device to be assigned to the VLAN
        :param pulumi.Input[str] port_name: Name of network port to be assigned to the VLAN
        :param pulumi.Input[int] vlan_vnid: VXLAN Network Identifier, integer
        :param pulumi.Input[bool] force_bond: Add port back to the bond when this resource is removed. Default is false.
        :param pulumi.Input[bool] native: Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        """
        # Required inputs are always stored.
        pulumi.set(__self__, "device_id", device_id)
        pulumi.set(__self__, "port_name", port_name)
        pulumi.set(__self__, "vlan_vnid", vlan_vnid)
        # Optional inputs are stored only when explicitly supplied, so that
        # unset values do not appear in the resource's input map.
        if force_bond is not None:
            pulumi.set(__self__, "force_bond", force_bond)
        if native is not None:
            pulumi.set(__self__, "native", native)

    @property
    @pulumi.getter(name="deviceId")
    def device_id(self) -> pulumi.Input[str]:
        """
        ID of device to be assigned to the VLAN
        """
        return pulumi.get(self, "device_id")

    @device_id.setter
    def device_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "device_id", value)

    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> pulumi.Input[str]:
        """
        Name of network port to be assigned to the VLAN
        """
        return pulumi.get(self, "port_name")

    @port_name.setter
    def port_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "port_name", value)

    @property
    @pulumi.getter(name="vlanVnid")
    def vlan_vnid(self) -> pulumi.Input[int]:
        """
        VXLAN Network Identifier, integer
        """
        return pulumi.get(self, "vlan_vnid")

    @vlan_vnid.setter
    def vlan_vnid(self, value: pulumi.Input[int]):
        pulumi.set(self, "vlan_vnid", value)

    @property
    @pulumi.getter(name="forceBond")
    def force_bond(self) -> Optional[pulumi.Input[bool]]:
        """
        Add port back to the bond when this resource is removed. Default is false.
        """
        return pulumi.get(self, "force_bond")

    @force_bond.setter
    def force_bond(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_bond", value)

    @property
    @pulumi.getter
    def native(self) -> Optional[pulumi.Input[bool]]:
        """
        Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        """
        return pulumi.get(self, "native")

    @native.setter
    def native(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "native", value)
@pulumi.input_type
class _PortVlanAttachmentState:
    """Typed state bag used when looking up / filtering existing
    ``PortVlanAttachment`` resources. Every field is optional; only fields
    explicitly supplied are recorded.
    """

    def __init__(__self__, *,
                 device_id: Optional[pulumi.Input[str]] = None,
                 force_bond: Optional[pulumi.Input[bool]] = None,
                 native: Optional[pulumi.Input[bool]] = None,
                 port_id: Optional[pulumi.Input[str]] = None,
                 port_name: Optional[pulumi.Input[str]] = None,
                 vlan_id: Optional[pulumi.Input[str]] = None,
                 vlan_vnid: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering PortVlanAttachment resources.

        :param pulumi.Input[str] device_id: ID of device to be assigned to the VLAN
        :param pulumi.Input[bool] force_bond: Add port back to the bond when this resource is removed. Default is false.
        :param pulumi.Input[bool] native: Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        :param pulumi.Input[str] port_id: UUID of device port
        :param pulumi.Input[str] port_name: Name of network port to be assigned to the VLAN
        :param pulumi.Input[str] vlan_id: UUID of VLAN API resource
        :param pulumi.Input[int] vlan_vnid: VXLAN Network Identifier, integer
        """
        # All state fields are optional: record only those explicitly given.
        if device_id is not None:
            pulumi.set(__self__, "device_id", device_id)
        if force_bond is not None:
            pulumi.set(__self__, "force_bond", force_bond)
        if native is not None:
            pulumi.set(__self__, "native", native)
        if port_id is not None:
            pulumi.set(__self__, "port_id", port_id)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if vlan_id is not None:
            pulumi.set(__self__, "vlan_id", vlan_id)
        if vlan_vnid is not None:
            pulumi.set(__self__, "vlan_vnid", vlan_vnid)

    @property
    @pulumi.getter(name="deviceId")
    def device_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of device to be assigned to the VLAN
        """
        return pulumi.get(self, "device_id")

    @device_id.setter
    def device_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "device_id", value)

    @property
    @pulumi.getter(name="forceBond")
    def force_bond(self) -> Optional[pulumi.Input[bool]]:
        """
        Add port back to the bond when this resource is removed. Default is false.
        """
        return pulumi.get(self, "force_bond")

    @force_bond.setter
    def force_bond(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "force_bond", value)

    @property
    @pulumi.getter
    def native(self) -> Optional[pulumi.Input[bool]]:
        """
        Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        """
        return pulumi.get(self, "native")

    @native.setter
    def native(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "native", value)

    @property
    @pulumi.getter(name="portId")
    def port_id(self) -> Optional[pulumi.Input[str]]:
        """
        UUID of device port
        """
        return pulumi.get(self, "port_id")

    @port_id.setter
    def port_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "port_id", value)

    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of network port to be assigned to the VLAN
        """
        return pulumi.get(self, "port_name")

    @port_name.setter
    def port_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "port_name", value)

    @property
    @pulumi.getter(name="vlanId")
    def vlan_id(self) -> Optional[pulumi.Input[str]]:
        """
        UUID of VLAN API resource
        """
        return pulumi.get(self, "vlan_id")

    @vlan_id.setter
    def vlan_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vlan_id", value)

    @property
    @pulumi.getter(name="vlanVnid")
    def vlan_vnid(self) -> Optional[pulumi.Input[int]]:
        """
        VXLAN Network Identifier, integer
        """
        return pulumi.get(self, "vlan_vnid")

    @vlan_vnid.setter
    def vlan_vnid(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "vlan_vnid", value)
class PortVlanAttachment(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 device_id: Optional[pulumi.Input[str]] = None,
                 force_bond: Optional[pulumi.Input[bool]] = None,
                 native: Optional[pulumi.Input[bool]] = None,
                 port_name: Optional[pulumi.Input[str]] = None,
                 vlan_vnid: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Provides a resource to attach device ports to VLANs.

        Device and VLAN must be in the same facility.

        If you need this resource to add the port back to bond on removal, set `force_bond = true`.

        To learn more about Layer 2 networking in Equinix Metal, refer to

        * <https://metal.equinix.com/developers/docs/networking/layer2/>
        * <https://metal.equinix.com/developers/docs/networking/layer2-configs/>

        ## Example Usage
        ### Hybrid network type

        ```python
        import pulumi
        import pulumi_equinix_metal as equinix_metal

        test_vlan = equinix_metal.Vlan("testVlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test_device = equinix_metal.Device("testDevice",
            hostname="test",
            plan="c3.small.x86",
            facilities=["ny5"],
            operating_system="ubuntu_20_04",
            billing_cycle="hourly",
            project_id=local["project_id"])
        test_device_network_type = equinix_metal.DeviceNetworkType("testDeviceNetworkType",
            device_id=test_device.id,
            type="hybrid")
        test_port_vlan_attachment = equinix_metal.PortVlanAttachment("testPortVlanAttachment",
            device_id=test_device_network_type.id,
            port_name="eth1",
            vlan_vnid=test_vlan.vxlan)
        ```
        ### Layer 2 network

        ```python
        import pulumi
        import pulumi_equinix_metal as equinix_metal

        test_device = equinix_metal.Device("testDevice",
            hostname="test",
            plan="c3.small.x86",
            facilities=["ny5"],
            operating_system="ubuntu_20_04",
            billing_cycle="hourly",
            project_id=local["project_id"])
        test_device_network_type = equinix_metal.DeviceNetworkType("testDeviceNetworkType",
            device_id=test_device.id,
            type="layer2-individual")
        test1_vlan = equinix_metal.Vlan("test1Vlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test2_vlan = equinix_metal.Vlan("test2Vlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test1_port_vlan_attachment = equinix_metal.PortVlanAttachment("test1PortVlanAttachment",
            device_id=test_device_network_type.id,
            vlan_vnid=test1_vlan.vxlan,
            port_name="eth1")
        test2_port_vlan_attachment = equinix_metal.PortVlanAttachment("test2PortVlanAttachment",
            device_id=test_device_network_type.id,
            vlan_vnid=test2_vlan.vxlan,
            port_name="eth1",
            native=True,
            opts=pulumi.ResourceOptions(depends_on=["metal_port_vlan_attachment.test1"]))
        ```
        ## Attribute Reference

        * `id` - UUID of device port used in the assignment
        * `vlan_id` - UUID of VLAN API resource
        * `port_id` - UUID of device port

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] device_id: ID of device to be assigned to the VLAN
        :param pulumi.Input[bool] force_bond: Add port back to the bond when this resource is removed. Default is false.
        :param pulumi.Input[bool] native: Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        :param pulumi.Input[str] port_name: Name of network port to be assigned to the VLAN
        :param pulumi.Input[int] vlan_vnid: VXLAN Network Identifier, integer
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PortVlanAttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a resource to attach device ports to VLANs.

        Device and VLAN must be in the same facility.

        If you need this resource to add the port back to bond on removal, set `force_bond = true`.

        To learn more about Layer 2 networking in Equinix Metal, refer to

        * <https://metal.equinix.com/developers/docs/networking/layer2/>
        * <https://metal.equinix.com/developers/docs/networking/layer2-configs/>

        ## Example Usage
        ### Hybrid network type

        ```python
        import pulumi
        import pulumi_equinix_metal as equinix_metal

        test_vlan = equinix_metal.Vlan("testVlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test_device = equinix_metal.Device("testDevice",
            hostname="test",
            plan="c3.small.x86",
            facilities=["ny5"],
            operating_system="ubuntu_20_04",
            billing_cycle="hourly",
            project_id=local["project_id"])
        test_device_network_type = equinix_metal.DeviceNetworkType("testDeviceNetworkType",
            device_id=test_device.id,
            type="hybrid")
        test_port_vlan_attachment = equinix_metal.PortVlanAttachment("testPortVlanAttachment",
            device_id=test_device_network_type.id,
            port_name="eth1",
            vlan_vnid=test_vlan.vxlan)
        ```
        ### Layer 2 network

        ```python
        import pulumi
        import pulumi_equinix_metal as equinix_metal

        test_device = equinix_metal.Device("testDevice",
            hostname="test",
            plan="c3.small.x86",
            facilities=["ny5"],
            operating_system="ubuntu_20_04",
            billing_cycle="hourly",
            project_id=local["project_id"])
        test_device_network_type = equinix_metal.DeviceNetworkType("testDeviceNetworkType",
            device_id=test_device.id,
            type="layer2-individual")
        test1_vlan = equinix_metal.Vlan("test1Vlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test2_vlan = equinix_metal.Vlan("test2Vlan",
            description="VLAN in New Jersey",
            facility="ny5",
            project_id=local["project_id"])
        test1_port_vlan_attachment = equinix_metal.PortVlanAttachment("test1PortVlanAttachment",
            device_id=test_device_network_type.id,
            vlan_vnid=test1_vlan.vxlan,
            port_name="eth1")
        test2_port_vlan_attachment = equinix_metal.PortVlanAttachment("test2PortVlanAttachment",
            device_id=test_device_network_type.id,
            vlan_vnid=test2_vlan.vxlan,
            port_name="eth1",
            native=True,
            opts=pulumi.ResourceOptions(depends_on=["metal_port_vlan_attachment.test1"]))
        ```
        ## Attribute Reference

        * `id` - UUID of device port used in the assignment
        * `vlan_id` - UUID of VLAN API resource
        * `port_id` - UUID of device port

        :param str resource_name: The name of the resource.
        :param PortVlanAttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher for the two overloads above: when an args object was
        # passed, expand it into keyword arguments; otherwise forward the
        # call as-is to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(PortVlanAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 device_id: Optional[pulumi.Input[str]] = None,
                 force_bond: Optional[pulumi.Input[bool]] = None,
                 native: Optional[pulumi.Input[bool]] = None,
                 port_name: Optional[pulumi.Input[str]] = None,
                 vlan_vnid: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        # Validate options and build the property bag before registering the
        # resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may not be supplied and the
            # required inputs must be present (unless resolving by URN).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PortVlanAttachmentArgs.__new__(PortVlanAttachmentArgs)

            if device_id is None and not opts.urn:
                raise TypeError("Missing required property 'device_id'")
            __props__.__dict__["device_id"] = device_id
            __props__.__dict__["force_bond"] = force_bond
            __props__.__dict__["native"] = native
            if port_name is None and not opts.urn:
                raise TypeError("Missing required property 'port_name'")
            __props__.__dict__["port_name"] = port_name
            if vlan_vnid is None and not opts.urn:
                raise TypeError("Missing required property 'vlan_vnid'")
            __props__.__dict__["vlan_vnid"] = vlan_vnid
            # Output-only properties start unset; the provider fills them in.
            __props__.__dict__["port_id"] = None
            __props__.__dict__["vlan_id"] = None
        super(PortVlanAttachment, __self__).__init__(
            'equinix-metal:index/portVlanAttachment:PortVlanAttachment',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            device_id: Optional[pulumi.Input[str]] = None,
            force_bond: Optional[pulumi.Input[bool]] = None,
            native: Optional[pulumi.Input[bool]] = None,
            port_id: Optional[pulumi.Input[str]] = None,
            port_name: Optional[pulumi.Input[str]] = None,
            vlan_id: Optional[pulumi.Input[str]] = None,
            vlan_vnid: Optional[pulumi.Input[int]] = None) -> 'PortVlanAttachment':
        """
        Get an existing PortVlanAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] device_id: ID of device to be assigned to the VLAN
        :param pulumi.Input[bool] force_bond: Add port back to the bond when this resource is removed. Default is false.
        :param pulumi.Input[bool] native: Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        :param pulumi.Input[str] port_id: UUID of device port
        :param pulumi.Input[str] port_name: Name of network port to be assigned to the VLAN
        :param pulumi.Input[str] vlan_id: UUID of VLAN API resource
        :param pulumi.Input[int] vlan_vnid: VXLAN Network Identifier, integer
        """
        # Merge the provider id into opts so the engine performs a lookup
        # instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _PortVlanAttachmentState.__new__(_PortVlanAttachmentState)

        __props__.__dict__["device_id"] = device_id
        __props__.__dict__["force_bond"] = force_bond
        __props__.__dict__["native"] = native
        __props__.__dict__["port_id"] = port_id
        __props__.__dict__["port_name"] = port_name
        __props__.__dict__["vlan_id"] = vlan_id
        __props__.__dict__["vlan_vnid"] = vlan_vnid
        return PortVlanAttachment(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="deviceId")
    def device_id(self) -> pulumi.Output[str]:
        """
        ID of device to be assigned to the VLAN
        """
        return pulumi.get(self, "device_id")

    @property
    @pulumi.getter(name="forceBond")
    def force_bond(self) -> pulumi.Output[Optional[bool]]:
        """
        Add port back to the bond when this resource is removed. Default is false.
        """
        return pulumi.get(self, "force_bond")

    @property
    @pulumi.getter
    def native(self) -> pulumi.Output[Optional[bool]]:
        """
        Mark this VLAN a native VLAN on the port. This can be used only if this assignment assigns second or further VLAN to the port. To ensure that this attachment is not first on a port, you can use `depends_on` pointing to another metal_port_vlan_attachment, just like in the layer2-individual example above.
        """
        return pulumi.get(self, "native")

    @property
    @pulumi.getter(name="portId")
    def port_id(self) -> pulumi.Output[str]:
        """
        UUID of device port
        """
        return pulumi.get(self, "port_id")

    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> pulumi.Output[str]:
        """
        Name of network port to be assigned to the VLAN
        """
        return pulumi.get(self, "port_name")

    @property
    @pulumi.getter(name="vlanId")
    def vlan_id(self) -> pulumi.Output[str]:
        """
        UUID of VLAN API resource
        """
        return pulumi.get(self, "vlan_id")

    @property
    @pulumi.getter(name="vlanVnid")
    def vlan_vnid(self) -> pulumi.Output[int]:
        """
        VXLAN Network Identifier, integer
        """
        return pulumi.get(self, "vlan_vnid")
| 42.705667
| 346
| 0.63887
| 2,894
| 23,360
| 4.926745
| 0.080511
| 0.062491
| 0.058634
| 0.03086
| 0.864708
| 0.844719
| 0.833848
| 0.808458
| 0.802216
| 0.779843
| 0
| 0.004888
| 0.264384
| 23,360
| 546
| 347
| 42.783883
| 0.824837
| 0.455779
| 0
| 0.627615
| 1
| 0
| 0.090154
| 0.00729
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158996
| false
| 0.004184
| 0.020921
| 0
| 0.276151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21752fdd14232168f672076170c155132b191a0d
| 16,865
|
py
|
Python
|
yandex/cloud/ydb/v1/database_service_pb2_grpc.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 36
|
2018-12-23T13:51:50.000Z
|
2022-03-25T07:48:24.000Z
|
yandex/cloud/ydb/v1/database_service_pb2_grpc.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 15
|
2019-02-28T04:55:09.000Z
|
2022-03-06T23:17:24.000Z
|
yandex/cloud/ydb/v1/database_service_pb2_grpc.py
|
korsar182/python-sdk
|
873bf2a9b136a8f2faae72e86fae1f5b5c3d896a
|
[
"MIT"
] | 18
|
2019-02-23T07:10:57.000Z
|
2022-03-28T14:41:08.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from yandex.cloud.operation import operation_pb2 as yandex_dot_cloud_dot_operation_dot_operation__pb2
from yandex.cloud.ydb.v1 import database_pb2 as yandex_dot_cloud_dot_ydb_dot_v1_dot_database__pb2
from yandex.cloud.ydb.v1 import database_service_pb2 as yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2
class DatabaseServiceStub(object):
    """A set of methods for managing databases.

    Client-side stub: each attribute is a unary-unary callable bound to one
    RPC of yandex.cloud.ydb.v1.DatabaseService.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Serializers/deserializers come from the generated protobuf modules
        # imported at the top of this file.
        self.Get = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Get',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.GetDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__pb2.Database.FromString,
                )
        self.List = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/List',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.ListDatabasesRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.ListDatabasesResponse.FromString,
                )
        # Mutating RPCs below all return a long-running Operation message.
        self.Create = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Create',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.CreateDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Update = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Update',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.UpdateDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Start = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Start',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.StartDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Stop = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Stop',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.StopDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Delete = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Delete',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.DeleteDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Restore = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Restore',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.RestoreBackupRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
        self.Backup = channel.unary_unary(
                '/yandex.cloud.ydb.v1.DatabaseService/Backup',
                request_serializer=yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.BackupDatabaseRequest.SerializeToString,
                response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
                )
class DatabaseServiceServicer(object):
    """A set of methods for managing databases.
    """

    def _unimplemented(self, context):
        # Shared stub behaviour: report UNIMPLEMENTED to the peer and raise
        # locally so a servicer subclass cannot silently return None.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Get(self, request, context):
        """Returns the specified database.
        """
        self._unimplemented(context)

    def List(self, request, context):
        """Retrieves a list of databases.
        """
        self._unimplemented(context)

    def Create(self, request, context):
        """Creates a new database.
        """
        self._unimplemented(context)

    def Update(self, request, context):
        """Modifies the specified database.
        """
        self._unimplemented(context)

    def Start(self, request, context):
        """Starts the specified database.
        """
        self._unimplemented(context)

    def Stop(self, request, context):
        """Stops the specified database.
        """
        self._unimplemented(context)

    def Delete(self, request, context):
        """Deletes the specified database.
        """
        self._unimplemented(context)

    def Restore(self, request, context):
        """Restores the specified backup
        """
        self._unimplemented(context)

    def Backup(self, request, context):
        """Missing associated documentation comment in .proto file."""
        self._unimplemented(context)
def add_DatabaseServiceServicer_to_server(servicer, server):
    """Register the servicer's unary-unary handlers on a gRPC server."""
    _svc = yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2
    _op = yandex_dot_cloud_dot_operation_dot_operation__pb2
    _db = yandex_dot_cloud_dot_ydb_dot_v1_dot_database__pb2
    # (name, behaviour, request deserializer, response serializer)
    method_table = (
        ('Get', servicer.Get,
         _svc.GetDatabaseRequest.FromString,
         _db.Database.SerializeToString),
        ('List', servicer.List,
         _svc.ListDatabasesRequest.FromString,
         _svc.ListDatabasesResponse.SerializeToString),
        ('Create', servicer.Create,
         _svc.CreateDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
        ('Update', servicer.Update,
         _svc.UpdateDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
        ('Start', servicer.Start,
         _svc.StartDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
        ('Stop', servicer.Stop,
         _svc.StopDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
        ('Delete', servicer.Delete,
         _svc.DeleteDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
        ('Restore', servicer.Restore,
         _svc.RestoreBackupRequest.FromString,
         _op.Operation.SerializeToString),
        ('Backup', servicer.Backup,
         _svc.BackupDatabaseRequest.FromString,
         _op.Operation.SerializeToString),
    )
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            behaviour,
            request_deserializer=deserializer,
            response_serializer=serializer,
        )
        for name, behaviour, deserializer, serializer in method_table
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'yandex.cloud.ydb.v1.DatabaseService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class DatabaseService(object):
    """A set of methods for managing databases.
    """

    _SERVICE_PREFIX = '/yandex.cloud.ydb.v1.DatabaseService/'

    @staticmethod
    def _call(method, request_serializer, response_deserializer,
              request, target, options, channel_credentials, call_credentials,
              insecure, compression, wait_for_ready, timeout, metadata):
        # Single trampoline onto the experimental unary-unary invocation API;
        # every public method below only differs by path and message types.
        return grpc.experimental.unary_unary(
            request, target, DatabaseService._SERVICE_PREFIX + method,
            request_serializer, response_deserializer,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout,
            metadata)

    @staticmethod
    def Get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Get',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.GetDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__pb2.Database.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def List(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'List',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.ListDatabasesRequest.SerializeToString,
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.ListDatabasesResponse.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Create(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Create',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.CreateDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Update(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Update',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.UpdateDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Start(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Start',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.StartDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Stop(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Stop',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.StopDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Delete(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Delete',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.DeleteDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Restore(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Restore',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.RestoreBackupRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Backup(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return DatabaseService._call(
            'Backup',
            yandex_dot_cloud_dot_ydb_dot_v1_dot_database__service__pb2.BackupDatabaseRequest.SerializeToString,
            yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString,
            request, target, options, channel_credentials, call_credentials,
            insecure, compression, wait_for_ready, timeout, metadata)
| 49.026163
| 139
| 0.690365
| 1,725
| 16,865
| 6.291014
| 0.074203
| 0.047272
| 0.073535
| 0.089292
| 0.899005
| 0.896148
| 0.887025
| 0.855511
| 0.802064
| 0.794324
| 0
| 0.009066
| 0.241328
| 16,865
| 343
| 140
| 49.169096
| 0.839078
| 0.043878
| 0
| 0.580986
| 1
| 0
| 0.078363
| 0.049601
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070423
| false
| 0
| 0.014085
| 0.03169
| 0.126761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
219c8d11c849f7180a6f9c65ac4006fe9829adca
| 1,454
|
py
|
Python
|
tests/test_1903.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1903.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1903.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 1903. Largest Odd Number in String
"""
@pytest.fixture(scope="session")
def init_variables_1903():
    """Yield a factory that always hands back one shared Solution instance."""
    # Import inside the fixture so collection doesn't fail when src is absent.
    from src.leetcode_1903_largest_odd_number_in_string import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass1903:
def test_solution_0(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("52") == "5"
def test_solution_1(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("4206") == ""
def test_solution_2(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("35427") == "35427"
#!/usr/bin/env python
import pytest
"""
Test 1903. Largest Odd Number in String
"""
@pytest.fixture(scope="session")
def init_variables_1903():
    """Provide a zero-argument callable returning a cached Solution."""
    # Deferred import keeps pytest collection independent of the src package.
    from src.leetcode_1903_largest_odd_number_in_string import Solution

    cached = Solution()
    yield lambda: cached
class TestClass1903:
def test_solution_0(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("52") == "5"
def test_solution_1(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("4206") == ""
def test_solution_2(self, init_variables_1903):
assert init_variables_1903().largestOddNumber("35427") == "35427"
| 23.079365
| 73
| 0.726272
| 180
| 1,454
| 5.511111
| 0.205556
| 0.235887
| 0.308468
| 0.127016
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.112769
| 0.170564
| 1,454
| 62
| 74
| 23.451613
| 0.709784
| 0.02751
| 0
| 1
| 0
| 0
| 0.036419
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.333333
| false
| 0
| 0.133333
| 0.066667
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
21a7912ad645002edd200c50f3836e73f80883ee
| 10,921
|
py
|
Python
|
tests/attribute_statement_data.py
|
tophatmonocle/pysaml2
|
ddbf8cdce5dd4d5a607e533d2af98422b69f89c5
|
[
"Apache-2.0"
] | null | null | null |
tests/attribute_statement_data.py
|
tophatmonocle/pysaml2
|
ddbf8cdce5dd4d5a607e533d2af98422b69f89c5
|
[
"Apache-2.0"
] | 1
|
2019-01-15T22:30:49.000Z
|
2019-01-15T22:30:49.000Z
|
tests/attribute_statement_data.py
|
tophatmonocle/pysaml2
|
ddbf8cdce5dd4d5a607e533d2af98422b69f89c5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Testdata for attribute converters """
# SAML 2.0 AttributeStatement using the "basic" attrname-format with
# urn:mace:dir attribute names; multi-valued attributes included.
# NOTE(review): string content is test data — must stay byte-identical.
STATEMENT1 = """<?xml version="1.0" encoding="utf-8"?>
<ns1:AttributeStatement xmlns:ns1="urn:oasis:names:tc:SAML:2.0:assertion">
<ns1:Attribute Name="urn:mace:dir:attribute-def:eduPersonPrincipalName"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>rohe0002@umu.se</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:eduPersonTargetedID"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>a139b2116ad1dd7b91c129a32a242fcc5fd9e821</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:displayName"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Hedberg, Roland</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:uid"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>rohe0002</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:eduPersonNickname"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>rohe0002</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:cn"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Roland Hedberg</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:eduPersonAffiliation"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>member</ns1:AttributeValue>
<ns1:AttributeValue>employee</ns1:AttributeValue>
<ns1:AttributeValue>staff</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:street"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Umeå universitet</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:postalCode"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>901 87</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:eduPersonScopedAffiliation"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>employee@umu.se</ns1:AttributeValue>
<ns1:AttributeValue>staff@umu.se</ns1:AttributeValue>
<ns1:AttributeValue>member@umu.se</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:physicalDeliveryOfficeName"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Västra flygeln, plan 4</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:employeeType"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>IT-arkitekt</ns1:AttributeValue>
<ns1:AttributeValue>övrig/annan befattning</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:ou"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Ladokenheten</ns1:AttributeValue>
<ns1:AttributeValue>IT-enheten</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:givenName"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Roland</ns1:AttributeValue>
</ns1:Attribute>
<ns1:Attribute Name="urn:mace:dir:attribute-def:sn"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<ns1:AttributeValue>Hedberg</ns1:AttributeValue>
</ns1:Attribute>
</ns1:AttributeStatement>"""
# SAML 2.0 AttributeStatement using the "uri" attrname-format with OID
# attribute names and xsi-typed values (saml2_tophat namespace prefix).
# NOTE(review): string content is test data — must stay byte-identical.
STATEMENT2 = """<?xml version="1.0" encoding="utf-8"?>
<saml2_tophat:AttributeStatement xmlns:saml2_tophat="urn:oasis:names:tc:SAML:2.0:assertion">
<saml2_tophat:Attribute FriendlyName="uid" Name="urn:oid:0.9.2342.19200300.100.1.1"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">demouser</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="surname"
Name="urn:oid:2.5.4.4"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">SWITCHaai</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="givenName"
Name="urn:oid:2.5.4.42"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">Demouser</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="eduPersonAffiliation"
Name="urn:oid:1.3.6.1.4.1.5923.1.1.1.1"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">staff</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="eduPersonEntitlement"
Name="urn:oid:1.3.6.1.4.1.5923.1.1.1.7"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">http://example.org/res/99999</saml2_tophat:AttributeValue>
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">http://publisher-xy.com/e-journals</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="mail"
Name="urn:oid:0.9.2342.19200300.100.1.3"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">demouser@example.org</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
</saml2_tophat:AttributeStatement>"""
# AttributeStatement mixing locally-named attributes (no NameFormat) with
# one OID/uri-format attribute — exercises converter fallback behaviour.
STATEMENT3 = """<?xml version='1.0' encoding='UTF-8'?>
<ns0:AttributeStatement xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion">
<ns0:Attribute Name="umuselin">
<ns0:AttributeValue>1234567890</ns0:AttributeValue>
</ns0:Attribute>
<ns0:Attribute Name="edupersonaffiliation">
<ns0:AttributeValue>staff</ns0:AttributeValue>
</ns0:Attribute>
<ns0:Attribute FriendlyName="surname" Name="urn:oid:2.5.4.4"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<ns0:AttributeValue>Hedberg</ns0:AttributeValue>
</ns0:Attribute>
<ns0:Attribute Name="uid">
<ns0:AttributeValue>roland</ns0:AttributeValue>
</ns0:Attribute>
<ns0:Attribute Name="givenname">
<ns0:AttributeValue>Roland</ns0:AttributeValue>
</ns0:Attribute>
</ns0:AttributeStatement>"""
# AttributeStatement with the "unspecified" attrname-format and xsi-typed
# string values — covers attributes that map to no known friendly name.
STATEMENT4 = """<?xml version='1.0' encoding='UTF-8'?>
<ns0:AttributeStatement xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ns0:Attribute Name="user_id" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified">
<ns0:AttributeValue xsi:type="xs:string">bob</ns0:AttributeValue>
</ns0:Attribute>
<ns0:Attribute Name="NameID" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified">
<ns0:AttributeValue xsi:type="xs:string">bobsnameagain</ns0:AttributeValue>
</ns0:Attribute>
</ns0:AttributeStatement>"""
# AttributeStatement deliberately mixing uri, basic, unspecified and an
# unknown ("example") attrname-format in a single statement.
# NOTE(review): string content is test data — must stay byte-identical.
STATEMENT_MIXED = """<?xml version="1.0" encoding="utf-8"?>
<saml2_tophat:AttributeStatement xmlns:saml2_tophat="urn:oasis:names:tc:SAML:2.0:assertion">
<saml2_tophat:Attribute FriendlyName="uid" Name="urn:oid:0.9.2342.19200300.100.1.1"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">demouser</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="swissEduPersonHomeOrganizationType"
Name="urn:oid:2.16.756.1.2.5.1.1.5"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">others</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute Name="urn:mace:dir:attribute-def:givenName"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<saml2_tophat:AttributeValue>Roland</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute Name="urn:mace:dir:attribute-def:sn"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
<saml2_tophat:AttributeValue>Hedberg</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute FriendlyName="eduPersonAffiliation"
Name="urn:oid:1.3.6.1.4.1.5923.1.1.1.1"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
<saml2_tophat:AttributeValue
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:type="xs:string">staff</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute Name="urn:example:com:foo"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:example">
<saml2_tophat:AttributeValue>Thing</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
<saml2_tophat:Attribute Name="user_id"
NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified">
<saml2_tophat:AttributeValue>bob</saml2_tophat:AttributeValue>
</saml2_tophat:Attribute>
</saml2_tophat:AttributeStatement>"""
| 54.064356
| 128
| 0.695449
| 1,395
| 10,921
| 5.399283
| 0.096057
| 0.087626
| 0.062135
| 0.071694
| 0.867897
| 0.858736
| 0.8282
| 0.805364
| 0.781731
| 0.752921
| 0
| 0.058509
| 0.143943
| 10,921
| 201
| 129
| 54.333333
| 0.747139
| 0.007051
| 0
| 0.565445
| 0
| 0.235602
| 0.989388
| 0.643721
| 0
| 0
| 0
| 0
| 0.026178
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
21e64f9b73502db4dc518ec37568bb53c68a19b4
| 5,813
|
py
|
Python
|
misc/human_evaluation.py
|
HaydenFaulkner/SAAT-edit
|
0b1c519d08e4d7cb4f6b441d00c63730be68f9b3
|
[
"MIT"
] | 2
|
2021-03-01T14:27:20.000Z
|
2021-11-17T21:54:07.000Z
|
misc/human_evaluation.py
|
HaydenFaulkner/SAAT-edit
|
0b1c519d08e4d7cb4f6b441d00c63730be68f9b3
|
[
"MIT"
] | null | null | null |
misc/human_evaluation.py
|
HaydenFaulkner/SAAT-edit
|
0b1c519d08e4d7cb4f6b441d00c63730be68f9b3
|
[
"MIT"
] | null | null | null |
import json
import os
from random import randrange
import statistics
import utils
if __name__ == '__main__':
    # Estimates a "human baseline" for captioning metrics: per clip, one
    # randomly sampled ground-truth caption is treated as the prediction and
    # scored against the remaining references; repeated `runs` times and
    # averaged. NOTE(review): indentation reconstructed from logic — the
    # dumps/eval are taken to be inside the per-run loop; confirm upstream.
    # setup paths
    cocofmt_file = os.path.join('../datasets', 'msvd', 'metadata', 'msvd_train_cocofmt.json')
    # cocofmt_file = os.path.join('datasets', 'msrvtt', 'metadata', 'msrvtt_test_cocofmt.json')
    tmp_file_gt = 'human_gt.json'
    tmp_file_pr = 'human_pr.json'
    # how many samples (runs) to draw and average over
    runs = 100  # 100 for MSVD
    # runs = 20  # 20 for MSRVTT
    # load the real ground truth
    gt = json.load(open(cocofmt_file))
    ids = [x['id'] for x in gt['images']]
    caps = dict()  # image_id -> list of reference captions
    for c in gt['annotations']:
        if c['image_id'] not in caps:
            caps[c['image_id']] = []
        caps[c['image_id']].append(c['caption'])
    # lets do random sampling numerous times and avg the results
    scores = dict()  # metric name -> list of per-run values
    for run in range(runs):
        print('run ', run)
        # initialise the predictions list and the fake 'groundtruth' where the predictions are left out
        predictions = list()
        gte = {'images': gt['images'], 'annotations': list(), 'type': 'captions', 'info': dict(), 'licenses': 'n/a'}
        # run through each clip
        for id in ids:
            # randomly select one of the ground truth captions for this clip
            sample_index = randrange(len(caps[id]))
            # append the caption to either the predictions or the fake groundtruth
            cap_id = 0
            for index, cap in enumerate(caps[id]):
                if index == sample_index:  # this is the 'predicted' caption
                    predictions.append({'image_id': id, 'caption': cap})
                else:  # this remains a groundtruth caption
                    gte['annotations'].append({'caption': cap, 'image_id': id, 'id': cap_id})
                    cap_id += 1
        # dump out the new groundtruth and prediction json files
        json.dump(gte, open(tmp_file_gt, 'w'))
        json.dump(predictions, open(tmp_file_pr, 'w'))
        # calculate the language stats
        lang_stats = utils.language_eval(tmp_file_gt, tmp_file_pr)
        for k, v in lang_stats.items():
            if k not in scores:
                scores[k] = list()
            scores[k].append(v)
    print('------------ scores after %d runs ------------' % runs)
    print(scores)
    # report mean and std-dev of each metric over the runs
    for k, v in scores.items():
        print(k, statistics.mean(v), statistics.stdev(v))
    if 0:  # disabled variant: compare training scores with overfitting
        ######################################## Compare training scores with overfitting
        # setup paths
        cocofmt_file = os.path.join('datasets', 'msvd', 'metadata', 'msvd_train_cocofmt.json')
        # cocofmt_file = os.path.join('datasets', 'msrvtt', 'metadata', 'msrvtt_train_cocofmt.json')
        tmp_file_gt = 'human_gt.json'
        tmp_file_pr = 'human_pr.json'
        # how many samples to run
        runs = 100  # 100 for MSVD
        # runs = 20  # 20 for MSRVTT
        # load the ids of the captions in our test-on-training run
        gtt = json.load(open('/media/hayden/Storage2/CODEBASE/SAAT-master/experiments/exp/train_best.json'))
        ids_td = [x['image_id'] for x in gtt]
        # load the real ground truth
        gt = json.load(open(cocofmt_file))
        ids = [x['id'] for x in gt['images']]
        caps = dict()
        for c in gt['annotations']:
            if c['image_id'] not in caps:
                caps[c['image_id']] = []
            caps[c['image_id']].append(c['caption'])
        # lets do random sampling numerous times and avg the results
        scores = dict()
        for run in range(runs):
            print('run ', run)
            # initialise the predictions list and the fake 'groundtruth' where the predictions are left out
            predictions = list()
            gte = {'images': gt['images'], 'annotations': list(), 'type': 'captions', 'info': dict(), 'licenses': 'n/a'}
            # run through each clip
            for id in ids_td:
                # randomly select one of the ground truth captions for this clip
                sample_index = randrange(len(caps[id]))
                sample_index_gt = sample_index
                # NOTE(review): loops until a different index is drawn — would
                # spin forever if a clip had only one caption; verify data.
                while sample_index_gt == sample_index:  # ensure diff one
                    sample_index_gt = randrange(len(caps[id]))
                # append the caption to either the predictions or the fake groundtruth
                cap_id = 0
                for index, cap in enumerate(caps[id]):
                    if index == sample_index:  # this is the 'predicted' caption
                        predictions.append({'image_id': id, 'caption': cap})
                    elif 1:  #index == sample_index_gt: # uncomment to do 1 v 1 comparisons
                        gte['annotations'].append({'caption': cap, 'image_id': id, 'id': cap_id})
                        cap_id += 1
            # dump out the new groundtruth and prediction json files
            json.dump(gte, open(tmp_file_gt, 'w'))
            json.dump(predictions, open(tmp_file_pr, 'w'))
            # calculate the language stats
            lang_stats = utils.language_eval(tmp_file_gt, tmp_file_pr)
            for k, v in lang_stats.items():
                if k not in scores:
                    scores[k] = list()
                scores[k].append(v)
        # score the model's own training-set captions for comparison
        # lang_stats = utils.language_eval(cocofmt_file, '/media/hayden/Storage2/CODEBASE/SAAT-master/experiments/exp/train_best.json') # tmp_file_pr)
        lang_stats = utils.language_eval(tmp_file_gt, '/media/hayden/Storage2/CODEBASE/SAAT-master/experiments/exp/train_best.json')  # tmp_file_pr)
        print('------------ scores after %d runs ------------' % runs)
        print(scores)
        for k, v in scores.items():
            print(k, statistics.mean(v), statistics.stdev(v), 'model: ', lang_stats[k])
| 41.820144
| 151
| 0.574574
| 752
| 5,813
| 4.303191
| 0.191489
| 0.032447
| 0.02225
| 0.021014
| 0.893078
| 0.87021
| 0.87021
| 0.87021
| 0.859394
| 0.859394
| 0
| 0.007585
| 0.296921
| 5,813
| 138
| 152
| 42.123188
| 0.784194
| 0.274213
| 0
| 0.752941
| 0
| 0
| 0.170089
| 0.047354
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.094118
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df01b75b38837399f88f1b56fe75b3a14194c754
| 13
|
py
|
Python
|
tests/test2.py
|
Sebastian-sudo/Python_interpreter
|
a26f29a354719323ba24772237d7bf9b932fdefa
|
[
"MIT"
] | null | null | null |
tests/test2.py
|
Sebastian-sudo/Python_interpreter
|
a26f29a354719323ba24772237d7bf9b932fdefa
|
[
"MIT"
] | null | null | null |
tests/test2.py
|
Sebastian-sudo/Python_interpreter
|
a26f29a354719323ba24772237d7bf9b932fdefa
|
[
"MIT"
] | null | null | null |
1+5
4<0
5==5
| 3.25
| 4
| 0.461538
| 6
| 13
| 1
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 0.230769
| 13
| 3
| 5
| 4.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
10d8ec881b965546b1d31dfa9d405003932ccfad
| 6,206
|
py
|
Python
|
main_task/models.py
|
briemadu/scorekeeping
|
032134cf7934076f671a000a3a0b87a0b5e47354
|
[
"MIT"
] | null | null | null |
main_task/models.py
|
briemadu/scorekeeping
|
032134cf7934076f671a000a3a0b87a0b5e47354
|
[
"MIT"
] | null | null | null |
main_task/models.py
|
briemadu/scorekeeping
|
032134cf7934076f671a000a3a0b87a0b5e47354
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2022 Madureira, Brielen
# SPDX-License-Identifier: MIT
"""
Models used to map a tuple with a probe sentence embedding and a dialogue
state representation into the corresponding task labels (combinations of true,
false, shared, private).
Three probing classifiers, which are NNs with:
- ShallowClassifier: one linear layer
- DeeperClassifier: two linear layers and a sigmoid in between
- DeepestClassifier: three linear layers with sigmoid and ReLU in between
All have a softmax function on top for classification.
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
class ShallowClassifier(nn.Module):
    """Probing classifier: a single linear layer with cross entropy loss."""

    def __init__(self, params, nlabels):
        """
        Args:
            params (dataclass): Experiment parameters.
            nlabels (int): Number of classification labels.
        """
        super().__init__()
        # Seed before creating the layer so weight init is reproducible.
        torch.manual_seed(params.random_seed)
        combined_dim = params.probe_dim + params.dialogue_rep_dim
        self.decoder = nn.Linear(combined_dim, nlabels)

    def forward(self, representations, probes):
        """Map (dialogue state, probe) pairs to label scores and predictions.

        The dialogue state representations (dimension S) and probe
        embeddings (dimension P) are concatenated row-wise, projected by a
        single linear layer, and the argmax over the log-softmax gives the
        predicted label. The raw scores are returned for use with a cross
        entropy loss.

        Args:
            representations (torch.Tensor):
                Batch of dialogue state representations, dim=(N, S).
            probes (torch.Tensor):
                Batch of probe embeddings, dim=(N, P).

        Returns:
            tuple: Output scores and predicted labels.
            (torch.Tensor dim=(N, nlabels), torch.Tensor dim=N)
        """
        joint = torch.cat((representations, probes), dim=1)  # dim=(N, S+P)
        scores = self.decoder(joint)  # dim=(N, nlabels)
        predicted = F.log_softmax(scores, dim=1).argmax(dim=1)
        return scores, predicted
class DeeperClassifier(nn.Module):
    """Probing classifier with two linear layers and cross entropy loss."""

    def __init__(self, params, nlabels):
        """
        Args:
            params (dataclass): Experiment parameters.
            nlabels (int): Number of classification labels.
        """
        super().__init__()
        torch.manual_seed(params.random_seed)
        in_dim = params.probe_dim + params.dialogue_rep_dim
        self.layers = nn.Sequential(
            nn.Linear(in_dim, params.hidden_dim),
            nn.Sigmoid(),
            nn.Dropout(params.dropout),
            nn.Linear(params.hidden_dim, nlabels),
        )

    def forward(self, representations, probes):
        """Map dialogue states and probe embeddings to label scores.

        Dialogue state representations (vectors of dimension S) and probe
        embeddings (vectors of dimension P) are concatenated and sent through
        the two-layer network; the predicted label is the argmax of the
        (log-)softmax over the resulting scores. Inputs are batches of N
        datapoints.

        Args:
            representations (torch.Tensor):
                Batch with dialogue state representations, dim=(N, S).
            probes (torch.Tensor):
                Batch with probes embeddings, dim=(N, P).

        Returns:
            tuple: Output scores and predicted labels.
                (torch.Tensor dim=(batch, nlabels), torch.Tensor dim=N)
        """
        combined = torch.cat((representations, probes), dim=1)
        scores = self.layers(combined)
        predicted = torch.argmax(F.log_softmax(scores, dim=1), dim=1)
        return scores, predicted
class DeepestClassifier(nn.Module):
    """Probing classifier with three linear layers and cross entropy loss."""

    def __init__(self, params, nlabels):
        """
        Args:
            params (dataclass): Experiment parameters.
            nlabels (int): Number of classification labels.
        """
        super().__init__()
        torch.manual_seed(params.random_seed)
        in_dim = params.probe_dim + params.dialogue_rep_dim
        self.layers = nn.Sequential(
            nn.Linear(in_dim, params.hidden_dim),
            nn.ReLU(),
            nn.Dropout(params.dropout),
            nn.Linear(params.hidden_dim, params.hidden_dim_2),
            nn.Sigmoid(),
            nn.Dropout(params.dropout),
            nn.Linear(params.hidden_dim_2, nlabels),
        )

    def forward(self, representations, probes):
        """Map dialogue states and probe embeddings to label scores.

        Dialogue state representations (vectors of dimension S) and probe
        embeddings (vectors of dimension P) are concatenated and sent through
        the three-layer network; the predicted label is the argmax of the
        (log-)softmax over the resulting scores. Inputs are batches of N
        datapoints.

        Args:
            representations (torch.Tensor):
                Batch with dialogue state representations, dim=(N, S).
            probes (torch.Tensor):
                Batch with probes embeddings, dim=(N, P).

        Returns:
            tuple: Output scores and predicted labels.
                (torch.Tensor dim=(batch, nlabels), torch.Tensor dim=N)
        """
        combined = torch.cat((representations, probes), dim=1)
        scores = self.layers(combined)
        predicted = torch.argmax(F.log_softmax(scores, dim=1), dim=1)
        return scores, predicted
| 35.462857
| 78
| 0.635836
| 737
| 6,206
| 5.274084
| 0.192673
| 0.033959
| 0.064831
| 0.035503
| 0.810908
| 0.810908
| 0.804734
| 0.804734
| 0.804734
| 0.793157
| 0
| 0.008522
| 0.281502
| 6,206
| 174
| 79
| 35.666667
| 0.863198
| 0.558975
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.061224
| 0
| 0.306122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3377334112a263e730f9be22b7aaf82f8c9d5d97
| 374
|
py
|
Python
|
recipe_scrapers/_arm_test_utils.py
|
pherodeon/recipe-scrapers
|
816ee1cfd777149efff60ca01d377ab5e141e24b
|
[
"MIT"
] | null | null | null |
recipe_scrapers/_arm_test_utils.py
|
pherodeon/recipe-scrapers
|
816ee1cfd777149efff60ca01d377ab5e141e24b
|
[
"MIT"
] | null | null | null |
recipe_scrapers/_arm_test_utils.py
|
pherodeon/recipe-scrapers
|
816ee1cfd777149efff60ca01d377ab5e141e24b
|
[
"MIT"
] | null | null | null |
from _utils import get_minutes

# Smoke-test get_minutes on minute-only and hour+minute strings, with and
# without internal spaces and leading whitespace.
SAMPLES = [
    '4 min',
    ' 4min',
    ' 4 min',
    '1h4 min',
    '1h 4 min',
    '1h4min',
    '1h 4min',
    ' 1h4 min',
    ' 1h 4 min',
    ' 1h4min',
    ' 1h 4min',
]

for sample in SAMPLES:
    print(get_minutes(sample))
| 23.375
| 32
| 0.703209
| 59
| 374
| 4.237288
| 0.20339
| 0.48
| 0.66
| 0.432
| 0.884
| 0.884
| 0.884
| 0.884
| 0.672
| 0.672
| 0
| 0.057751
| 0.120321
| 374
| 15
| 33
| 24.933333
| 0.702128
| 0
| 0
| 0.916667
| 0
| 0
| 0.204301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.083333
| 0
| 0.083333
| 0.916667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
338251755e15095e795c640b79b27a5850132b72
| 158
|
py
|
Python
|
ibsng/handler/util/get_d_r_b_d_status.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 6
|
2018-03-06T10:16:36.000Z
|
2021-12-05T12:43:10.000Z
|
ibsng/handler/util/get_d_r_b_d_status.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-03-06T10:27:08.000Z
|
2022-01-02T15:21:27.000Z
|
ibsng/handler/util/get_d_r_b_d_status.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-01-06T16:28:31.000Z
|
2018-09-17T19:47:19.000Z
|
"""Get drbd status API method."""
from ibsng.handler.handler import Handler
class getDRBDStatus(Handler):
    """Handler for the "get DRBD status" API method.

    All behaviour is inherited from :class:`Handler`; this subclass only
    binds the method name.
    """
| 17.555556
| 41
| 0.702532
| 20
| 158
| 5.55
| 0.6
| 0.126126
| 0.234234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177215
| 158
| 8
| 42
| 19.75
| 0.853846
| 0.360759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
33a7a738d32dc6ef0baf5a60ad8b08051af6b5fe
| 43,483
|
py
|
Python
|
model.py
|
cmiras/BSL-segmentation
|
35a66d6c758dcf4734adb0ebc5a40ea7238d7a1d
|
[
"MIT"
] | null | null | null |
model.py
|
cmiras/BSL-segmentation
|
35a66d6c758dcf4734adb0ebc5a40ea7238d7a1d
|
[
"MIT"
] | null | null | null |
model.py
|
cmiras/BSL-segmentation
|
35a66d6c758dcf4734adb0ebc5a40ea7238d7a1d
|
[
"MIT"
] | null | null | null |
import os
import time
import copy
import pickle
import json
from math import ceil
from pathlib import Path
import datetime
from tqdm import tqdm
import numpy as np
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.utils.tensorboard import SummaryWriter
from utils import Bar
from utils.viz import viz_results_paper
from utils.averagemeter import AverageMeter
from utils.utils import torch_to_list, get_num_signs
from eval import Metric
class MultiStageModel(nn.Module):
    """MS-TCN style stack: one feature stage plus prediction-refining stages."""

    def __init__(self, num_stages, num_layers, num_f_maps, dim, num_classes):
        super(MultiStageModel, self).__init__()
        self.num_classes = num_classes
        # first stage consumes raw features; later stages refine predictions
        self.stage1 = SingleStageModel(num_layers, num_f_maps, dim, num_classes)
        refiners = [
            copy.deepcopy(
                SingleStageModel(num_layers, num_f_maps, num_classes, num_classes))
            for _ in range(num_stages - 1)
        ]
        self.stages = nn.ModuleList(refiners)

    def forward(self, x, mask):
        """Return the per-stage predictions stacked along a new dim 0."""
        out = self.stage1(x, mask)
        stage_outputs = [out.unsqueeze(0)]
        for stage in self.stages:
            # squash previous-stage scores before feeding the next stage
            if self.num_classes == 1:
                squashed = torch.sigmoid(out)
            else:
                squashed = F.softmax(out, dim=1)
            out = stage(squashed * mask[:, 0:1, :], mask)
            stage_outputs.append(out.unsqueeze(0))
        return torch.cat(stage_outputs, dim=0)
class ASFormerMultiStageModel(nn.Module):
    """ASFormer variant: encoder stage plus decoder refinement stages."""

    def __init__(self, device, num_stages, num_layers, num_f_maps, dim, num_classes):
        super(ASFormerMultiStageModel, self).__init__()
        self.num_classes = num_classes
        # classification and boundary heads applied to encoder features
        self.conv_cls = nn.Conv1d(num_f_maps, num_classes, 1)
        self.conv_bound = nn.Conv1d(num_f_maps, 1, 1)
        self.stage1 = ASFormerSingleStageModel(device, num_layers, num_f_maps, dim, num_classes)
        refiners = [
            copy.deepcopy(Decoder(device, num_layers, 2, 2, num_f_maps,
                                  num_classes, num_classes,
                                  att_type='sliding_att',
                                  alpha=exponential_descrease(s)))
            for s in range(num_stages - 1)
        ]
        self.stages = nn.ModuleList(refiners)

    def forward(self, x, mask):
        """Return per-stage class predictions stacked along a new dim 0."""
        feature = self.stage1(x, mask)
        out_cls = self.conv_cls(feature)
        stage_outputs = [out_cls.unsqueeze(0)]
        for decoder in self.stages:
            # squash previous-stage scores before the next refinement pass
            if self.num_classes == 1:
                prev = torch.sigmoid(out_cls)
            else:
                prev = F.softmax(out_cls, dim=1)
            out_cls, _ = decoder(prev * mask[:, 0:1, :], feature * mask[:, 0:1, :], mask)
            stage_outputs.append(out_cls.unsqueeze(0))
        return torch.cat(stage_outputs, dim=0)
class SingleStageModel(nn.Module):
    """One TCN stage: 1x1 in-projection, dilated residual layers, 1x1 head."""

    def __init__(self, num_layers, num_f_maps, dim, num_classes):
        super(SingleStageModel, self).__init__()
        self.num_classes = num_classes
        self.conv_1x1 = nn.Conv1d(dim, num_f_maps, 1)
        dilated = [
            copy.deepcopy(DilatedResidualLayer(2 ** i, num_f_maps, num_f_maps))
            for i in range(num_layers)  # dilation doubles per layer
        ]
        self.layers = nn.ModuleList(dilated)
        self.conv_out = nn.Conv1d(num_f_maps, num_classes, 1)

    def forward(self, x, mask):
        """Return class scores for each frame, zeroed on padded frames."""
        feature = self.conv_1x1(x)
        for residual_layer in self.layers:
            feature = residual_layer(feature, mask)
        return self.conv_out(feature) * mask[:, 0:1, :]
class ASFormerSingleStageModel(nn.Module):
    """Wraps the ASFormer encoder as a single feature-producing stage."""

    def __init__(self, device, num_layers, num_f_maps, dim, num_classes):
        super(ASFormerSingleStageModel, self).__init__()
        self.num_classes = num_classes
        # NOTE(review): not used in forward; kept so parameter/state-dict
        # layout and RNG consumption match the original implementation.
        self.conv_1x1 = nn.Conv1d(dim, num_f_maps, 1)
        self.layers = Encoder(device, num_layers, 2, 2, num_f_maps, dim,
                              num_classes, 0.3, att_type='sliding_att', alpha=1)

    def forward(self, x, mask):
        # the encoder consumes the raw input directly
        return self.layers(x, mask)
class DilatedResidualLayer(nn.Module):
    """Dilated temporal conv + 1x1 conv with a residual (skip) connection."""

    def __init__(self, dilation, in_channels, out_channels):
        super(DilatedResidualLayer, self).__init__()
        # 'same' padding: output length equals input length for kernel 3
        self.conv_dilated = nn.Conv1d(in_channels, out_channels, 3,
                                      padding=dilation, dilation=dilation)
        self.conv_1x1 = nn.Conv1d(out_channels, out_channels, 1)
        self.dropout = nn.Dropout()

    def forward(self, x, mask):
        residual = self.dropout(self.conv_1x1(F.relu(self.conv_dilated(x))))
        # add skip connection, then zero out padded frames
        return (x + residual) * mask[:, 0:1, :]
def exponential_descrease(idx_decoder, p=3):
    """Exponentially decaying weight exp(-p * idx_decoder) for decoder index."""
    return math.exp(-p * idx_decoder)
class AttentionHelper(nn.Module):
    """Scaled dot-product attention with padding masking, shared by layers."""

    def __init__(self):
        super(AttentionHelper, self).__init__()
        self.softmax = nn.Softmax(dim=-1)

    def scalar_dot_att(self, proj_query, proj_key, proj_val, padding_mask):
        """
        Scaled dot-product attention.

        :param proj_query: shape (B, C, L1)
        :param proj_key: shape (B, C, L2)
        :param proj_val: shape (B, C, L2)
        :param padding_mask: mask broadcastable over the (B, L1, L2) scores
        :return: tuple (attended values of shape (B, C, L1),
                 attention weights permuted to (B, L2, L1))
        """
        _, c_q, _ = proj_query.shape
        _, c_k, _ = proj_key.shape
        assert c_q == c_k
        # (B, L1, L2) similarity scores, scaled by sqrt(channel dim)
        energy = torch.bmm(proj_query.permute(0, 2, 1), proj_key)
        scores = energy / np.sqrt(c_q)
        # push padded positions towards -inf before the softmax
        scores = scores + torch.log(padding_mask + 1e-6)
        attention = self.softmax(scores) * padding_mask
        attention = attention.permute(0, 2, 1)
        out = torch.bmm(proj_val, attention)
        return out, attention
class AttLayer(nn.Module):
    """Single-head attention layer used by ASFormer blocks.

    Three variants are selected by ``att_type``:
      - 'normal_att': full attention over the whole sequence,
      - 'block_att': attention within independent non-overlapping blocks of
        length ``bl``,
      - 'sliding_att': each length-``bl`` block of queries attends to a
        window of length ~2*``bl`` centred on it.
    In the 'decoder' stage the values come from x2 (cross-attention);
    otherwise queries, keys and values all come from x1 (self-attention).
    """

    def __init__(self, device,q_dim, k_dim, v_dim, r1, r2, r3, bl, stage, att_type): # r1 = r2
        super(AttLayer, self).__init__()
        # 1x1 convs project query/key/value down to dim // r* channels
        self.query_conv = nn.Conv1d(in_channels=q_dim, out_channels=q_dim // r1, kernel_size=1)
        self.key_conv = nn.Conv1d(in_channels=k_dim, out_channels=k_dim // r2, kernel_size=1)
        self.value_conv = nn.Conv1d(in_channels=v_dim, out_channels=v_dim // r3, kernel_size=1)
        # projects attended values back up to v_dim
        self.conv_out = nn.Conv1d(in_channels=v_dim // r3, out_channels=v_dim, kernel_size=1)
        self.device=device
        self.bl = bl  # block / window length
        self.stage = stage
        self.att_type = att_type
        assert self.att_type in ['normal_att', 'block_att', 'sliding_att']
        assert self.stage in ['encoder','decoder']
        self.att_helper = AttentionHelper()
        self.window_mask = self.construct_window_mask()

    def construct_window_mask(self):
        '''
        Construct the sliding-window mask of shape (1, l, l + l//2 + l//2):
        row i marks positions [i, i + bl) of the padded window as attendable.
        '''
        window_mask = torch.zeros((1, self.bl, self.bl + 2* (self.bl //2)))
        for i in range(self.bl):
            window_mask[:, :, i:i+self.bl] = 1
        return window_mask.to(self.device)

    def forward(self, x1, x2, mask):
        # x1 from the encoder
        # x2 from the decoder (only used as values in the 'decoder' stage)
        query = self.query_conv(x1)
        key = self.key_conv(x1)
        if self.stage == 'decoder':
            assert x2 is not None
            value = self.value_conv(x2)
        else:
            value = self.value_conv(x1)
        # dispatch on the configured attention variant (asserted in __init__)
        if self.att_type == 'normal_att':
            return self._normal_self_att(query, key, value, mask)
        elif self.att_type == 'block_att':
            return self._block_wise_self_att(query, key, value, mask)
        elif self.att_type == 'sliding_att':
            return self._sliding_window_self_att(query, key, value, mask)

    def _normal_self_att(self,q,k,v, mask):
        # Full attention over the sequence; padded frames are excluded
        # through padding_mask.
        m_batchsize, c1, L = q.size()
        _,c2,L = k.size()
        _,c3,L = v.size()
        padding_mask = torch.ones((m_batchsize, 1, L)).to(self.device) * mask[:,0:1,:]
        output, attentions = self.att_helper.scalar_dot_att(q, k, v, padding_mask)
        output = self.conv_out(F.relu(output))
        output = output[:, :, 0:L]
        return output * mask[:, 0:1, :]

    def _block_wise_self_att(self, q,k,v, mask):
        # Attention restricted to independent, non-overlapping blocks of
        # length self.bl; the sequence is zero-padded to a multiple of bl.
        m_batchsize, c1, L = q.size()
        _,c2,L = k.size()
        _,c3,L = v.size()
        nb = L // self.bl
        if L % self.bl != 0:
            q = torch.cat([q, torch.zeros((m_batchsize, c1, self.bl - L % self.bl)).to(self.device)], dim=-1)
            k = torch.cat([k, torch.zeros((m_batchsize, c2, self.bl - L % self.bl)).to(self.device)], dim=-1)
            v = torch.cat([v, torch.zeros((m_batchsize, c3, self.bl - L % self.bl)).to(self.device)], dim=-1)
            nb += 1
        padding_mask = torch.cat([torch.ones((m_batchsize, 1, L)).to(self.device) * mask[:,0:1,:], torch.zeros((m_batchsize, 1, self.bl * nb - L)).to(self.device)],dim=-1)
        # fold blocks into the batch dimension: (m*nb, c, bl)
        q = q.reshape(m_batchsize, c1, nb, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize * nb, c1, self.bl)
        padding_mask = padding_mask.reshape(m_batchsize, 1, nb, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize * nb,1, self.bl)
        k = k.reshape(m_batchsize, c2, nb, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize * nb, c2, self.bl)
        v = v.reshape(m_batchsize, c3, nb, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize * nb, c3, self.bl)
        output, attentions = self.att_helper.scalar_dot_att(q, k, v, padding_mask)
        output = self.conv_out(F.relu(output))
        # unfold blocks back to the full (padded) sequence, then crop to L
        output = output.reshape(m_batchsize, nb, c3, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize, c3, nb * self.bl)
        output = output[:, :, 0:L]
        return output * mask[:, 0:1, :]

    def _sliding_window_self_att(self, q,k,v, mask):
        # block operation
        m_batchsize, c1, L = q.size()
        _, c2, _ = k.size()
        _, c3, _ = v.size()
        # assert m_batchsize == 1
        # currently, we only accept input with batch size 1
        # padding zeros for the last segment
        nb = L // self.bl
        if L % self.bl != 0:
            q = torch.cat([q, torch.zeros((m_batchsize, c1, self.bl - L % self.bl)).to(self.device)], dim=-1)
            k = torch.cat([k, torch.zeros((m_batchsize, c2, self.bl - L % self.bl)).to(self.device)], dim=-1)
            v = torch.cat([v, torch.zeros((m_batchsize, c3, self.bl - L % self.bl)).to(self.device)], dim=-1)
            nb += 1
        padding_mask = torch.cat([torch.ones((m_batchsize, 1, L)).to(self.device) * mask[:,0:1,:], torch.zeros((m_batchsize, 1, self.bl * nb - L)).to(self.device)],dim=-1)
        # sliding window approach, by splitting query_proj and key_proj into shape (c1, l) x (c1, 2l)
        # sliding window for query_proj: reshape
        q = q.reshape(m_batchsize, c1, nb, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize * nb, c1, self.bl)
        # sliding window approach for key_proj
        # 1. add paddings at the start and end
        k = torch.cat([torch.zeros(m_batchsize, c2, self.bl // 2).to(self.device), k, torch.zeros(m_batchsize, c2, self.bl // 2).to(self.device)], dim=-1)
        v = torch.cat([torch.zeros(m_batchsize, c3, self.bl // 2).to(self.device), v, torch.zeros(m_batchsize, c3, self.bl // 2).to(self.device)], dim=-1)
        padding_mask = torch.cat([torch.zeros(m_batchsize, 1, self.bl // 2).to(self.device), padding_mask, torch.zeros(m_batchsize, 1, self.bl // 2).to(self.device)], dim=-1)
        # 2. reshape key_proj of shape (m_batchsize*nb, c1, 2*self.bl)
        k = torch.cat([k[:,:, i*self.bl:(i+1)*self.bl+(self.bl//2)*2] for i in range(nb)], dim=0) # special case when self.bl = 1
        v = torch.cat([v[:,:, i*self.bl:(i+1)*self.bl+(self.bl//2)*2] for i in range(nb)], dim=0)
        # 3. construct window mask of shape (1, l, 2l), and use it to generate final mask
        padding_mask = torch.cat([padding_mask[:,:, i*self.bl:(i+1)*self.bl+(self.bl//2)*2] for i in range(nb)], dim=0) # of shape (m*nb, 1, 2l)
        final_mask = self.window_mask.repeat(m_batchsize * nb, 1, 1) * padding_mask
        output, attention = self.att_helper.scalar_dot_att(q, k, v, final_mask)
        output = self.conv_out(F.relu(output))
        # unfold window blocks back to the full (padded) sequence, crop to L
        output = output.reshape(m_batchsize, nb, -1, self.bl).permute(0, 2, 1, 3).reshape(m_batchsize, -1, nb * self.bl)
        output = output[:, :, 0:L]
        return output * mask[:, 0:1, :]
class MultiHeadAttLayer(nn.Module):
    """Runs num_head independent AttLayers, concatenates and projects back."""

    def __init__(self, device, q_dim, k_dim, v_dim, r1, r2, r3, bl, stage, att_type, num_head):
        super(MultiHeadAttLayer, self).__init__()
        # assert v_dim % num_head == 0
        self.conv_out = nn.Conv1d(v_dim * num_head, v_dim, 1)
        heads = [
            copy.deepcopy(AttLayer(device, q_dim, k_dim, v_dim,
                                   r1, r2, r3, bl, stage, att_type))
            for _ in range(num_head)
        ]
        self.layers = nn.ModuleList(heads)
        self.dropout = nn.Dropout(p=0.5)

    def forward(self, x1, x2, mask):
        per_head = [head(x1, x2, mask) for head in self.layers]
        stacked = torch.cat(per_head, dim=1)
        return self.conv_out(self.dropout(stacked))
class ConvFeedForward(nn.Module):
    """Dilated temporal convolution followed by a ReLU."""

    def __init__(self, dilation, in_channels, out_channels):
        super(ConvFeedForward, self).__init__()
        # 'same' padding for the kernel-3 dilated convolution
        conv = nn.Conv1d(in_channels, out_channels, 3,
                         padding=dilation, dilation=dilation)
        self.layer = nn.Sequential(conv, nn.ReLU())

    def forward(self, x):
        out = self.layer(x)
        return out
class FCFeedForward(nn.Module):
    """Position-wise feed-forward block built from 1x1 convolutions."""

    def __init__(self, in_channels, out_channels):
        super(FCFeedForward, self).__init__()
        # a conv1d with kernel size 1 acts as a per-frame fully-connected layer
        fc_in = nn.Conv1d(in_channels, out_channels, 1)
        fc_out = nn.Conv1d(out_channels, out_channels, 1)
        self.layer = nn.Sequential(fc_in, nn.ReLU(), nn.Dropout(), fc_out)

    def forward(self, x):
        return self.layer(x)
class AttModule(nn.Module):
    """Residual block: dilated feed-forward + alpha-weighted attention."""

    def __init__(self, device, dilation, in_channels, out_channels, r1, r2, att_type, stage, alpha):
        super(AttModule, self).__init__()
        self.feed_forward = ConvFeedForward(dilation, in_channels, out_channels)
        self.instance_norm = nn.InstanceNorm1d(in_channels, track_running_stats=False)
        # the attention block/window length is tied to the dilation here
        self.att_layer = AttLayer(device, in_channels, in_channels, out_channels,
                                  r1, r1, r2, dilation,
                                  att_type=att_type, stage=stage)
        self.conv_1x1 = nn.Conv1d(out_channels, out_channels, 1)
        self.dropout = nn.Dropout()
        self.alpha = alpha

    def forward(self, x, f, mask):
        ff_out = self.feed_forward(x)
        att_out = self.att_layer(self.instance_norm(ff_out), f, mask)
        mixed = self.alpha * att_out + ff_out
        mixed = self.dropout(self.conv_1x1(mixed))
        # residual connection, zeroed on padded frames
        return (x + mixed) * mask[:, 0:1, :]
class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding stored as a trainable parameter."""

    def __init__(self, d_model, max_len=10000):
        super(PositionalEncoding, self).__init__()
        # build the standard sin/cos table once, computed in log space
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2)
                             * -(math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        # reshape to (1, d_model, max_len) to match (N, C, L) inputs
        pe = pe.unsqueeze(0).permute(0, 2, 1)
        # unlike the classic fixed buffer, this table is fine-tuned with the model
        self.pe = nn.Parameter(pe, requires_grad=True)

    def forward(self, x):
        # add the encoding for the first x.shape[2] time steps
        return x + self.pe[:, :, 0:x.shape[2]]
class Encoder(nn.Module):
    """ASFormer encoder: 1x1 input projection + a stack of AttModules.

    Returns the final feature map; the classification head is handled by
    the caller.
    """

    def __init__(self, device, num_layers, r1, r2, num_f_maps, input_dim, num_classes, channel_masking_rate, att_type, alpha):
        super(Encoder, self).__init__()
        self.conv_1x1 = nn.Conv1d(input_dim, num_f_maps, 1)  # acts as an fc layer
        modules = [
            AttModule(device, 2 ** i, num_f_maps, num_f_maps, r1, r2,
                      att_type, 'encoder', alpha)
            for i in range(num_layers)  # dilation doubles per layer
        ]
        self.layers = nn.ModuleList(modules)
        # Dropout2d on an (N, C, 1, L) view zeroes whole input channels
        self.dropout = nn.Dropout2d(p=channel_masking_rate)
        self.channel_masking_rate = channel_masking_rate

    def forward(self, x, mask):
        '''
        :param x: input features of shape (N, C, L)
        :param mask: frame validity mask
        :return: encoded feature map
        '''
        if self.channel_masking_rate > 0:
            # channel masking: randomly drop entire input feature channels
            x = self.dropout(x.unsqueeze(2)).squeeze(2)
        feature = self.conv_1x1(x)
        for att_module in self.layers:
            feature = att_module(feature, None, mask)
        return feature
class Decoder(nn.Module):
    """ASFormer decoder: refines predictions with cross-attention to encoder features."""

    def __init__(self, device, num_layers, r1, r2, num_f_maps, input_dim, num_classes, att_type, alpha):
        super(Decoder, self).__init__()
        self.conv_1x1 = nn.Conv1d(input_dim, num_f_maps, 1)
        modules = [
            AttModule(device, 2 ** i, num_f_maps, num_f_maps, r1, r2,
                      att_type, 'decoder', alpha)
            for i in range(num_layers)  # dilation doubles per layer
        ]
        self.layers = nn.ModuleList(modules)
        self.conv_out = nn.Conv1d(num_f_maps, num_classes, 1)

    def forward(self, x, fencoder, mask):
        """Return (class scores masked to valid frames, refined feature map)."""
        feature = self.conv_1x1(x)
        for att_module in self.layers:
            feature = att_module(feature, fencoder, mask)
        out = self.conv_out(feature) * mask[:, 0:1, :]
        return out, feature
class MyTransformer(nn.Module):
    """Full ASFormer model: one encoder followed by refining decoders.

    NOTE(review): ``Encoder.forward`` in this file returns a single tensor
    (its classification head is commented out), so the two-value unpacking
    ``out, feature = self.encoder(...)`` below looks stale — confirm whether
    this class is still used before relying on it.
    """
    def __init__(self,device, num_decoders, num_layers, r1, r2, num_f_maps, input_dim, num_classes, channel_masking_rate):
        super(MyTransformer, self).__init__()
        self.encoder = Encoder(device,num_layers, r1, r2, num_f_maps, input_dim, num_classes, channel_masking_rate, att_type='sliding_att', alpha=1)
        # decoder s gets an exponentially decreasing attention weight alpha
        self.decoders = nn.ModuleList([copy.deepcopy(Decoder(device,num_layers, r1, r2, num_f_maps, num_classes, num_classes, att_type='sliding_att', alpha=exponential_descrease(s))) for s in range(num_decoders)]) # num_decoders
        self.activation = nn.Softmax(dim=1)
    def forward(self, x, mask):
        # collects softmax-activated outputs of the encoder and every decoder
        outputs = []
        out, feature = self.encoder(x, mask)
        outputs.append(self.activation(out))
        for decoder in self.decoders:
            out, feature = decoder(F.softmax(out, dim=1) * mask[:, 0:1, :], feature* mask[:, 0:1, :], mask)
            outputs.append(self.activation(out))
        return outputs
class ASFormerTrainer:
    """Training / inference driver for ASFormerMultiStageModel.

    Combines a per-stage classification (or regression) loss with a 0.15-weighted
    truncated-MSE temporal smoothing term, logs metrics to TensorBoard, and
    saves checkpoints and result JSON files under ``save_dir``.
    """

    def __init__(self, num_blocks, num_layers, num_f_maps, dim, num_classes, device, weights, save_dir):
        """Build the model, losses and TensorBoard writer.

        ``weights`` (or None) are per-class weights for the cross entropy.
        """
        self.model = ASFormerMultiStageModel(device,num_blocks, num_layers, num_f_maps, dim, num_classes)
        if weights is None:
            self.ce = nn.CrossEntropyLoss(ignore_index=-100)
        else:
            self.ce = nn.CrossEntropyLoss(weight=torch.tensor(weights).to(device), ignore_index=-100)
        # per-element MSE for the smoothing term; reduced MSE for regression
        self.mse = nn.MSELoss(reduction='none')
        self.mse_red = nn.MSELoss(reduction='mean')
        self.sm = nn.Softmax(dim=1)
        self.num_classes = num_classes
        self.writer = SummaryWriter(log_dir=f'{save_dir}/logs')
        self.global_counter = 0
        self.train_result_dict = {}
        self.test_result_dict = {}

    def train(self, save_dir, batch_gen, num_epochs, batch_size, learning_rate, device, eval_args, pretrained=''):
        """Train for ``num_epochs``, checkpointing and evaluating each epoch.

        ``eval_args`` is the positional-argument list for :meth:`predict`;
        slots 1 (model path) and 7 (epoch) are overwritten per epoch.
        """
        self.model.train()
        self.model.to(device)
        # load pretrained model
        if pretrained != '':
            pretrained_dict = torch.load(pretrained)
            self.model.load_state_dict(pretrained_dict)
        optimizer = optim.Adam(self.model.parameters(), lr=learning_rate)
        for epoch in range(num_epochs):
            epoch_loss = 0
            end = time.time()
            batch_time = AverageMeter()
            data_time = AverageMeter()  # NOTE(review): updated nowhere — kept for compatibility
            bar = Bar("E%d" % (epoch + 1), max=batch_gen.get_max_index())
            count = 0
            get_metrics_train = Metric('train')
            while batch_gen.has_next():
                self.global_counter += 1
                batch_input, batch_target, batch_target_eval, mask = batch_gen.next_batch(batch_size)
                batch_input, batch_target, batch_target_eval, mask = batch_input.to(device), batch_target.to(device), batch_target_eval.to(device), mask.to(device)
                optimizer.zero_grad()
                predictions = self.model(batch_input, mask)
                loss = 0
                # loss for each stage
                for ix, p in enumerate(predictions):
                    if self.num_classes == 1:
                        # regression head: plain MSE against the target signal
                        loss += self.mse_red(p.transpose(2, 1).contiguous().view(-1, self.num_classes).squeeze(), batch_target.view(-1))
                    else:
                        loss += self.ce(p.transpose(2, 1).contiguous().view(-1, self.num_classes), batch_target.view(-1))
                        # truncated-MSE smoothing between consecutive frames
                        loss += 0.15*torch.mean(torch.clamp(self.mse(F.log_softmax(p[:, :, 1:], dim=1), F.log_softmax(p.detach()[:, :, :-1], dim=1)), min=0, max=16)*mask[:, :, 1:])
                epoch_loss += loss.item()
                loss.backward()
                optimizer.step()
                # hard predictions from the last stage for metric computation
                if self.num_classes == 1:
                    predicted = torch.round(predictions[-1].data.squeeze())
                    gt = torch.round(batch_target)
                    gt_eval = batch_target_eval
                else:
                    _, predicted = torch.max(predictions[-1].data, 1)
                    gt = batch_target
                    gt_eval = batch_target_eval
                get_metrics_train.calc_scores_per_batch(predicted, gt, gt_eval, mask)
                # measure elapsed time
                batch_time.update(time.time() - end)
                end = time.time()
                # plot progress
                bar.suffix = "({batch}/{size}) Batch: {bt:.1f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:}".format(
                    batch=count + 1,
                    size=batch_gen.get_max_index() / batch_size,
                    bt=batch_time.avg,
                    total=bar.elapsed_td,
                    eta=datetime.timedelta(seconds=ceil((bar.eta_td/batch_size).total_seconds())),
                    loss=loss.item()
                )
                count += 1
                bar.next()
            batch_gen.reset()
            # checkpoint model and optimizer state per epoch
            torch.save(self.model.state_dict(), save_dir + "/epoch-" + str(epoch + 1) + ".model")
            torch.save(optimizer.state_dict(), save_dir + "/epoch-" + str(epoch + 1) + ".opt")
            get_metrics_train.calc_metrics()
            result_dict = get_metrics_train.save_print_metrics(self.writer, save_dir, epoch, epoch_loss/(len(batch_gen.list_of_examples)/batch_size))
            self.train_result_dict.update(result_dict)
            # re-point eval_args at this epoch's checkpoint and evaluate
            eval_args[7] = epoch
            eval_args[1] = save_dir + "/epoch-" + str(epoch+1) + ".model"
            self.predict(*eval_args)
        with open(f'{save_dir}/train_results.json', 'w') as fp:
            json.dump(self.train_result_dict, fp, indent=4)
        with open(f'{save_dir}/eval_results.json', 'w') as fp:
            json.dump(self.test_result_dict, fp, indent=4)
        self.writer.close()

    def predict(
        self,
        args,
        model_dir,
        results_dir,
        features_dict,
        gt_dict,
        gt_dict_dil,
        vid_list_file,
        epoch,
        device,
        mode,
        classification_threshold,
        uniform=0,
        save_pslabels=False,
        CP_dict=None,
    ):
        """Run inference over ``vid_list_file`` and compute/save metrics.

        If ``CP_dict`` is given, its precomputed predictions are evaluated
        instead of running the model. ``uniform`` replaces predictions with
        evenly spaced boundaries; ``save_pslabels`` writes pseudo-labels and
        returns early.

        NOTE(review): the ``classification_threshold`` parameter is shadowed —
        the code below reads ``args.classification_threshold`` instead; confirm
        which one callers intend to take effect.
        """
        save_score_dict = {}
        metrics_per_signer = {}  # NOTE(review): populated nowhere in this method
        get_metrics_test = Metric(mode)
        self.model.eval()
        with torch.no_grad():
            if CP_dict is None:
                # load the requested checkpoint before predicting
                self.model.to(device)
                self.model.load_state_dict(torch.load(model_dir))
            epoch_loss = 0
            for vid in tqdm(vid_list_file):
                # features arrive as (L, C); the model expects (C, L)
                features = np.swapaxes(features_dict[vid], 0, 1)
                if CP_dict is not None:
                    predicted = torch.tensor(CP_dict[vid]).to(device)
                    pred_prob = CP_dict[vid]
                    gt = torch.tensor(gt_dict[vid]).to(device)
                    gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                else:
                    input_x = torch.tensor(features, dtype=torch.float)
                    input_x.unsqueeze_(0)
                    input_x = input_x.to(device)
                    predictions = self.model(input_x, torch.ones(input_x.size(), device=device))
                    if self.num_classes == 1:
                        # regression
                        num_iter = 1
                        pred_prob = predictions[-1].squeeze()
                        pred_prob = torch_to_list(pred_prob)
                        predicted = torch.tensor(np.where(np.asarray(pred_prob) > args.classification_threshold, 1, 0)).to(device)
                        gt = torch.tensor(gt_dict[vid]).to(device)
                        gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                    else:
                        num_iter = 1
                        # probability of the positive class from the last stage
                        pred_prob = torch_to_list(self.sm(predictions[-1]))[0][1]
                        predicted = torch.tensor(np.where(np.asarray(pred_prob) > args.classification_threshold, 1, 0)).to(device)
                        gt = torch.tensor(gt_dict[vid]).to(device)
                        gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                if uniform:
                    # baseline: place boundaries at evenly spaced positions
                    num_signs = get_num_signs(gt_dict[vid])
                    len_clip = len(gt_dict[vid])
                    predicted = [0]*len_clip
                    dist_uni = len_clip / num_signs
                    for i in range(1, num_signs):
                        predicted[round(i*dist_uni)] = 1
                        predicted[round(i*dist_uni)+1] = 1
                    pred_prob = predicted
                    predicted = torch.tensor(predicted).to(device)
                if save_pslabels:
                    # only collect scores/predictions; metrics are skipped
                    save_score_dict[vid] = {}
                    save_score_dict[vid]['scores'] = np.asarray(pred_prob)
                    save_score_dict[vid]['preds'] = np.asarray(torch_to_list(predicted))
                    continue
                loss = 0
                mask = torch.ones(self.num_classes, np.shape(gt)[0]).to(device)
                # loss for each stage
                for ix, p in enumerate(predictions):
                    if self.num_classes == 1:
                        loss += self.mse_red(p.transpose(2, 1).contiguous().view(-1, self.num_classes).squeeze(), gt.view(-1))
                    else:
                        loss += self.ce(p.transpose(2, 1).contiguous().view(-1, self.num_classes), gt.view(-1))
                        loss += 0.15*torch.mean(torch.clamp(self.mse(F.log_softmax(p[:, :, 1:], dim=1), F.log_softmax(p.detach()[:, :, :-1], dim=1)), min=0, max=16)*mask[:, 1:])
                epoch_loss += loss.item()
                cut_endpoints = True
                if cut_endpoints:
                    # drop spurious boundary predictions at the clip edges when
                    # the (dilated) ground truth shows no boundary there
                    if sum(predicted[-2:]) > 0 and sum(gt_eval[-4:]) == 0:
                        for j in range(len(predicted)-1, 0, -1):
                            if predicted[j] != 0:
                                predicted[j] = 0
                            elif predicted[j] == 0 and j < len(predicted) - 2:
                                break
                    if sum(predicted[:2]) > 0 and sum(gt_eval[:4]) == 0:
                        check = 0
                        for j, item in enumerate(predicted):
                            if item != 0:
                                predicted[j] = 0
                                check = 1
                            elif item == 0 and (j > 2 or check):
                                break
                get_metrics_test.calc_scores_per_batch(predicted.unsqueeze(0), gt.unsqueeze(0), gt_eval.unsqueeze(0))
                save_score_dict[vid] = {}
                save_score_dict[vid]['scores'] = np.asarray(pred_prob)
                save_score_dict[vid]['gt'] = torch_to_list(gt)
                if mode == 'test' and args.viz_results:
                    # visualize predictions vs ground truth per clip
                    if not isinstance(vid, int):
                        f_name = vid.split('/')[-1].split('.')[0]
                    else:
                        f_name = str(vid)
                    viz_results_paper(
                        gt,
                        torch_to_list(predicted),
                        name=results_dir + "/" + f'{f_name}',
                        pred_prob=pred_prob,
                    )
            if save_pslabels:
                # group per-clip pseudo-labels back into whole episodes
                PL_labels_dict = {}
                PL_scores_dict = {}
                for vid in vid_list_file:
                    # episode/part naming differs per dataset
                    if args.test_data == 'phoenix14':
                        episode = vid.split('.')[0]
                        part = vid.split('.')[1]
                    elif args.test_data == 'bsl1k':
                        episode = vid.split('_')[0]
                        part = vid.split('_')[1]
                    if episode not in PL_labels_dict:
                        PL_labels_dict[episode] = []
                        PL_scores_dict[episode] = []
                    PL_labels_dict[episode].extend(save_score_dict[vid]['preds'])
                    PL_scores_dict[episode].extend(save_score_dict[vid]['scores'])
                for episode in PL_labels_dict.keys():
                    # derive the pseudo-label output root from the results path
                    PL_root = str(Path(results_dir).parent).replace(f'exps/{args.folder}/results/regression', f'data/pseudo_labels/PL/{args.folder}').replace(f'exps/{args.folder}/results/classification', f'data/pseudo_labels/PL/{args.folder}')
                    # print(f'Save PL to {PL_root}/{episode}')
                    if not os.path.exists(f'{PL_root}/{episode}'):
                        os.makedirs(f'{PL_root}/{episode}')
                        pickle.dump(PL_labels_dict[episode], open(f'{PL_root}/{episode}/preds.pkl', "wb"))
                        pickle.dump(PL_scores_dict[episode], open(f'{PL_root}/{episode}/scores.pkl', "wb"))
                    else:
                        print('PL already exist!!')
                return
            if mode == 'test':
                pickle.dump(save_score_dict, open(f'{results_dir}/scores.pkl', "wb"))
            get_metrics_test.calc_metrics()
            save_dir = results_dir if mode == 'test' else Path(model_dir).parent
            result_dict = get_metrics_test.save_print_metrics(self.writer, save_dir, epoch, epoch_loss/len(vid_list_file))
            self.test_result_dict.update(result_dict)
            if mode == 'test':
                with open(f'{results_dir}/eval_results.json', 'w') as fp:
                    json.dump(self.test_result_dict, fp, indent=4)
class Trainer:
    """Trainer/evaluator for a multi-stage temporal model (MS-TCN style).

    Two modes are selected by ``num_classes``: regression (num_classes == 1,
    MSE on per-frame scores) and classification (cross-entropy plus a
    truncated-MSE smoothing term between neighbouring frames). Metrics are
    logged to TensorBoard under ``{save_dir}/logs``.
    """

    def __init__(self, num_blocks, num_layers, num_f_maps, dim, num_classes, device, weights, save_dir):
        self.model = MultiStageModel(num_blocks, num_layers, num_f_maps, dim, num_classes)
        if weights is None:
            self.ce = nn.CrossEntropyLoss(ignore_index=-100)
        else:
            # optional per-class weights to counter label imbalance
            self.ce = nn.CrossEntropyLoss(weight=torch.tensor(weights).to(device), ignore_index=-100)
        self.mse = nn.MSELoss(reduction='none')      # element-wise, used by the smoothing term
        self.mse_red = nn.MSELoss(reduction='mean')  # scalar, used as the regression loss
        self.sm = nn.Softmax(dim=1)
        self.num_classes = num_classes
        self.writer = SummaryWriter(log_dir=f'{save_dir}/logs')
        self.global_counter = 0
        self.train_result_dict = {}
        self.test_result_dict = {}

    def train(self, save_dir, batch_gen, num_epochs, batch_size, learning_rate, device, eval_args, pretrained=''):
        """Run the optimisation loop for ``num_epochs`` epochs.

        After every epoch the model/optimizer states are checkpointed to
        ``save_dir`` and ``self.predict`` is re-run with ``eval_args`` whose
        slots 1 (model path) and 7 (epoch) are overwritten in place.
        Aggregated results are written as JSON next to the checkpoints.
        """
        self.model.train()
        self.model.to(device)
        # load pretrained model
        if pretrained != '':
            pretrained_dict = torch.load(pretrained)
            self.model.load_state_dict(pretrained_dict)
        optimizer = optim.Adam(self.model.parameters(), lr=learning_rate)
        for epoch in range(num_epochs):
            epoch_loss = 0
            end = time.time()
            batch_time = AverageMeter()
            data_time = AverageMeter()  # NOTE(review): never updated in this loop
            bar = Bar("E%d" % (epoch + 1), max=batch_gen.get_max_index())
            count = 0
            get_metrics_train = Metric('train')
            while batch_gen.has_next():
                self.global_counter += 1
                batch_input, batch_target, batch_target_eval, mask = batch_gen.next_batch(batch_size)
                batch_input, batch_target, batch_target_eval, mask = batch_input.to(device), batch_target.to(device), batch_target_eval.to(device), mask.to(device)
                optimizer.zero_grad()
                predictions = self.model(batch_input, mask)
                loss = 0
                # loss for each stage
                for ix, p in enumerate(predictions):
                    if self.num_classes == 1:
                        loss += self.mse_red(p.transpose(2, 1).contiguous().view(-1, self.num_classes).squeeze(), batch_target.view(-1))
                    else:
                        loss += self.ce(p.transpose(2, 1).contiguous().view(-1, self.num_classes), batch_target.view(-1))
                        # truncated smoothing loss between neighbouring frames (clamped at 16)
                        loss += 0.15*torch.mean(torch.clamp(self.mse(F.log_softmax(p[:, :, 1:], dim=1), F.log_softmax(p.detach()[:, :, :-1], dim=1)), min=0, max=16)*mask[:, :, 1:])
                epoch_loss += loss.item()
                loss.backward()
                optimizer.step()
                if self.num_classes == 1:
                    # regression: binarise by rounding the raw frame scores
                    predicted = torch.round(predictions[-1].data.squeeze())
                    gt = torch.round(batch_target)
                    gt_eval = batch_target_eval
                else:
                    _, predicted = torch.max(predictions[-1].data, 1)
                    gt = batch_target
                    gt_eval = batch_target_eval
                get_metrics_train.calc_scores_per_batch(predicted, gt, gt_eval, mask)
                # measure elapsed time
                batch_time.update(time.time() - end)
                end = time.time()
                # plot progress
                bar.suffix = "({batch}/{size}) Batch: {bt:.1f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:}".format(
                    batch=count + 1,
                    size=batch_gen.get_max_index() / batch_size,
                    bt=batch_time.avg,
                    total=bar.elapsed_td,
                    eta=datetime.timedelta(seconds=ceil((bar.eta_td/batch_size).total_seconds())),
                    loss=loss.item()
                )
                count += 1
                bar.next()
            batch_gen.reset()
            torch.save(self.model.state_dict(), save_dir + "/epoch-" + str(epoch + 1) + ".model")
            torch.save(optimizer.state_dict(), save_dir + "/epoch-" + str(epoch + 1) + ".opt")
            get_metrics_train.calc_metrics()
            result_dict = get_metrics_train.save_print_metrics(self.writer, save_dir, epoch, epoch_loss/(len(batch_gen.list_of_examples)/batch_size))
            self.train_result_dict.update(result_dict)
            # evaluate the fresh checkpoint; eval_args is mutated in place
            eval_args[7] = epoch
            eval_args[1] = save_dir + "/epoch-" + str(epoch+1) + ".model"
            self.predict(*eval_args)
        with open(f'{save_dir}/train_results.json', 'w') as fp:
            json.dump(self.train_result_dict, fp, indent=4)
        with open(f'{save_dir}/eval_results.json', 'w') as fp:
            json.dump(self.test_result_dict, fp, indent=4)
        self.writer.close()

    def predict(
        self,
        args,
        model_dir,
        results_dir,
        features_dict,
        gt_dict,
        gt_dict_dil,
        vid_list_file,
        epoch,
        device,
        mode,
        classification_threshold,
        uniform=0,
        save_pslabels=False,
        CP_dict=None,
    ):
        """Evaluate a checkpoint (or precomputed predictions) over ``vid_list_file``.

        When ``CP_dict`` is given its per-video predictions are used directly
        instead of running the model. ``uniform`` replaces predictions with an
        evenly spaced boundary baseline. ``save_pslabels`` short-circuits the
        metric computation and instead writes per-episode pseudo-label files.

        NOTE(review): the ``classification_threshold`` parameter is never read;
        thresholding uses ``args.classification_threshold`` — confirm intended.
        """
        save_score_dict = {}
        metrics_per_signer = {}  # NOTE(review): never populated in this method
        get_metrics_test = Metric(mode)
        self.model.eval()
        with torch.no_grad():
            if CP_dict is None:
                self.model.to(device)
                self.model.load_state_dict(torch.load(model_dir))
            epoch_loss = 0
            for vid in tqdm(vid_list_file):
                features = np.swapaxes(features_dict[vid], 0, 1)
                if CP_dict is not None:
                    # use externally supplied (changepoint) predictions as-is
                    predicted = torch.tensor(CP_dict[vid]).to(device)
                    pred_prob = CP_dict[vid]
                    gt = torch.tensor(gt_dict[vid]).to(device)
                    gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                else:
                    input_x = torch.tensor(features, dtype=torch.float)
                    input_x.unsqueeze_(0)
                    input_x = input_x.to(device)
                    predictions = self.model(input_x, torch.ones(input_x.size(), device=device))
                    if self.num_classes == 1:
                        # regression
                        num_iter = 1
                        pred_prob = predictions[-1].squeeze()
                        pred_prob = torch_to_list(pred_prob)
                        predicted = torch.tensor(np.where(np.asarray(pred_prob) > args.classification_threshold, 1, 0)).to(device)
                        gt = torch.tensor(gt_dict[vid]).to(device)
                        gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                    else:
                        num_iter = 1
                        # probability of the positive class from the final stage
                        pred_prob = torch_to_list(self.sm(predictions[-1]))[0][1]
                        predicted = torch.tensor(np.where(np.asarray(pred_prob) > args.classification_threshold, 1, 0)).to(device)
                        gt = torch.tensor(gt_dict[vid]).to(device)
                        gt_eval = torch.tensor(gt_dict_dil[vid]).to(device)
                if uniform:
                    # baseline: distribute boundaries evenly across the clip
                    num_signs = get_num_signs(gt_dict[vid])
                    len_clip = len(gt_dict[vid])
                    predicted = [0]*len_clip
                    dist_uni = len_clip / num_signs
                    for i in range(1, num_signs):
                        predicted[round(i*dist_uni)] = 1
                        predicted[round(i*dist_uni)+1] = 1
                    pred_prob = predicted
                    predicted = torch.tensor(predicted).to(device)
                if save_pslabels:
                    # only collect raw scores/labels; metrics are skipped
                    save_score_dict[vid] = {}
                    save_score_dict[vid]['scores'] = np.asarray(pred_prob)
                    save_score_dict[vid]['preds'] = np.asarray(torch_to_list(predicted))
                    continue
                loss = 0
                mask = torch.ones(self.num_classes, np.shape(gt)[0]).to(device)
                # loss for each stage
                for ix, p in enumerate(predictions):
                    if self.num_classes == 1:
                        loss += self.mse_red(p.transpose(2, 1).contiguous().view(-1, self.num_classes).squeeze(), gt.view(-1))
                    else:
                        loss += self.ce(p.transpose(2, 1).contiguous().view(-1, self.num_classes), gt.view(-1))
                        loss += 0.15*torch.mean(torch.clamp(self.mse(F.log_softmax(p[:, :, 1:], dim=1), F.log_softmax(p.detach()[:, :, :-1], dim=1)), min=0, max=16)*mask[:, 1:])
                epoch_loss += loss.item()
                cut_endpoints = True
                if cut_endpoints:
                    # suppress spurious detections at the clip edges when the
                    # (dilated) ground truth shows no boundary there
                    if sum(predicted[-2:]) > 0 and sum(gt_eval[-4:]) == 0:
                        for j in range(len(predicted)-1, 0, -1):
                            if predicted[j] != 0:
                                predicted[j] = 0
                            elif predicted[j] == 0 and j < len(predicted) - 2:
                                break
                    if sum(predicted[:2]) > 0 and sum(gt_eval[:4]) == 0:
                        check = 0
                        for j, item in enumerate(predicted):
                            if item != 0:
                                predicted[j] = 0
                                check = 1
                            elif item == 0 and (j > 2 or check):
                                break
                get_metrics_test.calc_scores_per_batch(predicted.unsqueeze(0), gt.unsqueeze(0), gt_eval.unsqueeze(0))
                save_score_dict[vid] = {}
                save_score_dict[vid]['scores'] = np.asarray(pred_prob)
                save_score_dict[vid]['gt'] = torch_to_list(gt)
                if mode == 'test' and args.viz_results:
                    if not isinstance(vid, int):
                        f_name = vid.split('/')[-1].split('.')[0]
                    else:
                        f_name = str(vid)
                    viz_results_paper(
                        gt,
                        torch_to_list(predicted),
                        name=results_dir + "/" + f'{f_name}',
                        pred_prob=pred_prob,
                    )
        if save_pslabels:
            # regroup per-clip pseudo-labels by episode and persist them
            PL_labels_dict = {}
            PL_scores_dict = {}
            for vid in vid_list_file:
                if args.test_data == 'phoenix14':
                    episode = vid.split('.')[0]
                    part = vid.split('.')[1]
                elif args.test_data == 'bsl1k':
                    episode = vid.split('_')[0]
                    part = vid.split('_')[1]
                if episode not in PL_labels_dict:
                    PL_labels_dict[episode] = []
                    PL_scores_dict[episode] = []
                PL_labels_dict[episode].extend(save_score_dict[vid]['preds'])
                PL_scores_dict[episode].extend(save_score_dict[vid]['scores'])
            for episode in PL_labels_dict.keys():
                PL_root = str(Path(results_dir).parent).replace(f'exps/{args.folder}/results/regression', f'data/pseudo_labels/PL/{args.folder}').replace(f'exps/{args.folder}/results/classification', f'data/pseudo_labels/PL/{args.folder}')
                #print(f'Save PL to {PL_root}/{episode}')
                if not os.path.exists(f'{PL_root}/{episode}'):
                    os.makedirs(f'{PL_root}/{episode}')
                    pickle.dump(PL_labels_dict[episode], open(f'{PL_root}/{episode}/preds.pkl', "wb"))
                    pickle.dump(PL_scores_dict[episode], open(f'{PL_root}/{episode}/scores.pkl', "wb"))
                else:
                    print('PL already exist!!')
            return
        if mode == 'test':
            pickle.dump(save_score_dict, open(f'{results_dir}/scores.pkl', "wb"))
        get_metrics_test.calc_metrics()
        save_dir = results_dir if mode == 'test' else Path(model_dir).parent
        result_dict = get_metrics_test.save_print_metrics(self.writer, save_dir, epoch, epoch_loss/len(vid_list_file))
        self.test_result_dict.update(result_dict)
        if mode == 'test':
            with open(f'{results_dir}/eval_results.json', 'w') as fp:
                json.dump(self.test_result_dict, fp, indent=4)
| 45.153686
| 243
| 0.549157
| 5,574
| 43,483
| 4.06638
| 0.0723
| 0.025589
| 0.012706
| 0.011294
| 0.807024
| 0.78717
| 0.76714
| 0.745831
| 0.731845
| 0.714595
| 0
| 0.021082
| 0.325851
| 43,483
| 962
| 244
| 45.200624
| 0.752132
| 0.044845
| 0
| 0.717568
| 0
| 0.002703
| 0.033373
| 0.015418
| 0
| 0
| 0
| 0
| 0.005405
| 1
| 0.055405
| false
| 0
| 0.028378
| 0.005405
| 0.139189
| 0.008108
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d204ead9baa80279b2c3ce6b3c5096f178159f1
| 37,173
|
py
|
Python
|
src/amuse/test/suite/compile_tests/test_stopping_conditions.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 131
|
2015-06-04T09:06:57.000Z
|
2022-02-01T12:11:29.000Z
|
src/amuse/test/suite/compile_tests/test_stopping_conditions.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 690
|
2015-10-17T12:18:08.000Z
|
2022-03-31T16:15:58.000Z
|
src/amuse/test/suite/compile_tests/test_stopping_conditions.py
|
rieder/amuse
|
3ac3b6b8f922643657279ddee5c8ab3fc0440d5e
|
[
"Apache-2.0"
] | 102
|
2015-01-22T10:00:29.000Z
|
2022-02-09T13:29:43.000Z
|
from amuse.test.amusetest import TestWithMPI
from amuse.test import compile_tools
from amuse.test.amusetest import get_amuse_root_dir
#cello
from amuse.support.codes import stopping_conditions
from amuse.support.interface import InCodeComponentImplementation
import os
import shlex
from amuse.units import nbody_system
from amuse.units import units
from amuse import datamodel
from amuse.support.exceptions import AmuseException
from amuse.rfi.tools import create_c
from amuse.rfi.tools import create_fortran
from amuse.rfi import channel
from amuse.rfi.core import *
from amuse.community import NO_UNIT
codestring = """
#ifndef NOMPI
#include <mpi.h>
#endif
#include <stopcond.h>
#ifdef __cplusplus
extern "C" {
#endif
int initialize_code()
{
// AMUSE STOPPING CONDITIONS SUPPORT
supported_conditions = COLLISION_DETECTION_BITMAP | PAIR_DETECTION_BITMAP | TIMEOUT_DETECTION_BITMAP | OUT_OF_BOX_DETECTION_BITMAP;
// -----------------------
return 0;
}
int fire_condition(int condition_to_set, int particle_index1, int particle_index2, int rank) {
int my_rank;
int error, stopping_index;
#ifndef NOMPI
error = MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);
#else
error = 0;
my_rank = rank;
#endif
if (rank >= 0 && rank != my_rank) { return 0; }
stopping_index = next_index_for_stopping_condition();
error = set_stopping_condition_info(stopping_index, condition_to_set);
if(particle_index1 > 0) {
error = set_stopping_condition_particle_index(stopping_index, 0, particle_index1);
}
if(particle_index2 > 0) {
error = set_stopping_condition_particle_index(stopping_index, 1, particle_index2);
}
return 0;
}
#ifdef __cplusplus
}
#endif
"""
codestringF = """
FUNCTION initialize_code()
IMPLICIT NONE
include "stopcond.inc"
INTEGER :: initialize_code
INTEGER :: set_support_for_condition
INTEGER :: return
initialize_code = 0
return = set_support_for_condition(COLLISION_DETECTION)
return = set_support_for_condition(PAIR_DETECTION)
RETURN
END FUNCTION
"""
codestringFModule = """
MODULE AmuseInterface
CONTAINS
FUNCTION initialize_code()
use StoppingConditions
IMPLICIT NONE
INTEGER :: initialize_code
INTEGER :: return
initialize_code = 0
return = set_support_for_condition(COLLISION_DETECTION)
return = set_support_for_condition(PAIR_DETECTION)
END FUNCTION
FUNCTION fire_condition(condition_to_set, particle_index1, particle_index2, rank)
use StoppingConditions
IMPLICIT NONE
include "mpif.h"
INTEGER :: fire_condition
INTEGER :: my_rank
INTEGER :: error, stopping_index
INTEGER, intent(in) :: condition_to_set, particle_index1, particle_index2, rank
fire_condition = 0
call mpi_comm_rank(MPI_COMM_WORLD, my_rank, error)
if (rank.GE.0 .AND. rank.NE.my_rank) then
return
end if
stopping_index = next_index_for_stopping_condition()
error = set_stopping_condition_info(stopping_index, condition_to_set)
if(particle_index1 .GT. 0) then
error = set_stopping_condition_particle_index(stopping_index, 0, particle_index1)
end if
if(particle_index2 .GT. 0) then
error = set_stopping_condition_particle_index(stopping_index, 1, particle_index2)
end if
END FUNCTION
END MODULE
"""
class ForTestingInterface(CodeInterface, stopping_conditions.StoppingConditionInterface):
    """Low-level RPC interface to the compiled test worker.

    Exposes the raw stopping-condition bookkeeping calls (reset, next index,
    set info / particle index) and the MPI setup/collect/distribute helpers so
    tests can drive the stopcond library directly. Each ``@legacy_function``
    stub below only describes the remote call's signature; the order of the
    ``addParameter`` calls defines the wire order.
    """

    def __init__(self, exefile, **options):
        CodeInterface.__init__(self, exefile, **options)

    # header(s) included by the generated worker code
    include_headers = ['stopcond.h']

    @legacy_function
    def initialize_code():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.can_handle_array = False
        return function

    @legacy_function
    def reset_stopping_conditions():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.can_handle_array = False
        return function

    # returns the next free slot in the stopping-condition buffer
    @legacy_function
    def next_index_for_stopping_condition():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.result_unit = NO_UNIT
        function.can_handle_array = False
        return function

    @legacy_function
    def set_stopping_condition_info():
        function = LegacyFunctionSpecification()
        function.addParameter('index', dtype='int32', direction=function.IN)
        function.addParameter('index_of_the_condition', dtype='int32', direction=function.IN)
        function.result_type = 'int32'
        function.can_handle_array = True
        return function

    @legacy_function
    def set_stopping_condition_particle_index():
        function = LegacyFunctionSpecification()
        function.addParameter('index', dtype='int32', direction=function.IN)
        function.addParameter('index_of_the_condition', dtype='int32', direction=function.IN)
        function.addParameter('index_of_the_particle', dtype='int32', direction=function.IN)
        function.result_type = 'int32'
        function.can_handle_array = True
        return function

    @legacy_function
    def mpi_setup_stopping_conditions():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.can_handle_array = False
        return function

    @legacy_function
    def mpi_collect_stopping_conditions():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.can_handle_array = False
        return function

    @legacy_function
    def mpi_distribute_stopping_conditions():
        function = LegacyFunctionSpecification()
        function.result_type = 'int32'
        function.can_handle_array = False
        return function
class ForTestingInterfaceFortranModule(ForTestingInterface):
    """Interface variant for workers built from Fortran modules (or the C
    worker that also provides fire_condition)."""

    use_modules = ['StoppingConditions', 'AmuseInterface']

    # raise 'condition_to_set' on MPI rank 'rank' (-1 = every rank), attaching
    # up to two particle indices when they are > 0
    @legacy_function
    def fire_condition():
        function = LegacyFunctionSpecification()
        function.addParameter('condition_to_set', dtype='int32', direction=function.IN)
        function.addParameter('particle_index_1', dtype='int32', direction=function.IN)
        function.addParameter('particle_index_2', dtype='int32', direction=function.IN)
        function.addParameter('rank', dtype='int32', direction=function.IN, default = -1)
        function.result_type = 'int32'
        function.can_handle_array = True
        return function
class ForTesting(InCodeComponentImplementation):
    """High-level wrapper around the test worker with an in-memory particle set.

    Particles live purely on the Python side (``self.my_particles``); only the
    stopping-condition calls are forwarded to the compiled worker. The
    ``define_*`` hooks are invoked by the framework during construction.
    """

    def __init__(self, exefile, **options):
        # tests may substitute the module-based interface here
        if 'community_interface' in options:
            interface = options['community_interface']
        else:
            interface = ForTestingInterface
        self.stopping_conditions = stopping_conditions.StoppingConditions(self)
        InCodeComponentImplementation.__init__(self, interface(exefile, **options), **options)
        self.my_particles = datamodel.Particles()

    def define_methods(self, object):
        self.stopping_conditions.define_methods(object)

    def new_particle(self, mass):
        particles = datamodel.Particles(len(mass))
        particles.mass = mass
        self.my_particles.add_particles(particles)
        # indices of the particles just appended
        return list(range(len(self.my_particles)-len(mass), len(self.my_particles)))

    def get_mass(self, indices):
        return self.my_particles.mass[indices]

    def delete_particle(self, particle):
        self.my_particles.remove_particle(particle)

    def define_particle_sets(self, object):
        object.define_set('particles', 'index_of_the_particle')
        object.set_new('particles', 'new_particle')
        object.set_delete('particles', 'delete_particle')
        object.add_getter('particles', 'get_mass', names=("mass",))
        self.stopping_conditions.define_particle_set(object)
class _AbstractTestInterface(TestWithMPI):
    """Shared fixture: compile the C test worker once per test class."""

    @classmethod
    def get_interface_class(cls):
        # Interface class used to generate and drive the worker.
        return ForTestingInterface

    @classmethod
    def get_libname(cls):
        # Stopping-conditions support library to link against.
        return "stopcond"

    @classmethod
    def setup_class(cls):
        cls.check_can_compile_modules()
        stopcond_dir = get_amuse_root_dir() + "/lib/stopcond"
        link_args = ["-L" + stopcond_dir, "-l" + cls.get_libname()]
        cls.exefile = compile_tools.build_worker(
            codestring,
            cls.get_path_to_results(),
            cls.get_interface_class(),
            write_header=False,
            extra_args=link_args,
        )
class TestInterface(_AbstractTestInterface):
    """Single-worker tests of the C stopping-condition library."""

    def test1(self):
        """Indices from next_index_for_stopping_condition are consecutive."""
        #~ print self.exefile
        instance = ForTestingInterface(self.exefile)
        instance.reset_stopping_conditions()
        next = instance.next_index_for_stopping_condition()
        next = instance.next_index_for_stopping_condition()
        instance.stop()
        self.assertEqual(next, 1)

    def test2(self):
        """Conditions declared by initialize_code are reported as supported."""
        instance = ForTesting(self.exefile) #, debugger = "xterm")
        instance.initialize_code()
        self.assertTrue(instance.stopping_conditions.pair_detection.is_supported())
        self.assertTrue(instance.stopping_conditions.collision_detection.is_supported())
        self.assertFalse(instance.stopping_conditions.escaper_detection.is_supported())
        instance.stop()

    def test3(self):
        """Conditions can be enabled and disabled."""
        instance = ForTesting(self.exefile) #, debugger = "xterm")
        instance.initialize_code()
        self.assertFalse(instance.stopping_conditions.pair_detection.is_enabled())
        instance.stopping_conditions.pair_detection.enable()
        self.assertTrue(instance.stopping_conditions.pair_detection.is_enabled())
        instance.stopping_conditions.pair_detection.disable()
        self.assertFalse(instance.stopping_conditions.pair_detection.is_enabled())
        instance.stop()

    def test4(self):
        """Setting condition info marks the condition as set."""
        instance = ForTesting(self.exefile)
        instance.reset_stopping_conditions()
        next = instance.next_index_for_stopping_condition()
        self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
        #~ print next,instance.stopping_conditions.pair_detection.type
        instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
        self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
        instance.stop()

    def test5(self):
        """Particle indices attached to a condition can be read back."""
        instance = ForTesting(self.exefile)
        instance.reset_stopping_conditions()
        next = instance.next_index_for_stopping_condition()
        self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
        instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
        instance.set_stopping_condition_particle_index(next, 0, 11)
        instance.set_stopping_condition_particle_index(next, 1, 12)
        self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
        self.assertEqual(11, instance.get_stopping_condition_particle_index(next, 0))
        self.assertEqual(12, instance.get_stopping_condition_particle_index(next, 1))
        instance.stop()

    def test6(self):
        """Out-of-box detection can be flagged too."""
        instance = ForTesting(self.exefile)
        instance.reset_stopping_conditions()
        next = instance.next_index_for_stopping_condition()
        instance.set_stopping_condition_info(next,instance.stopping_conditions.out_of_box_detection.type)
        self.assertTrue(instance.stopping_conditions.out_of_box_detection.is_set())
        instance.stop()

    def test7(self):
        """Flagged particle pairs map back onto the local particle set."""
        instance = ForTesting(self.exefile)
        particles = datamodel.Particles(20)
        # masses are index+1 kg, used below to verify the mapping
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.reset_stopping_conditions()
        pairs = [(11, 12), (0, 4), (3, 18), (7, 2)]
        next = instance.next_index_for_stopping_condition()
        self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
        instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
        instance.set_stopping_condition_particle_index(next, 0, pairs[0][0])
        instance.set_stopping_condition_particle_index(next, 1, pairs[0][1])
        self.assertEqual(11, instance.get_stopping_condition_particle_index(next, 0))
        self.assertEqual(12, instance.get_stopping_condition_particle_index(next, 1))
        self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
        self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(0)), 1)
        self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(1)), 1)
        for index1, index2 in pairs[1:]:
            next = instance.next_index_for_stopping_condition()
            instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
            instance.set_stopping_condition_particle_index(next, 0, index1)
            instance.set_stopping_condition_particle_index(next, 1, index2)
            self.assertEqual(index1, instance.get_stopping_condition_particle_index(next, 0))
            self.assertEqual(index2, instance.get_stopping_condition_particle_index(next, 1))
        self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(0)), 4)
        self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(1)), 4)
        self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(2)), 0)
        self.assertEqual(instance.stopping_conditions.pair_detection.particles(0).mass,
            [first + 1 for first, second in pairs] | units.kg)
        self.assertEqual(instance.stopping_conditions.pair_detection.particles(1).mass,
            [second + 1 for first, second in pairs] | units.kg)
        instance.stop()

    def test8(self):
        """Enabling an unsupported condition raises AmuseException."""
        instance = ForTesting(self.exefile)
        instance.initialize_code()
        self.assertFalse(instance.stopping_conditions.escaper_detection.is_supported())
        self.assertRaises(AmuseException, instance.stopping_conditions.escaper_detection.enable, expected_message=
            "Can't enable stopping condition 'escaper_detection', since 'ForTesting' does not support this condition.")
        instance.stop()

    def test9(self):
        """The condition buffer grows to at least 2048 entries."""
        instance = ForTestingInterface(self.exefile)
        instance.reset_stopping_conditions()
        nmax = 2048
        for i in range(nmax):
            next = instance.next_index_for_stopping_condition()
            #~ print i, next
            self.assertEqual(next, i)
        instance.stop()
class TestInterfaceMP(_AbstractTestInterface):
    """Multi-worker (MPI) tests: conditions fired on individual ranks must be
    collected and distributed correctly across all workers."""

    @classmethod
    def get_interface_class(self):
        # NOTE(review): parameter is conventionally named 'cls'; kept as-is.
        return ForTestingInterfaceFortranModule

    def get_number_of_workers(self):
        return 3

    @classmethod
    def get_libname(self):
        # MPI-enabled build of the stopping-conditions library
        return "stopcondmpi"

    def test1(self):
        """Each worker keeps a local buffer; collect sums entries over ranks."""
        number_of_workers = 4
        instance = ForTestingInterface(self.exefile, number_of_workers = number_of_workers)
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        instance.enable_stopping_condition(1)
        nmax = 50
        for i in range(nmax):
            next = instance.next_index_for_stopping_condition()
            self.assertEqual(next, i)
        i, error = instance.get_number_of_stopping_conditions_set()
        self.assertEqual(error, 0)
        self.assertEqual(i, nmax)
        instance.mpi_collect_stopping_conditions()
        i, error = instance.get_number_of_stopping_conditions_set()
        self.assertEqual(error, 0)
        self.assertEqual(i, number_of_workers * nmax)
        instance.stop()

    def test2(self):
        """A condition fired on all ranks appears once per worker after collect."""
        instance = ForTesting(
            self.exefile,
            community_interface = ForTestingInterfaceFortranModule,
            number_of_workers = self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        #~ print pair_detection.type
        # rank -1 => fire on every worker
        instance.fire_condition(
            pair_detection.type,
            1, 2, -1
        )
        instance.mpi_collect_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)),self.get_number_of_workers())
        self.assertEqual(len(pair_detection.particles(1)),self.get_number_of_workers())
        self.assertEqual(pair_detection.particles(0).key,particles[1].key)
        self.assertEqual(pair_detection.particles(1).key,particles[2].key)
        # NOTE(review): the [2,2,2]/[3,3,3] literals assume exactly 3 workers.
        self.assertEqual(pair_detection.particles(0).mass,[2,2,2] | units.kg)
        self.assertEqual(pair_detection.particles(1).mass,[3,3,3] | units.kg)
        instance.stop()

    def test5(self):
        """Firing on one rank at a time yields exactly one detection each."""
        instance = ForTesting(
            self.exefile,
            community_interface = ForTestingInterfaceFortranModule,
            number_of_workers = self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        for rank in range(self.get_number_of_workers()):
            #~ print pair_detection.type
            instance.fire_condition(
                pair_detection.type,
                1, 2, rank
            )
            instance.mpi_collect_stopping_conditions()
            self.assertTrue(pair_detection.is_set())
            self.assertEqual(len(pair_detection.particles(0)),1)
            self.assertEqual(len(pair_detection.particles(1)),1)
            self.assertEqual(pair_detection.particles(0).key,particles[1].key)
            self.assertEqual(pair_detection.particles(1).key,particles[2].key)
            self.assertEqual(pair_detection.particles(0).mass,[2] | units.kg)
            self.assertEqual(pair_detection.particles(1).mass,[3] | units.kg)
            instance.reset_stopping_conditions()
            instance.stopping_conditions.pair_detection.enable()
        instance.stop()

    def test3(self):
        """Different pairs fired on different ranks are all collected, in rank order."""
        instance = ForTesting(
            self.exefile,
            community_interface = ForTestingInterfaceFortranModule,
            number_of_workers = self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        instance.fire_condition(
            pair_detection.type,
            1, 2, 0
        )
        instance.fire_condition(
            pair_detection.type,
            3, 4, 1
        )
        instance.fire_condition(
            pair_detection.type,
            5, 6, 2
        )
        instance.mpi_collect_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)),3)
        self.assertEqual(len(pair_detection.particles(1)),3)
        self.assertEqual(pair_detection.particles(0).key[0],particles[1].key)
        self.assertEqual(pair_detection.particles(1).key[0],particles[2].key)
        self.assertEqual(pair_detection.particles(0).key[1],particles[3].key)
        self.assertEqual(pair_detection.particles(1).key[1],particles[4].key)
        self.assertEqual(pair_detection.particles(0).key[2],particles[5].key)
        self.assertEqual(pair_detection.particles(1).key[2],particles[6].key)
        instance.reset_stopping_conditions()
        instance.stopping_conditions.pair_detection.enable()
        instance.stop()

    def test4(self):
        """A fired condition with no particle indices yields empty particle sets."""
        instance = ForTesting(
            self.exefile,
            community_interface = ForTestingInterfaceFortranModule,
            number_of_workers = self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        # NOTE(review): collect before fire / distribute after, i.e. the
        # reverse of the other tests — confirm this ordering is intentional.
        instance.mpi_collect_stopping_conditions()
        instance.fire_condition(
            pair_detection.type,
            -1, -1, -1
        )
        instance.mpi_distribute_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)),0)
        instance.stop()
class _AbstractTestInterfaceFortran:
    """Shared fixture for the Fortran workers: compile once per test class.

    Subclasses override the classmethods below to pick the code string, the
    interface class and the library to link.
    """

    @classmethod
    def get_codestring(cls):
        return codestringF

    @classmethod
    def get_interface_class(cls):
        return ForTestingInterface

    @classmethod
    def get_libname(cls):
        return 'stopcond'

    @classmethod
    def get_mpidir(cls):
        return ''

    def get_number_of_workers(self):
        return 1

    @classmethod
    def setup_class(cls):
        cls.check_can_compile_modules()
        stopcond_dir = "{0}/lib/stopcond".format(get_amuse_root_dir())
        cls.exefile = compile_tools.build_fortran_worker(
            cls.get_codestring(),
            cls.get_path_to_results(),
            cls.get_interface_class(),
            needs_mpi=True,
            extra_fflags=["-I", stopcond_dir],
            extra_ldflags=["-L" + stopcond_dir, "-l" + cls.get_libname()],
        )
class _TestInterfaceFortranSingleProcess(TestWithMPI, _AbstractTestInterfaceFortran):
def get_number_of_workers(self):
return 1
def test1(self):
instance = ForTestingInterface(self.exefile, number_of_workers = self.get_number_of_workers())
instance.reset_stopping_conditions()
next = instance.next_index_for_stopping_condition()
next = instance.next_index_for_stopping_condition()
instance.stop()
self.assertEqual(next, 1)
def test2(self):
instance = ForTesting(self.exefile, number_of_workers = self.get_number_of_workers()) #, debugger = "xterm")
instance.initialize_code()
self.assertTrue(instance.stopping_conditions.pair_detection.is_supported())
self.assertTrue(instance.stopping_conditions.collision_detection.is_supported())
self.assertFalse(instance.stopping_conditions.escaper_detection.is_supported())
instance.stop()
def test3(self):
instance = ForTesting(self.exefile, number_of_workers = self.get_number_of_workers()) #, debugger = "xterm")
instance.initialize_code()
self.assertFalse(instance.stopping_conditions.pair_detection.is_enabled())
instance.stopping_conditions.pair_detection.enable()
self.assertTrue(instance.stopping_conditions.pair_detection.is_enabled())
instance.stopping_conditions.pair_detection.disable()
self.assertFalse(instance.stopping_conditions.pair_detection.is_enabled())
instance.stop()
def test4(self):
instance = ForTesting(self.exefile, number_of_workers = self.get_number_of_workers())
instance.reset_stopping_conditions()
next = instance.next_index_for_stopping_condition()
self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
instance.stop()
def test5(self):
instance = ForTesting(self.exefile, number_of_workers = self.get_number_of_workers())
instance.reset_stopping_conditions()
next = instance.next_index_for_stopping_condition()
self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
instance.set_stopping_condition_particle_index(next, 0, 11)
instance.set_stopping_condition_particle_index(next, 1, 12)
self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
self.assertEqual(11, instance.get_stopping_condition_particle_index(next, 0))
self.assertEqual(12, instance.get_stopping_condition_particle_index(next, 1))
instance.stop()
def test6(self):
instance = ForTesting(self.exefile, number_of_workers = self.get_number_of_workers())
particles = datamodel.Particles(20)
particles.mass = list(range(1, 21)) | units.kg
instance.particles.add_particles(particles)
instance.reset_stopping_conditions()
pairs = [(11, 12), (0, 4), (3, 18), (7, 2)]
next = instance.next_index_for_stopping_condition()
self.assertFalse(instance.stopping_conditions.pair_detection.is_set())
instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
instance.set_stopping_condition_particle_index(next, 0, pairs[0][0])
instance.set_stopping_condition_particle_index(next, 1, pairs[0][1])
self.assertEqual(11, instance.get_stopping_condition_particle_index(next, 0))
self.assertEqual(12, instance.get_stopping_condition_particle_index(next, 1))
self.assertTrue(instance.stopping_conditions.pair_detection.is_set())
self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(0)), 1)
self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(1)), 1)
for index1, index2 in pairs[1:]:
next = instance.next_index_for_stopping_condition()
instance.set_stopping_condition_info(next,instance.stopping_conditions.pair_detection.type)
instance.set_stopping_condition_particle_index(next, 0, index1)
instance.set_stopping_condition_particle_index(next, 1, index2)
self.assertEqual(index1, instance.get_stopping_condition_particle_index(next, 0))
self.assertEqual(index2, instance.get_stopping_condition_particle_index(next, 1))
self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(0)), 4)
self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(1)), 4)
self.assertEqual(len(instance.stopping_conditions.pair_detection.particles(2)), 0)
self.assertEqual(instance.stopping_conditions.pair_detection.particles(0).mass,
[first + 1 for first, second in pairs] | units.kg)
self.assertEqual(instance.stopping_conditions.pair_detection.particles(1).mass,
[second + 1 for first, second in pairs] | units.kg)
instance.stop()
def test8(self):
    """Enabling an unsupported stopping condition must raise AmuseException.

    'escaper_detection' is not supported by the ForTesting code, so
    is_supported() must be False and enable() must fail with a clear message.
    """
    instance = ForTesting(self.exefile, number_of_workers=self.get_number_of_workers())
    instance.initialize_code()
    self.assertFalse(instance.stopping_conditions.escaper_detection.is_supported())
    self.assertRaises(
        AmuseException,
        instance.stopping_conditions.escaper_detection.enable,
        expected_message="Can't enable stopping condition 'escaper_detection', since 'ForTesting' does not support this condition.")
    instance.stop()
def test9(self):
    """next_index_for_stopping_condition must hand out consecutive indices.

    After a reset, repeatedly requesting a new stopping-condition slot must
    yield 0, 1, 2, ... up to nmax-1.
    """
    instance = ForTestingInterface(self.exefile, number_of_workers=self.get_number_of_workers())
    instance.initialize_code()
    instance.reset_stopping_conditions()
    nmax = 2048
    for i in range(nmax):
        # Renamed from 'next' to avoid shadowing the builtin.
        next_index = instance.next_index_for_stopping_condition()
        self.assertEqual(next_index, i)
    instance.stop()
class TestInterfaceFortran(_TestInterfaceFortranSingleProcess):
    """Single-process tests against the plain Fortran stopping-conditions library."""

    @classmethod
    def get_libname(cls):
        # Serial (non-MPI) stopping-conditions library.
        return 'stopcond'

    @classmethod
    def get_codestring(cls):
        # Fortran source for the test worker (defined at module level).
        return codestringF

    @classmethod
    def get_interface_class(cls):
        return ForTestingInterface
class TestInterfaceFortranModule(_TestInterfaceFortranSingleProcess):
    """Single-process tests using the Fortran-module variant of the worker code."""

    @classmethod
    def get_libname(cls):
        # Same serial library as TestInterfaceFortran; only the code string differs.
        return 'stopcond'

    @classmethod
    def get_codestring(cls):
        return codestringFModule

    @classmethod
    def get_interface_class(cls):
        return ForTestingInterfaceFortranModule
class TestInterfaceFortranModuleMultiprocess(TestWithMPI, _AbstractTestInterfaceFortran):
    """Multi-worker (MPI) tests of the Fortran-module stopping-conditions library.

    Runs the worker code on several MPI processes and checks how stopping
    conditions fired on individual ranks (or on all ranks at once) are
    distributed and collected.
    """

    @classmethod
    def get_libname(cls):
        # MPI-aware stopping-conditions library.
        return 'stopcondmpi'

    @classmethod
    def get_codestring(cls):
        return codestringFModule

    @classmethod
    def get_interface_class(cls):
        return ForTestingInterfaceFortranModule

    def get_number_of_workers(self):
        return 3

    @classmethod
    def get_mpidir(cls):
        # Parameter renamed from 'self' to 'cls': this is a classmethod.
        return ''

    def test1(self):
        """A condition fired on every rank (-1) yields one hit per worker."""
        instance = ForTesting(
            self.exefile,
            community_interface=ForTestingInterfaceFortranModule,
            number_of_workers=self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        # rank = -1 fires the condition on all workers.
        instance.fire_condition(
            pair_detection.type,
            1, 2, -1
        )
        instance.mpi_collect_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)), self.get_number_of_workers())
        self.assertEqual(len(pair_detection.particles(1)), self.get_number_of_workers())
        self.assertEqual(pair_detection.particles(0).key, particles[1].key)
        self.assertEqual(pair_detection.particles(1).key, particles[2].key)
        self.assertEqual(pair_detection.particles(0).mass, [2, 2, 2] | units.kg)
        self.assertEqual(pair_detection.particles(1).mass, [3, 3, 3] | units.kg)
        instance.stop()

    def test2(self):
        """The same pair fired once per rank collapses to a single detection."""
        instance = ForTesting(
            self.exefile,
            community_interface=ForTestingInterfaceFortranModule,
            number_of_workers=self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        for rank in range(self.get_number_of_workers()):
            instance.fire_condition(
                pair_detection.type,
                1, 2, rank
            )
        instance.mpi_collect_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)), 1)
        self.assertEqual(len(pair_detection.particles(1)), 1)
        self.assertEqual(pair_detection.particles(0).key, particles[1].key)
        self.assertEqual(pair_detection.particles(1).key, particles[2].key)
        self.assertEqual(pair_detection.particles(0).mass, [2] | units.kg)
        self.assertEqual(pair_detection.particles(1).mass, [3] | units.kg)
        instance.reset_stopping_conditions()
        instance.stopping_conditions.pair_detection.enable()
        instance.stop()

    def test3(self):
        """Different pairs fired on different ranks are all collected, in rank order."""
        instance = ForTesting(
            self.exefile,
            community_interface=ForTestingInterfaceFortranModule,
            number_of_workers=self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        instance.mpi_distribute_stopping_conditions()
        instance.fire_condition(
            pair_detection.type,
            1, 2, 0
        )
        instance.fire_condition(
            pair_detection.type,
            3, 4, 1
        )
        instance.fire_condition(
            pair_detection.type,
            5, 6, 2
        )
        instance.mpi_collect_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)), 3)
        self.assertEqual(len(pair_detection.particles(1)), 3)
        self.assertEqual(pair_detection.particles(0).key[0], particles[1].key)
        self.assertEqual(pair_detection.particles(1).key[0], particles[2].key)
        self.assertEqual(pair_detection.particles(0).key[1], particles[3].key)
        self.assertEqual(pair_detection.particles(1).key[1], particles[4].key)
        self.assertEqual(pair_detection.particles(0).key[2], particles[5].key)
        self.assertEqual(pair_detection.particles(1).key[2], particles[6].key)
        instance.reset_stopping_conditions()
        instance.stopping_conditions.pair_detection.enable()
        instance.stop()

    def test4(self):
        """A condition fired with no particle indices (-1,-1) is set but empty."""
        instance = ForTesting(
            self.exefile,
            community_interface=ForTestingInterfaceFortranModule,
            number_of_workers=self.get_number_of_workers()
        )
        instance.initialize_code()
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        pair_detection = instance.stopping_conditions.pair_detection
        particles = datamodel.Particles(20)
        particles.mass = list(range(1, 21)) | units.kg
        instance.particles.add_particles(particles)
        instance.stopping_conditions.pair_detection.enable()
        # NOTE(review): collect before fire / distribute after — this mirrors
        # the original statement order; confirm against the MPI protocol.
        instance.mpi_collect_stopping_conditions()
        instance.fire_condition(
            pair_detection.type,
            -1, -1, -1
        )
        instance.mpi_distribute_stopping_conditions()
        self.assertTrue(pair_detection.is_set())
        self.assertEqual(len(pair_detection.particles(0)), 0)
        instance.stop()

    def test5(self):
        """Index counters are per worker; collection sums them across workers."""
        number_of_workers = 4
        instance = ForTestingInterface(
            self.exefile,
            community_interface=ForTestingInterfaceFortranModule,
            number_of_workers=number_of_workers)
        instance.reset_stopping_conditions()
        instance.mpi_setup_stopping_conditions()
        instance.enable_stopping_condition(1)
        nmax = 50
        for i in range(nmax):
            # Renamed from 'next' to avoid shadowing the builtin.
            next_index = instance.next_index_for_stopping_condition()
            self.assertEqual(next_index, i)
        i, error = instance.get_number_of_stopping_conditions_set()
        self.assertEqual(error, 0)
        self.assertEqual(i, nmax)
        instance.mpi_collect_stopping_conditions()
        i, error = instance.get_number_of_stopping_conditions_set()
        self.assertEqual(error, 0)
        self.assertEqual(i, number_of_workers * nmax)
        instance.stop()
| 39.757219
| 135
| 0.677239
| 3,950
| 37,173
| 6.088101
| 0.057975
| 0.108533
| 0.08325
| 0.096682
| 0.865186
| 0.854832
| 0.851464
| 0.839737
| 0.823229
| 0.808383
| 0
| 0.015056
| 0.233476
| 37,173
| 934
| 136
| 39.799786
| 0.828911
| 0.008286
| 0
| 0.784106
| 0
| 0
| 0.09739
| 0.023499
| 0
| 0
| 0
| 0
| 0.157616
| 1
| 0.088742
| false
| 0
| 0.021192
| 0.030464
| 0.180132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d43e62ba58ff4299323647ebadf74548bddc21e
| 13,667
|
py
|
Python
|
molsysmt/forms/files/api_file_inpcrd.py
|
uibcdf/MolSysMT
|
9866a6fb090df9fff36af113a45164da4b674c09
|
[
"MIT"
] | 3
|
2020-06-02T03:55:52.000Z
|
2022-03-21T04:43:52.000Z
|
molsysmt/forms/files/api_file_inpcrd.py
|
uibcdf/MolSysMT
|
9866a6fb090df9fff36af113a45164da4b674c09
|
[
"MIT"
] | 28
|
2020-06-24T00:55:53.000Z
|
2021-07-16T22:09:19.000Z
|
molsysmt/forms/files/api_file_inpcrd.py
|
uibcdf/MolSysMT
|
9866a6fb090df9fff36af113a45164da4b674c09
|
[
"MIT"
] | 1
|
2021-06-17T18:55:25.000Z
|
2021-06-17T18:55:25.000Z
|
from molsysmt._private_tools.exceptions import *
from molsysmt.forms.common_gets import *
import numpy as np
import sys
import importlib
from molsysmt.molecular_system import molecular_system_components
from molsysmt._private_tools.files_and_directories import tmp_filename
# Identifier of this form within the molsysmt forms registry.
form_name = 'file:inpcrd'

is_form = {
    'file:inpcrd': form_name
}

info = ["AMBER ASCII restart/inpcrd file format", "https://ambermd.org/FileFormats.php#trajectory"]

# This file format only carries coordinates and box information.
has = molecular_system_components.copy()
for ii in ['coordinates', 'box']:
    has[ii] = True
def to_file_inpcrd(item, molecular_system=None, atom_indices='all', frame_indices='all', output_filename=None, copy_if_all=True):
    """Convert an inpcrd file item into an inpcrd file, optionally extracting a selection.

    Returns a (tmp_item, tmp_molecular_system) pair; tmp_molecular_system is
    None when no molecular_system is given.
    """
    tmp_molecular_system = None
    # Bug fix: compare strings with '==', not 'is' — identity comparison with a
    # literal is implementation-dependent and raises SyntaxWarning.
    if (atom_indices == 'all') and (frame_indices == 'all'):
        if copy_if_all:
            tmp_item = extract_item(item, output_filename=output_filename)
            if molecular_system is not None:
                tmp_molecular_system = molecular_system.combine_with_items(tmp_item)
        else:
            # Reuse the original item without copying the file.
            tmp_item = item
            if molecular_system is not None:
                tmp_molecular_system = molecular_system
    else:
        tmp_item = extract_item(item, atom_indices=atom_indices, frame_indices=frame_indices, output_filename=output_filename)
        if molecular_system is not None:
            tmp_molecular_system = molecular_system.combine_with_items(tmp_item, atom_indices=atom_indices, frame_indices=frame_indices)
    return tmp_item, tmp_molecular_system
def extract_item(item, atom_indices='all', frame_indices='all', output_filename=None):
    """Copy the whole inpcrd file to output_filename.

    Subsetting by atoms or frames is not implemented and raises
    NotImplementedError.
    """
    if output_filename is None:
        output_filename = tmp_filename(extension='inpcrd')
    tmp_item = None
    # Bug fix: '==' instead of 'is' for string comparison.
    if (atom_indices == 'all') and (frame_indices == 'all'):
        from shutil import copy as copy_file
        # (Removed a redundant re-import of tmp_filename; it is already
        # imported at module level.)
        copy_file(item, output_filename)
        tmp_item = output_filename
    else:
        raise NotImplementedError()
    return tmp_item
def to_molsysmt_MolSys(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert the inpcrd file into a molsysmt MolSys object."""
    from molsysmt.native.io.molsys.files import from_file_inpcrd as file_inpcrd_to_molsysmt_MolSys
    tmp_item, tmp_molecular_system = file_inpcrd_to_molsysmt_MolSys(
        item, molecular_system=molecular_system, atom_indices=atom_indices, frame_indices=frame_indices)
    return tmp_item, tmp_molecular_system
def to_molsysmt_Topology(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert the inpcrd file into a molsysmt Topology object."""
    from molsysmt.native.io.topology.files import from_file_inpcrd as file_inpcrd_to_molsysmt_Topology
    tmp_item, tmp_molecular_system = file_inpcrd_to_molsysmt_Topology(
        item, molecular_system=molecular_system, atom_indices=atom_indices, frame_indices=frame_indices)
    return tmp_item, tmp_molecular_system
def to_molsysmt_Trajectory(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Convert the inpcrd file into a molsysmt Trajectory object."""
    from molsysmt.native.io.trajectory.files import from_file_inpcrd as file_inpcrd_to_molsysmt_Trajectory
    tmp_item, tmp_molecular_system = file_inpcrd_to_molsysmt_Trajectory(
        item, molecular_system=molecular_system, atom_indices=atom_indices, frame_indices=frame_indices)
    return tmp_item, tmp_molecular_system
def to_mdtraj_AmberRestartFile(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Open the inpcrd file as an mdtraj AmberRestartFile.

    Returns (tmp_item, tmp_molecular_system); the latter is None when no
    molecular_system is given.
    """
    from mdtraj.formats import AmberRestartFile
    tmp_item = AmberRestartFile(item)
    if molecular_system is not None:
        tmp_molecular_system = molecular_system.combine_with_items(tmp_item)
    else:
        tmp_molecular_system = None
    return tmp_item, tmp_molecular_system
def to_openmm_AmberInpcrdFile(item, molecular_system=None, atom_indices='all', frame_indices='all'):
    """Open the inpcrd file as an OpenMM AmberInpcrdFile.

    Returns (tmp_item, tmp_molecular_system); the latter is None when no
    molecular_system is given.
    """
    from simtk.openmm.app import AmberInpcrdFile
    tmp_item = AmberInpcrdFile(item)
    if molecular_system is not None:
        tmp_molecular_system = molecular_system.combine_with_items(tmp_item)
    else:
        tmp_molecular_system = None
    return tmp_item, tmp_molecular_system
def add(item, from_item, atom_indices='all', frame_indices='all'):
    """Adding elements to an inpcrd file is not supported."""
    raise NotImplementedError()

def append_frames(item, step=None, time=None, coordinates=None, box=None):
    """Appending frames to an inpcrd file is not supported."""
    raise NotImplementedError()
###### Get

def aux_get(item, indices='all', frame_indices='all'):
    """Dispatch the calling getter to a form that can actually read inpcrd data.

    Picks the OpenMM AmberInpcrdFile API if available, else the mdtraj
    AmberRestartFile API, converts the item, and forwards to the getter of the
    same name in the delegate module.
    """
    from molsysmt.forms import forms
    # Name of the getter function that called us (one frame up the stack);
    # the delegate module must define a function with the same name.
    method_name = sys._getframe(1).f_code.co_name
    if 'openmm.AmberInpcrdFile' in forms:
        tmp_item, _ = to_openmm_AmberInpcrdFile(item)
        module = importlib.import_module('molsysmt.forms.classes.api_openmm_AmberInpcrdFile')
        _get = getattr(module, method_name)
        output = _get(tmp_item, indices=indices, frame_indices=frame_indices)
    elif 'mdtraj.AmberRestartFile' in forms:
        tmp_item, _ = to_mdtraj_AmberRestartFile(item)
        module = importlib.import_module('molsysmt.forms.classes.api_mdtraj_AmberRestartFile')
        _get = getattr(module, method_name)
        output = _get(tmp_item, indices=indices, frame_indices=frame_indices)
    else:
        raise NotImplementedError
    return output
# Atom
# All atom-level getters delegate to aux_get, which forwards the call (by
# function name) to an API module that can read inpcrd data.

def get_atom_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_atom_id_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_atom_name_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_atom_type_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_group_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_component_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_chain_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_molecule_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_entity_index_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_inner_bonded_atoms_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_inner_bonds_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_coordinates_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_frame_from_atom(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## group
# Group-level getters; all delegate to aux_get.

def get_group_id_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_group_name_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_group_type_from_group(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## component
# Component-level getters; all delegate to aux_get.

def get_component_id_from_component(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_component_name_from_component(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_component_type_from_component(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## molecule
# Molecule-level getters; all delegate to aux_get.

def get_molecule_id_from_molecule(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_molecule_name_from_molecule(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_molecule_type_from_molecule(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## chain
# Chain-level getters; all delegate to aux_get.

def get_chain_id_from_chain(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_chain_name_from_chain(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_chain_type_from_chain(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
## entity
# Entity-level getters; all delegate to aux_get.

def get_entity_id_from_entity(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_entity_name_from_entity(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_entity_type_from_entity(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
# System
# System-level getters; all delegate to aux_get.

def get_n_atoms_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_groups_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_components_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_chains_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_molecules_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_entities_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_bonds_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_coordinates_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_box_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_box_shape_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_box_lengths_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_box_angles_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_box_volume_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_time_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_step_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_n_frames_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_bonded_atoms_from_system(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
def get_has_bonds_from_system(item, indices='all', frame_indices='all'):
    """Return True when the system reports at least one bond."""
    output = False
    # NOTE(review): 'with_topology' is not defined anywhere in this module, so
    # this raises NameError when called. Presumably a module-level flag (or
    # has['topology']) was intended — confirm against sibling api_file modules.
    if with_topology:
        if get_n_bonds_from_system(item, indices=indices, frame_indices=frame_indices):
            output = True
    return output
## bond
# Bond-level getters; all delegate to aux_get.

def get_bond_order_from_bond(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_bond_type_from_bond(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)

def get_atom_index_from_bond(item, indices='all', frame_indices='all'):
    return aux_get(item, indices=indices, frame_indices=frame_indices)
###### Set
# inpcrd files are read-only in this API: setters are not implemented.

def set_box_to_system(item, indices='all', frame_indices='all', value=None):
    raise NotImplementedError

def set_coordinates_to_system(item, indices='all', frame_indices='all', value=None):
    raise NotImplementedError
# System

def get_coordinates_from_system(item, indices='all', frame_indices='all'):
    # NOTE(review): this redefinition shadows the aux_get-based version above;
    # only this one is visible at import time.
    from molsysmt.forms.classes.api_mdtraj_AmberRestartFile import get_coordinates_from_system as _get
    # Bug fix: to_mdtraj_AmberRestartFile returns an (item, molecular_system)
    # tuple — it must be unpacked (the original passed the whole tuple on).
    tmp_item, _ = to_mdtraj_AmberRestartFile(item)
    return _get(tmp_item, indices=indices, frame_indices=frame_indices)
def get_n_frames_from_system(item, indices='all', frame_indices='all'):
    # NOTE(review): shadows the aux_get-based definition above.
    from molsysmt.forms.classes.api_mdtraj_AmberRestartFile import get_n_frames_from_system as _get
    # Bug fix: unpack the (item, molecular_system) tuple, as the sibling
    # get_n_atoms_from_system already does.
    tmp_item, _ = to_mdtraj_AmberRestartFile(item)
    return _get(tmp_item, indices=indices, frame_indices=frame_indices)
def get_n_atoms_from_system(item, indices='all', frame_indices='all'):
    # NOTE(review): shadows the aux_get-based definition above.
    from molsysmt.forms.classes.api_mdtraj_AmberRestartFile import get_n_atoms_from_system as _get
    tmp_item, _ = to_mdtraj_AmberRestartFile(item)
    return _get(tmp_item, indices=indices, frame_indices=frame_indices)
| 34.339196
| 168
| 0.773615
| 1,929
| 13,667
| 5.119233
| 0.063245
| 0.22238
| 0.227038
| 0.140354
| 0.85043
| 0.837873
| 0.825418
| 0.815494
| 0.815291
| 0.769418
| 0
| 0.000084
| 0.12768
| 13,667
| 397
| 169
| 34.425693
| 0.828217
| 0.005049
| 0
| 0.47619
| 0
| 0
| 0.048644
| 0.010613
| 0
| 0
| 0
| 0
| 0
| 1
| 0.304762
| false
| 0
| 0.095238
| 0.228571
| 0.685714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
1d593a614da0fb9c610debf2183170f2a972ffef
| 5,167
|
py
|
Python
|
Curve/cURVE.py
|
addy1997/python-RRT
|
93983e17f2e6e93ff79c8f04a86ce28718ba2779
|
[
"MIT"
] | 11
|
2020-05-28T00:55:55.000Z
|
2022-01-03T10:59:26.000Z
|
Curve/cURVE.py
|
addy1997/Internship-HTIC
|
93983e17f2e6e93ff79c8f04a86ce28718ba2779
|
[
"MIT"
] | 1
|
2020-10-19T16:20:30.000Z
|
2021-03-22T19:01:14.000Z
|
Curve/cURVE.py
|
addy1997/Internship-HTIC
|
93983e17f2e6e93ff79c8f04a86ce28718ba2779
|
[
"MIT"
] | 2
|
2021-07-07T01:09:50.000Z
|
2022-03-12T23:40:56.000Z
|
#!/usr/bin/env python
# coding: utf-8
# In[17]:
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm

# if using a Jupyter notebook, include:
get_ipython().run_line_magic('matplotlib', 'inline')

# Cell 1: plot z = -x^4*sqrt(x^2+y^2) + y^2*sqrt(x^2+y^2) over [-9, 10)^2:
# surface + contour projections on the left, a wireframe slice on the right.
fig = plt.figure(figsize=(12, 6))
ax1 = fig.add_subplot(121, projection='3d')
ax = fig.add_subplot(122, projection='3d')

x = np.arange(-9, 10, 0.01)
y = np.arange(-9, 10, 0.01)
print(y.size)
X, Y = np.meshgrid(x, y)
Z = -X**4*np.sqrt(x**2 + y**2) + Y**2*np.sqrt(x**2 + y**2)

# Projection on the right: evaluate along a single constant-y slice.
x1 = np.arange(-9, 10, 0.01)
y1 = np.arange(-9, 10, 0.01)
y1_axis = y1[100]*y1.size
print(y1.size)
X1, Y1 = np.meshgrid(x1, y1_axis)
Z1 = -X1**4*np.sqrt(x1**2 + y1_axis**2) + Y1**2*np.sqrt(x1**2 + y1_axis**2)

# Plot a basic wireframe (drawn twice in the original; kept for identical output).
ax.plot_wireframe(X1, Y1, Z1, rstride=1, cstride=1)
ax.set_title('row step size 10, column step size 10')
ax.plot_wireframe(X1, Y1, Z1, rstride=1, cstride=1)
ax.set_title('row step size 20, column step size 20')

surf = ax1.plot_surface(X, Y, Z, rstride=4, cstride=4, alpha=0.8, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='z', offset=np.min(Z), cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='x', offset=5, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='y', offset=-4, cmap=cm.ocean)
fig.colorbar(surf, ax=ax, shrink=0.5, aspect=5)

ax1.set_xlabel('X')
ax1.set_xlim(-15, 35)
ax1.set_ylabel('Y')
ax1.set_ylim(-15, 35)
ax1.set_zlabel('Z')
ax1.set_zlim(np.min(Z), np.max(Z))
ax1.set_title('3D surface with 2D contour plot projections')

ax.set_xlabel('X1')
ax.set_xlim(-15, 35)
ax.set_ylabel('Y1')
# Bug fix: the original repeated set_xlim here after set_ylabel('Y1');
# the y limit was clearly intended.
ax.set_ylim(-15, 35)
ax.set_zlabel('Z1')
ax.set_zlim(np.min(Z1), np.max(Z1))
plt.show()
# In[2]:
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
# if using a Jupyter notebook, include:
get_ipython().run_line_magic('matplotlib', 'inline')
# Cell 2: same surface as cell 1 but over the domain [-10, -4)^2.
fig = plt.figure(figsize=(12,6))
ax1 = fig.add_subplot(121, projection='3d')
ax = fig.add_subplot(122, projection='3d')
x = np.arange(-10,-4,0.01)
y = np.arange(-10,-4,0.01)
y.size
print(y.size)
# NOTE(review): y_new is computed but never used below.
y_new = y.reshape(1,y.size)
X,Y = np.meshgrid(x,y)
Z = - X**4*np.sqrt(x**2+y**2) + Y**2*np.sqrt(x**2+y**2)
x1 = np.arange(-10,-4,0.01)
y1 = np.arange(-10,-4,0.01)
# Scalar y-value for the wireframe slice on the right-hand axes.
y1_axis = (y1[0]+y[2])*y1.size
y1_axis
y1.size
print(y1.size)
# NOTE(review): y1_new and x1_new are computed but never used below.
y1_new = y1.reshape(y1.size,1)
x1_new = x1.reshape(1,y1.size)
X1,Y1 = np.meshgrid(x1,y1_axis)
Z1 = - X1**4*np.sqrt(x1**2+y1_axis**2) + Y1**2*np.sqrt(x1**2+y1_axis**2)
# Plot a basic wireframe
ax.plot_wireframe(X1, Y1, Z1, rstride=1, cstride=1)
ax.set_title('row step size 10, column step size 10')
# Drawn a second time; only this second title remains visible.
ax.plot_wireframe(X1, Y1, Z1, rstride=1, cstride=1)
ax.set_title('row step size 20, column step size 20')
#plt.show()
# Left axes: surface plus contour projections onto the z, x and y planes.
surf = ax1.plot_surface(X, Y, Z, rstride=8, cstride=8, alpha=0.8, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='z', offset=np.min(Z), cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='x', offset=-5, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='y', offset=-4, cmap=cm.ocean)
fig.colorbar(surf, ax=ax, shrink=0.5, aspect=5)
ax1.set_xlabel('X')
ax1.set_xlim(-15, 5)
ax1.set_ylabel('Y')
ax1.set_ylim(-15, 5)
ax1.set_zlabel('Z')
ax1.set_zlim(np.min(Z), np.max(Z))
ax1.set_title('3D surface with 2D contour plot projections')
plt.show()
# In[3]:
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm

# if using a Jupyter notebook, include:
get_ipython().run_line_magic('matplotlib', 'inline')

# Cell 3: surface over [4, 12) (sum of two aranges) plus a cosine-based
# wireframe slice on the right-hand axes.
fig = plt.figure(figsize=(12, 6))
ax1 = fig.add_subplot(121, projection='3d')
ax = fig.add_subplot(122, projection='3d')

x = np.arange(0, 4, 0.01) + np.arange(4, 8, 0.01)
y = np.arange(0, 4, 0.01) + np.arange(4, 8, 0.01)
print(y.size)
X, Y = np.meshgrid(x, y)
Z = -X**4*np.sqrt(x**2 + y**2) + Y**2*np.sqrt(x**2 + y**2)

x1 = np.arange(0, 4, 0.01) + np.arange(4, 8, 0.01)
y1 = np.arange(0, 4, 0.01) + np.arange(4, 8, 0.01)
y1_axis = y1[0]*y1.size
print(y1.size)
X1, Y1 = np.meshgrid(x1, y1_axis)
R = X1*np.cos(np.sqrt(x1**2 + y1_axis**2)) + Y1*np.cos(np.sqrt(x1**2 + y1_axis**2))

# Plot a basic wireframe.
# Bug fix: the original plotted Z1, which is not defined in this cell; the
# stale Z1 from cell 2 has 600 columns vs this cell's 400-point grid, so
# plot_wireframe fails with a shape mismatch. The freshly computed R is
# clearly the intended data.
ax.plot_wireframe(X1, Y1, R, rstride=1, cstride=1)
ax.set_title('row step size 10, column step size 10')
ax.plot_wireframe(X1, Y1, R, rstride=1, cstride=1)
ax.set_title('row step size 20, column step size 20')

surf = ax1.plot_surface(X, Y, Z, rstride=8, cstride=8, alpha=0.8, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='z', offset=np.min(Z), cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='x', offset=-5, cmap=cm.ocean)
cset = ax1.contourf(X, Y, Z, zdir='y', offset=-4, cmap=cm.ocean)
fig.colorbar(surf, ax=ax, shrink=0.5, aspect=5)

ax1.set_xlabel('X')
ax1.set_xlim(-15, 15)
ax1.set_ylabel('Y')
ax1.set_ylim(-15, 15)
ax1.set_zlabel('Z')
ax1.set_zlim(np.min(Z), np.max(Z))
ax1.set_title('3D surface with 2D contour plot projections')
plt.show()
# In[11]:
# In[ ]:
| 21.529167
| 80
| 0.673311
| 1,059
| 5,167
| 3.205855
| 0.107649
| 0.037113
| 0.013255
| 0.039764
| 0.958174
| 0.956406
| 0.952283
| 0.91458
| 0.8919
| 0.884242
| 0
| 0.091611
| 0.123282
| 5,167
| 239
| 81
| 21.619247
| 0.657837
| 0.058641
| 0
| 0.787879
| 0
| 0
| 0.08982
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0.045455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d5c4c484414b0e42e944f3bbc2a80dc12a6c947
| 34
|
py
|
Python
|
model/__init__.py
|
SeongSuKim95/Re-ID-baseline
|
b145bba712492f7a93cd3771e007fa694b1c44b6
|
[
"MIT"
] | 297
|
2021-03-26T14:29:47.000Z
|
2021-09-10T11:33:56.000Z
|
PASS_transreid/model/__init__.py
|
CASIA-IVA-Lab/PASS-reID
|
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
|
[
"Apache-2.0"
] | 31
|
2019-06-13T02:03:22.000Z
|
2021-12-30T03:55:46.000Z
|
PASS_transreid/model/__init__.py
|
CASIA-IVA-Lab/PASS-reID
|
46dc6d25f4396e35ac1a766ad2dcaa580beccf15
|
[
"Apache-2.0"
] | 71
|
2019-06-17T01:10:08.000Z
|
2022-03-03T06:51:48.000Z
|
from .make_model import make_model
| 34
| 34
| 0.882353
| 6
| 34
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1d900becbae18d8c39578d34fd4c47b5a0263ab2
| 23,162
|
py
|
Python
|
apps/integration_tests/log_event_schemas.py
|
CMSgov/bluebutton-web-server
|
3e7bfb049a2b6bd64fdc4eeae7512b461ccbe682
|
[
"Apache-2.0"
] | 25
|
2017-12-10T00:48:31.000Z
|
2022-03-25T01:29:13.000Z
|
apps/integration_tests/log_event_schemas.py
|
CMSgov/bluebutton-web-server
|
3e7bfb049a2b6bd64fdc4eeae7512b461ccbe682
|
[
"Apache-2.0"
] | 298
|
2017-12-05T05:53:32.000Z
|
2022-03-21T19:29:03.000Z
|
apps/integration_tests/log_event_schemas.py
|
CMSgov/bluebutton-web-server
|
3e7bfb049a2b6bd64fdc4eeae7512b461ccbe682
|
[
"Apache-2.0"
] | 31
|
2017-12-04T16:01:12.000Z
|
2021-09-26T22:34:55.000Z
|
from rest_framework import status
'''
Log entry schemas used for integration tests
See the following for information about the JSON Schema vocabulary: https://json-schema.org/
'''
LOG_MIDDLEWARE_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"elapsed": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"path": {"pattern": ".+"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK, status.HTTP_301_MOVED_PERMANENTLY]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "elapsed", "request_uuid",
"path", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_POST_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"elapsed": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"path": {"pattern": ".+"},
"request_method": {"pattern": "POST"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "elapsed", "request_uuid",
"path", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_TESTCLIENT_AUTHLINK_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"path": {"pattern": ".+"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"path", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_AUTH_START_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"location": {"pattern": ".+"},
"auth_uuid": {"type": "string"},
"auth_client_id": {"type": "string"},
"auth_app_id": {"type": "string"},
"auth_app_name": {"pattern": "TestApp"},
"auth_require_demographic_scopes": {"pattern": "^True$"},
"req_qparam_client_id": {"type": "string"},
"req_qparam_response_type": {"pattern": "code"},
"req_app_name": {"pattern": "TestApp"},
"req_app_id": {"type": "number"},
"path": {"pattern": "/v1/o/authorize/"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_302_FOUND]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"location", "auth_uuid", "auth_client_id", "auth_app_id", "auth_app_name", "auth_require_demographic_scopes",
"req_qparam_client_id", "req_qparam_response_type", "req_app_name", "req_app_id",
"path", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_MEDICARE_LOGIN_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"location": {"pattern": ".+"},
"auth_uuid": {"type": "string"},
"auth_client_id": {"type": "string"},
"auth_app_id": {"type": "string"},
"auth_app_name": {"pattern": "TestApp"},
"auth_require_demographic_scopes": {"pattern": "^True$"},
"path": {"pattern": "/mymedicare/login"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_302_FOUND]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"location", "auth_uuid", "auth_client_id", "auth_app_id", "auth_app_name", "auth_require_demographic_scopes",
"path", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_MEDICARE_CALLBACK_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"location": {"pattern": ".+"},
"auth_uuid": {"type": "string"},
"auth_client_id": {"type": "string"},
"auth_app_id": {"type": "string"},
"auth_app_name": {"pattern": "TestApp"},
"auth_crosswalk_action": {"enum": ["R", "C"]},
"auth_require_demographic_scopes": {"pattern": "^True$"},
"req_user_id": {"type": "number"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"path": {"pattern": "/mymedicare/sls-callback"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_302_FOUND]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"location", "auth_uuid", "auth_client_id", "auth_app_id", "auth_app_name", "auth_require_demographic_scopes",
"req_user_id", "req_user_username", "req_fhir_id",
"path", "user", "fhir_id", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_AUTHORIZE_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"location": {"pattern": ".*"},
"auth_uuid": {"type": "string"},
"auth_client_id": {"type": "string"},
"auth_app_id": {"type": "string"},
"auth_app_name": {"pattern": "TestApp"},
"auth_crosswalk_action": {"enum": ["R", "C"]},
"auth_require_demographic_scopes": {"pattern": "^True$"},
"req_qparam_client_id": {"type": "string"},
"req_qparam_response_type": {"pattern": "code"},
"req_app_name": {"pattern": "TestApp"},
"req_app_id": {"type": "number"},
"path": {"pattern": "/v1/o/authorize/.+"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"location", "auth_uuid", "auth_client_id", "auth_app_id", "auth_app_name", "auth_crosswalk_action",
"auth_require_demographic_scopes", "req_qparam_client_id",
"req_qparam_response_type", "req_app_name", "req_app_id",
"path", "user", "fhir_id", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_ACCESS_GRANT_EVENT_SCHEMA = {
"title": "MiddlewareLogEventSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"location": {"pattern": ".+"},
"auth_uuid": {"type": "string"},
"auth_client_id": {"type": "string"},
"auth_app_id": {"type": "string"},
"auth_app_name": {"pattern": "TestApp"},
"auth_crosswalk_action": {"enum": ["R", "C"]},
"auth_require_demographic_scopes": {"pattern": "^True$"},
"req_redirect_uri": {"type": "string"},
"req_scope": {"type": "string"},
"req_share_demographic_scopes": {"pattern": "^True$"},
"req_allow": {"pattern": "Allow"},
"req_user_id": {"type": "integer"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"req_qparam_client_id": {"type": "string"},
"req_qparam_response_type": {"pattern": "code"},
"req_app_name": {"pattern": "TestApp"},
"req_app_id": {"type": "number"},
"path": {"pattern": "/v1/o/authorize/.+"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"request_method": {"pattern": "POST"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_302_FOUND]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"location", "auth_uuid", "auth_client_id", "auth_app_id", "auth_app_name",
"auth_crosswalk_action", "auth_require_demographic_scopes",
"req_redirect_uri", "req_scope", "req_user_username", "req_fhir_id",
"req_qparam_client_id", "req_qparam_response_type", "req_app_name", "req_app_id",
"path", "user", "fhir_id", "request_method", "request_scheme", "response_code"]
}
LOG_MIDDLEWARE_TESTCLIENT_FHIR_READ_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"path": {"pattern": "/testclient/.+"},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code", "path"]
}
LOG_MIDDLEWARE_TESTCLIENT_FHIR_SEARCH_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"path": {"pattern": "/testclient/.+"},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code", "path"]
}
LOG_MIDDLEWARE_TESTCLIENT_FHIR_NAVIGATION_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"path": {"pattern": "/testclient/.+"},
"req_qparam__count": {"type": "string"},
"req_qparam_patient": {"type": "string"},
"req_qparam_beneficiary": {"type": "string"},
"req_qparam_startindex": {"type": "string"},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code", "path",
"req_qparam__count", "req_qparam_startindex"]
}
LOG_MIDDLEWARE_FHIR_SEARCH_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"req_user_id": {"type": "integer"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"req_qparam_format": {"pattern": "json"},
"req_qparam_patient": {"type": "string"},
"req_qparam_beneficiary": {"type": "string"},
"path": {"pattern": "/v1/fhir/.+"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"access_token_scopes": {"type": "string"},
"access_token_id": {"type": "number"},
"app_require_demographic_scopes": {"type": "boolean"},
"user_id": {"type": "integer"},
"user_username": {"type": "string"},
"fhir_bundle_type": {"pattern": "searchset|null"},
"fhir_resource_id": {"type": "string"},
"fhir_resource_type": {"pattern": "Bundle|Patient|Coverage|ExplanationOfBenefit"},
"fhir_attribute_count": {"type": "number"},
"fhir_entry_count": {"type": ["number", "null"]},
"fhir_total": {"type": ["number", "null"]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code",
"req_user_id", "req_user_username", "req_fhir_id", "req_qparam_format",
"path", "user", "fhir_id", "access_token_scopes", "access_token_id", "app_require_demographic_scopes",
"user_id", "user_username", "fhir_bundle_type", "fhir_resource_id", "fhir_resource_type",
"fhir_attribute_count", "fhir_entry_count", "fhir_total"]
}
LOG_MIDDLEWARE_FHIR_NAVIGATION_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"req_user_id": {"type": "integer"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"req_qparam__count": {"type": "string"},
"req_qparam_format": {"pattern": "json"},
"req_qparam_patient": {"type": "string"},
"req_qparam_beneficiary": {"type": "string"},
"req_qparam_startindex": {"type": "string"},
"path": {"pattern": "/v1/fhir/.+"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"access_token_scopes": {"type": "string"},
"access_token_id": {"type": "number"},
"app_require_demographic_scopes": {"type": "boolean"},
"user_id": {"type": "integer"},
"user_username": {"type": "string"},
"fhir_bundle_type": {"pattern": "searchset|null"},
"fhir_resource_id": {"type": "string"},
"fhir_resource_type": {"pattern": "Bundle|Patient|Coverage|ExplanationOfBenefit"},
"fhir_attribute_count": {"type": "number"},
"fhir_entry_count": {"type": ["number", "null"]},
"fhir_total": {"type": ["number", "null"]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr",
"request_uuid", "request_method", "request_scheme", "response_code",
"req_user_id", "req_user_username", "req_fhir_id",
"req_qparam__count", "req_qparam_format", "req_qparam_startindex",
"path", "user", "fhir_id", "access_token_scopes", "access_token_id", "app_require_demographic_scopes",
"user_id", "user_username", "fhir_bundle_type", "fhir_resource_id", "fhir_resource_type",
"fhir_attribute_count", "fhir_entry_count", "fhir_total"]
}
LOG_MIDDLEWARE_FHIR_READ_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"req_user_id": {"type": "integer"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"path": {"pattern": "/v1/fhir/.+"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"access_token_scopes": {"type": "string"},
"access_token_id": {"type": "number"},
"app_require_demographic_scopes": {"type": "boolean"},
"user_id": {"type": "integer"},
"user_username": {"type": "string"},
"fhir_bundle_type": {"pattern": "searchset|null"},
"fhir_resource_id": {"type": "string"},
"fhir_resource_type": {"pattern": "Bundle|Patient|Coverage|ExplanationOfBenefit"},
"fhir_attribute_count": {"type": "number"},
"fhir_entry_count": {"type": ["number", "null"]},
"fhir_total": {"type": ["number", "null"]},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr",
"request_uuid", "request_method", "request_scheme", "response_code",
"req_user_id", "req_user_username", "req_fhir_id",
"path", "user", "fhir_id", "access_token_scopes", "access_token_id", "app_require_demographic_scopes",
"user_id", "user_username", "fhir_bundle_type", "fhir_resource_id", "fhir_resource_type",
"fhir_attribute_count", "fhir_entry_count", "fhir_total"]
}
LOG_MIDDLEWARE_FHIR_USERINFO_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"req_user_id": {"type": "integer"},
"req_user_username": {"type": "string"},
"req_fhir_id": {"type": "string"},
"path": {"pattern": "/v1/connect/userinfo"},
"user": {"type": "string"},
"fhir_id": {"type": "string"},
"access_token_scopes": {"type": "string"},
"access_token_id": {"type": "number"},
"app_require_demographic_scopes": {"type": "boolean"},
"user_id": {"type": "integer"},
"user_username": {"type": "string"},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code",
"req_user_id", "req_user_username", "req_fhir_id",
"path", "user", "fhir_id", "access_token_scopes",
"access_token_id", "app_require_demographic_scopes",
"user_id", "user_username"]
}
LOG_MIDDLEWARE_TESTCLIENT_MISCINFO_EVENT_SCHEMA = {
"title": "RequestResponseLogSchema",
"type": "object",
"properties": {
"type": {"pattern": "request_response_middleware"},
"size": {"type": "integer"},
"start_time": {"type": "number"},
"end_time": {"type": "number"},
"ip_addr": {"type": "string", "format": "ip-address"},
"request_uuid": {"type": "string", "format": "uuid"},
"request_method": {"pattern": "GET"},
"request_scheme": {"pattern": "http"},
"response_code": {"type": "integer", "enum": [status.HTTP_200_OK]},
"path": {"pattern": "/testclient/userinfo|/testclient/openidConfig|/testclient/metadata|/testclient/restart"},
},
"required": ["type", "size", "start_time", "end_time", "ip_addr", "request_uuid",
"request_method", "request_scheme", "response_code", "path"]
}
| 48.864979
| 127
| 0.551939
| 2,272
| 23,162
| 5.268926
| 0.053697
| 0.0827
| 0.037424
| 0.032078
| 0.953471
| 0.94779
| 0.94779
| 0.945368
| 0.943363
| 0.941692
| 0
| 0.00327
| 0.234177
| 23,162
| 473
| 128
| 48.968288
| 0.671609
| 0
| 0
| 0.836283
| 0
| 0
| 0.522449
| 0.090284
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002212
| 0
| 0.002212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d52cb4b320297baa26360810f064012385b0966c
| 349
|
py
|
Python
|
tests/internal/instance_type/test_instance_type_m5_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_m5_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_m5_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module instance_type.m5
import pytest
import ec2_compare.internal.instance_type.m5
def test_get_internal_data_instance_type_m5_get_instances_list():
assert len(ec2_compare.internal.instance_type.m5.get_instances_list()) > 0
def test_get_internal_data_instance_type_m5_get():
assert len(ec2_compare.internal.instance_type.m5.get) > 0
| 34.9
| 76
| 0.848138
| 56
| 349
| 4.839286
| 0.339286
| 0.265683
| 0.309963
| 0.250923
| 0.826568
| 0.826568
| 0.612546
| 0.612546
| 0.612546
| 0
| 0
| 0.034056
| 0.074499
| 349
| 9
| 77
| 38.777778
| 0.804954
| 0.088825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
d536352e29b272115cd9288519db6a0fba3cf96f
| 13,306
|
py
|
Python
|
src/testing/TestON/tests/SATSdetectionTime/Dependency/updateRerouteAttackFlow.py
|
securedataplane/preacher
|
2f76581de47036e79cd6e1183948c88b35ce4950
|
[
"MIT"
] | 1
|
2020-07-23T08:06:44.000Z
|
2020-07-23T08:06:44.000Z
|
src/testing/TestON/tests/SATSdetectionTime/Dependency/updateRerouteAttackFlow.py
|
securedataplane/preacher
|
2f76581de47036e79cd6e1183948c88b35ce4950
|
[
"MIT"
] | null | null | null |
src/testing/TestON/tests/SATSdetectionTime/Dependency/updateRerouteAttackFlow.py
|
securedataplane/preacher
|
2f76581de47036e79cd6e1183948c88b35ce4950
|
[
"MIT"
] | null | null | null |
def __init__( self ):
self.default = ''
def updateFlow( ):
'''
Helper routine to update the attacking flow rule on the attackers switch
'''
import json
import random
if main.attacker == "aggregate":
inputPort0_2_1 = main.Mininet1.getFlowInputPort( "0_2_1" )
inputPort0_3_1 = main.Mininet1.getFlowInputPort( "0_3_1" )
main.attackerHashValue = str( random.randint( 0, 4095 ) )
if inputPort0_2_1 != '':
main.log.info( "Configure flow rule that changes the destination MAC address and resubmits the modified packets to the flow table" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:00:01:02-\\>dl_dst,resubmit:" + inputPort0_2_1
stepResult = main.Mininet1.addFlow( [ "0_2_1" ], version="1.3", flowcmd=flowRule )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="0_2_1 configured to emulate an injection attack that reroutes traffic",
onfail="0_2_1 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "0_2_1", "0_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:00:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "0_3_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:00:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
else:
main.log.info( "Configure flow rule that changes the destination MAC address and resubmits the modified packets to the flow table" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:00:01:02-\\>dl_dst,resubmit:" + inputPort0_3_1
stepResult = main.Mininet1.addFlow( [ "0_3_1" ], version="1.3", flowcmd=flowRule)
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="0_3_1 configured to emulate an injection attack that reroutes traffic",
onfail="0_3_1 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "0_3_1", "0_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:00:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "0_2_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:00:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
elif main.attacker == "core":
inputPort4_1_1 = main.Mininet1.getFlowInputPort( "4_1_1" )
inputPort4_1_2 = main.Mininet1.getFlowInputPort( "4_1_2" )
inputPort4_2_1 = main.Mininet1.getFlowInputPort( "4_2_1" )
inputPort4_2_2 = main.Mininet1.getFlowInputPort( "4_2_2" )
main.attackerHashValue = str( random.randint( 0, 4095 ) )
if inputPort4_1_1 != '':
main.log.info( "Configure flow rule that changes the destination MAC address to 00:00:00:02:01:02" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:02:01:02-\\>dl_dst,resubmit:" + inputPort4_1_1
stepResult = main.Mininet1.addFlow( [ "4_1_1" ], version="1.3", flowcmd=flowRule )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="4_1_1 configured to emulate an injection attack that reroutes traffic",
onfail="4_1_1 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_1", "2_2_1", "2_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_2", "4_2_1", "4_2_2", "2_3_1", "2_0_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
elif inputPort4_1_2 != '':
main.log.info( "Configure flow rule that changes the destination MAC address to 00:00:00:02:01:02" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:02:01:02-\\>dl_dst,resubmit:" + inputPort4_1_2
stepResult = main.Mininet1.addFlow( [ "4_1_2" ], version="1.3", flowcmd=flowRule )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="4_1_2 configured to emulate an injection attack thatreroutes traffic",
onfail="4_1_2 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_2", "2_2_1", "2_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_1", "4_2_1", "4_2_2", "2_3_1", "2_0_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
elif inputPort4_2_1 != '':
main.log.info( "Configure flow rule that changes the destination MAC address to 00:00:00:02:01:02" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:02:01:02-\\>dl_dst,resubmit:" + inputPort4_2_1
stepResult = main.Mininet1.addFlow( [ "4_2_1" ], version="1.3", flowcmd=flowRule )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="4_2_1 configured to emulate an injection attack that reroutes traffic",
onfail="4_2_1 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_2_1", "2_3_1", "2_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_1", "4_1_2", "4_2_2", "2_2_1", "2_0_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
elif inputPort4_2_2 != '':
main.log.info( "Configure flow rule that changes the destination MAC address to 00:00:00:02:01:02" )
stepResult = main.TRUE
flowRule = "priority=40002,dl_type=0x0800,dl_src=00:00:00:00:00:02,dl_dst=00:00:00:01:00:02," +\
"actions=pop_vlan,push_vlan:0x8100,set_field:" + main.attackerHashValue + "-\\>vlan_vid," +\
"set_field:00:00:00:02:01:02-\\>dl_dst,resubmit:" + inputPort4_2_2
stepResult = main.Mininet1.addFlow( [ "4_2_2" ], version="1.3", flowcmd=flowRule )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="4_2_2 configured to emulate an injection attack that reroutes traffic",
onfail="4_2_2 not configured to emulate an injection attack that reroutes traffic" )
main.log.info( "Now to check if the flows are installed" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_2_2", "2_3_1", "2_1_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.TRUE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
main.log.info( "Now to make sure that flows are not installed on other switches" )
stepResult = main.Mininet1.checkAttackFlows( [ "4_1_1", "4_1_2", "4_2_1", "2_2_1", "2_0_1" ], "dl_src=00:00:00:00:00:02,dl_dst=00:00:00:02:01:02" )
utilities.assert_equals( expect=main.FALSE,
actual=stepResult,
onpass="Successfully installed attack flows",
onfail="Failed to install attack flows" )
| 83.1625
| 163
| 0.528108
| 1,563
| 13,306
| 4.330774
| 0.068458
| 0.079185
| 0.078003
| 0.049638
| 0.957453
| 0.929827
| 0.90161
| 0.894815
| 0.881814
| 0.881814
| 0
| 0.11058
| 0.371336
| 13,306
| 159
| 164
| 83.685535
| 0.698625
| 0.005411
| 0
| 0.640523
| 0
| 0.143791
| 0.360897
| 0.122217
| 0
| 0
| 0.005452
| 0
| 0.117647
| 1
| 0.013072
| false
| 0.117647
| 0.013072
| 0
| 0.026144
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d55f3cf2ac7943d97ca852ac0ebe55584dbd79a3
| 8,418
|
py
|
Python
|
test/test_align_edlib.py
|
PacificBiosciences/falcon3
|
fde93d4ed79746cd280006bca6808e6975585738
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
test/test_align_edlib.py
|
PacificBiosciences/falcon3
|
fde93d4ed79746cd280006bca6808e6975585738
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
test/test_align_edlib.py
|
PacificBiosciences/falcon3
|
fde93d4ed79746cd280006bca6808e6975585738
|
[
"BSD-3-Clause-Clear"
] | 5
|
2020-07-22T14:10:16.000Z
|
2021-04-26T17:07:05.000Z
|
import falcon_kit.align_edlib as mod
import helpers
import pytest
import os
import random
# Deterministic seed: the tests below build random DNA sequences and only
# need reproducibility across runs, not any particular random values.
random.seed(1234567)
def test_get_aln_results_1():
    """Self-alignment of a 45 kb sequence: zero delta, full identity and coverage."""
    ref = ''.join(random.choice('ACTG') for _ in range(45000))
    raw = mod.get_aln_results(ref, ref, 2000)
    # Compare (delta_len, idt, cov) with floats rounded for stability.
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 1.00, 1.00)
def test_get_aln_results_2():
    """Query missing a 5 kb internal block: negative delta_len, half coverage."""
    ref = ''.join(random.choice('ACTG') for _ in range(45000))
    query = ref[:20000] + ref[25000:]
    raw = mod.get_aln_results(ref, query, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (-5000, 1.00, 0.50)
def test_get_aln_results_3():
    """Very long (300 kb) self-alignment.

    Edlib handles this fine; the DW alignment module would bail out
    without aligning because the sequences are too big.
    """
    ref = ''.join(random.choice('ACTG') for _ in range(300000))
    raw = mod.get_aln_results(ref, ref, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 1.00, 1.00)
def test_get_aln_results_4():
    """Sequences shorter than min_seq_len are never aligned.

    The legacy deduplication code skips alignment below this threshold,
    so even an exact duplicate reports zero identity/coverage here.
    """
    ref = ''.join(random.choice('ACTG') for _ in range(300))
    raw = mod.get_aln_results(ref, ref, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 0.00, 0.00)
def test_get_aln_results_5():
    """The DW alignment enforces a 100 bp minimum on (e1 - s1) and (e2 - s2),
    so a 90 bp self-alignment reports no identity/coverage."""
    ref = ''.join(random.choice('ACTG') for _ in range(90))
    raw = mod.get_aln_results(ref, ref, 50)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 0.00, 0.00)
def test_get_aln_results_6():
    """Two sequences over disjoint alphabets share nothing to align."""
    ref = ''.join(random.choice('AC') for _ in range(3000))
    query = ''.join(random.choice('GT') for _ in range(3000))
    raw = mod.get_aln_results(ref, query, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 0.00, 0.00)
def test_get_global_aln_results_1():
    """Global self-alignment of a 45 kb sequence is a perfect match."""
    ref = ''.join(random.choice('ACTG') for _ in range(45000))
    raw = mod.get_global_aln_results(ref, ref, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 1.00, 1.00)
def test_get_global_aln_results_2():
    """Query with a 5 kb deletion relative to ref: global alignment still
    covers everything, at reduced identity."""
    ref = ''.join(random.choice('ACTG') for _ in range(45000))
    query = ref[:20000] + ref[25000:]
    raw = mod.get_global_aln_results(ref, query, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (-5000, 0.89, 1.0)
def test_get_global_aln_results_3():
    """Query with a 5 kb insertion relative to ref (mirror of the deletion
    case): positive delta_len, same identity/coverage."""
    query = ''.join(random.choice('ACTG') for _ in range(45000))
    ref = query[:20000] + query[25000:]
    raw = mod.get_global_aln_results(ref, query, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (5000, 0.89, 1.0)
def test_get_global_aln_results_4():
    """Very long (300 kb) sequences: Edlib is memory-frugal and aligns
    these without trouble."""
    ref = ''.join(random.choice('ACTG') for _ in range(300000))
    raw = mod.get_global_aln_results(ref, ref, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 1.00, 1.00)
def test_get_global_aln_results_5():
    """Sequences shorter than min_seq_len are never aligned.

    Mirrors the legacy deduplication threshold: below it, even an exact
    duplicate reports zero identity/coverage.
    """
    ref = ''.join(random.choice('ACTG') for _ in range(300))
    raw = mod.get_global_aln_results(ref, ref, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 0.00, 0.00)
def test_get_global_aln_results_6():
    """Short (90 bp) identical sequences: unlike the DW aligner's 100 bp
    span minimum, Edlib has no such constraint, so the match is perfect."""
    ref = ''.join(random.choice('ACTG') for _ in range(90))
    raw = mod.get_global_aln_results(ref, ref, 50)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 1.00, 1.00)
def test_get_global_aln_results_7():
    """Completely different sequences: identity drops to zero but global
    alignment always reports full coverage."""
    ref = ''.join(random.choice('AC') for _ in range(3000))
    query = ''.join(random.choice('GT') for _ in range(3000))
    raw = mod.get_global_aln_results(ref, query, 2000)
    # (delta_len, idt, cov)
    assert (raw[0], round(raw[1], 2), round(raw[2], 2)) == (0, 0.00, 1.00)
def test_count_cigar_ops_1():
    """An empty CIGAR string yields all-zero counts."""
    # Expected tuple is (num_m, num_i, num_d, total_len).
    assert mod.count_cigar_ops('') == (0, 0, 0, 0)
def test_count_cigar_ops_2():
    """A single match op is counted as both matches and total length."""
    # Expected tuple is (num_m, num_i, num_d, total_len).
    assert mod.count_cigar_ops('10M') == (10, 0, 0, 10)
def test_count_cigar_ops_3():
    """Mixed ops: M/=/X all count as matches; I and D are tallied separately."""
    # Expected tuple is (num_m, num_i, num_d, total_len).
    assert mod.count_cigar_ops('10M1I123D4=2X') == (16, 1, 123, 140)
def test_count_cigar_ops_4():
    """A degenerate CIGAR: an op letter with no preceding count must raise.

    Cleanup: dropped the unused `expected` tuple and the unused `result`
    binding — the only contract exercised here is that parsing raises.
    """
    with pytest.raises(Exception):
        mod.count_cigar_ops('10=X')
def test_count_cigar_ops_5():
    """A degenerate CIGAR: a trailing count with no op letter must raise.

    Cleanup: dropped the unused `expected` tuple and the unused `result`
    binding — the only contract exercised here is that parsing raises.
    """
    with pytest.raises(Exception):
        mod.count_cigar_ops('10I123')
def test_count_cigar_ops_6():
    """A degenerate CIGAR: digits only, no op letter at all, must raise.

    Cleanup: dropped the unused `expected` tuple and the unused `result`
    binding — the only contract exercised here is that parsing raises.
    """
    with pytest.raises(Exception):
        mod.count_cigar_ops('12345')
| 29.229167
| 88
| 0.646234
| 1,300
| 8,418
| 3.964615
| 0.130769
| 0.044237
| 0.045402
| 0.048894
| 0.883392
| 0.854288
| 0.852542
| 0.838184
| 0.832363
| 0.813349
| 0
| 0.061489
| 0.22143
| 8,418
| 287
| 89
| 29.33101
| 0.724901
| 0.24507
| 0
| 0.721429
| 0
| 0
| 0.013845
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.135714
| false
| 0
| 0.035714
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d59f5e1619eead15b97f50ab207d4f29b30876cb
| 3,740
|
py
|
Python
|
utils/scripts/OOOlevelGen/src/levels/Our_House.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/levels/Our_House.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
utils/scripts/OOOlevelGen/src/levels/Our_House.py
|
fullscreennl/monkeyswipe
|
c56192e202674dd5ab18023f6cf14cf51e95fbd0
|
[
"MIT"
] | null | null | null |
import LevelBuilder
from sprites import *
def render(name, bg):
    """Assemble the 'Our_House' level and write it out as <name>.plist."""
    # Physics parameters shared by every dynamic sprite in this level.
    phys = dict(restitution=0.2, static='false', friction=0.5, density=20)

    def bomb(x, y):
        return Bomb.BombSprite(x=x, y=y, width=32, height=32, **phys)

    def beam(x, y, w, ang):
        return Beam.BeamSprite(x=x, y=y, width=w, height=14, angle=ang, **phys)

    def enemy(x, y):
        return Enemy.EnemySprite(x=x, y=y, width=32, height=32, angle='0', **phys)

    def friend(x, y, size):
        return Friend.FriendSprite(classname='AccelFriendSprite', x=x, y=y,
                                   width=size, height=size, angle='0',
                                   **phys).setName('Friend')

    def rotor(x):
        return EnemyEquipedRotor.EnemyEquipedRotorSprite(
            x=x, y=282, scaling=0.453125, speed=3000, torque=3)

    builder = LevelBuilder.LevelBuilder(name + ".plist", background=bg)
    # Sprites are added in the same order as the original layout dump.
    builder.addObject(bomb(72, 96))
    builder.addObject(beam(246, 43, 265, '0'))
    builder.addObject(Hero.HeroSprite(x=247, y=72, width=32, height=32))
    builder.addObject(Star.StarSprite(x=192, y=17, width=32, height=32))
    builder.addObject(enemy(317, 17))
    builder.addObject(enemy(254, 17))
    builder.addObject(beam(386, 64, 127, '90'))
    builder.addObject(beam(106, 64, 127, '90'))
    builder.addObject(enemy(172, 79))
    builder.addObject(beam(245, 135, 301, '0'))
    builder.addObject(enemy(126, 164))
    builder.addObject(enemy(169, 164))
    builder.addObject(enemy(218, 164))
    builder.addObject(enemy(271, 164))
    builder.addObject(enemy(312, 164))
    builder.addObject(enemy(358, 164))
    builder.addObject(friend(25, 18, 22))
    builder.addObject(friend(25, 60, 26))
    builder.addObject(beam(168, 196, 127, '0'))
    builder.addObject(beam(314, 196, 127, '0'))
    builder.addObject(friend(168, 223, 32))
    builder.addObject(friend(316, 223, 32))
    builder.addObject(rotor(47))
    builder.addObject(bomb(358, 21))
    builder.addObject(rotor(243))
    builder.addObject(rotor(440))
    builder.addObject(bomb(136, 18))
    builder.render()
| 116.875
| 187
| 0.739305
| 625
| 3,740
| 4.424
| 0.1504
| 0.107414
| 0.103436
| 0.151175
| 0.899458
| 0.833996
| 0.814105
| 0.814105
| 0.814105
| 0.811935
| 0
| 0.123899
| 0.059091
| 3,740
| 32
| 188
| 116.875
| 0.661836
| 0
| 0
| 0
| 0
| 0
| 0.061214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.0625
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
633e1b68647809497bc7d730c6dffcbe5af94254
| 598
|
py
|
Python
|
19_re/01_re/07_greedy_grudging.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
19_re/01_re/07_greedy_grudging.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
19_re/01_re/07_greedy_grudging.py
|
hemuke/python
|
bc99f2b5aee997083ae31f59a2b33db48c8255f3
|
[
"Apache-2.0"
] | null | null | null |
#! /root/anaconda3/bin/python
# Demonstrate greedy vs. lazy (non-greedy) regex quantifiers against the
# same subject string. Each greedy pattern is followed by its lazy twin.
import re

SUBJECT = 'abbbbbbc'
for pattern in ('ab*', 'ab*?',
                'ab+', 'ab+?',
                'ab?', 'ab??',
                'ab{3}', 'ab{3}?',
                'ab{3,}', 'ab{3,}?',
                'ab{3,5}', 'ab{3,5}?'):
    print(re.compile(pattern).match(SUBJECT))
| 28.47619
| 48
| 0.665552
| 98
| 598
| 4.061224
| 0.142857
| 0.211055
| 0.422111
| 0.452261
| 0.924623
| 0.924623
| 0.924623
| 0.924623
| 0.924623
| 0.924623
| 0
| 0.015625
| 0.036789
| 598
| 20
| 49
| 29.9
| 0.675347
| 0.046823
| 0
| 0
| 0
| 0
| 0.274165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.076923
| 0
| 0.076923
| 0.923077
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
897ab21c09c13860f15f46ab4ca19baf7ef92973
| 188
|
py
|
Python
|
fmc_rest_client/__init__.py
|
cunningr/fmc_rest_client
|
ddd634429f06db281d0c851bd94139d4e3f23a0f
|
[
"MIT"
] | 1
|
2018-06-29T13:55:41.000Z
|
2018-06-29T13:55:41.000Z
|
fmc_rest_client/__init__.py
|
cunningr/fmc_rest_client
|
ddd634429f06db281d0c851bd94139d4e3f23a0f
|
[
"MIT"
] | 7
|
2018-01-24T03:01:40.000Z
|
2020-05-27T14:19:29.000Z
|
fmc_rest_client/__init__.py
|
cunningr/fmc_rest_client
|
ddd634429f06db281d0c851bd94139d4e3f23a0f
|
[
"MIT"
] | 3
|
2017-11-30T17:27:39.000Z
|
2019-01-17T06:57:37.000Z
|
from fmc_rest_client.core.base_clients import FMCBaseRestClient
from fmc_rest_client.core.base_clients import FMCRestClient
from fmc_rest_client.core.base_clients import ResourceException
| 47
| 63
| 0.904255
| 27
| 188
| 5.962963
| 0.407407
| 0.130435
| 0.204969
| 0.31677
| 0.708075
| 0.708075
| 0.708075
| 0.708075
| 0
| 0
| 0
| 0
| 0.06383
| 188
| 3
| 64
| 62.666667
| 0.914773
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
89bdc2c8bcf490b14411c4f0304f9c3ff25c8cc1
| 23,888
|
py
|
Python
|
tests/unit_test/api/api_processor_test.py
|
Anitej/kairon
|
61d6bd7f230a744303abab42e3b54b0381fee7da
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_test/api/api_processor_test.py
|
Anitej/kairon
|
61d6bd7f230a744303abab42e3b54b0381fee7da
|
[
"Apache-2.0"
] | 1
|
2021-01-29T22:20:59.000Z
|
2021-01-29T22:20:59.000Z
|
tests/unit_test/api/api_processor_test.py
|
Anitej/kairon
|
61d6bd7f230a744303abab42e3b54b0381fee7da
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import os
from mongoengine import connect
from mongoengine.errors import ValidationError
import pytest
from pydantic import SecretStr
from kairon.api.processor import AccountProcessor
from kairon.utils import Utility
from kairon.exceptions import AppException
# Point the project's config loader at the test system.yaml before any
# module under test reads the "system_file" environment variable.
os.environ["system_file"] = "./tests/testing_data/system.yaml"
def pytest_configure():
    """Return the initial shared state: a single 'bot' slot, not yet set."""
    return dict(bot=None)
class TestAccountProcessor:
@pytest.fixture(autouse=True)
def init_connection(self):
Utility.load_evironment()
connect(host=Utility.environment['database']["url"])
def test_add_account(self):
account_response = AccountProcessor.add_account("paypal", "testAdmin")
account = AccountProcessor.get_account(account_response["_id"])
assert account_response
assert account_response["_id"] == account["_id"]
assert account_response["name"] == account["name"]
account_response = AccountProcessor.add_account("ebay", "testAdmin")
account = AccountProcessor.get_account(account_response["_id"])
assert account_response
assert account_response["_id"] == account["_id"]
assert account_response["name"] == account["name"]
def test_add_duplicate_account(self):
with pytest.raises(Exception):
AccountProcessor.add_account("paypal", "testAdmin")
def test_add_duplicate_account_case_insentive(self):
with pytest.raises(Exception):
AccountProcessor.add_account("PayPal", "testAdmin")
def test_add_blank_account(self):
with pytest.raises(AppException):
AccountProcessor.add_account("", "testAdmin")
def test_add_empty_account(self):
with pytest.raises(AppException):
AccountProcessor.add_account(" ", "testAdmin")
def test_add_none_account(self):
with pytest.raises(AppException):
AccountProcessor.add_account(None, "testAdmin")
def test_add_bot(self):
bot_response = AccountProcessor.add_bot("test", 1, "testAdmin")
assert bot_response
pytest.bot = bot_response["_id"].__str__()
def test_get_bot(self):
bot_response = AccountProcessor.get_bot(pytest.bot)
assert bot_response
assert bot_response["account"] == 1
def test_add_duplicate_bot(self):
with pytest.raises(Exception):
AccountProcessor.add_bot("test", 1, "testAdmin")
def test_add_duplicate_bot_case_insensitive(self):
with pytest.raises(Exception):
AccountProcessor.add_bot("TEST", 1, "testAdmin")
def test_add_blank_bot(self):
with pytest.raises(AppException):
AccountProcessor.add_bot(" ", 1, "testAdmin")
def test_add_empty_bot(self):
with pytest.raises(AppException):
AccountProcessor.add_bot("", 1, "testAdmin")
def test_add_none_bot(self):
with pytest.raises(AppException):
AccountProcessor.add_bot(None, 1, "testAdmin")
def test_add_user(self):
user = AccountProcessor.add_user(
email="fshaikh@digite.com",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
assert user
assert user["password"] != "12345"
assert user["status"]
def test_add_user_duplicate(self):
with pytest.raises(Exception):
AccountProcessor.add_user(
email="fshaikh@digite.com",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_duplicate_case_insensitive(self):
with pytest.raises(Exception):
AccountProcessor.add_user(
email="FShaikh@digite.com",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_empty_email(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_blank_email(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email=" ",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_invalid_email(self):
with pytest.raises(ValidationError):
AccountProcessor.add_user(
email="demo",
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_none_email(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email=None,
first_name="Fahad Ali",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_empty_firstname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_blank_firstname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name=" ",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_none_firstname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="",
last_name="Shaikh",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_empty_lastname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name="",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_none_lastname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name=None,
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_blank_lastname(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name=" ",
password="Welcome@1",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_empty_password(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name="Shaikh",
password="",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_blank_password(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name="Shaikh",
password=" ",
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_add_user_None_password(self):
with pytest.raises(AppException):
AccountProcessor.add_user(
email="demo@demo.ai",
first_name="Fahad Ali",
last_name="Shaikh",
password=None,
account=1,
bot=pytest.bot,
user="testAdmin",
)
def test_get_user(self):
user = AccountProcessor.get_user("fshaikh@digite.com")
assert all(
user[key] is False if key == "is_integration_user" else user[key]
for key in user.keys()
)
def test_get_user_details(self):
user = AccountProcessor.get_user_details("fshaikh@digite.com")
assert all(
user[key] is False if key == "is_integration_user" else user[key]
for key in user.keys()
)
@pytest.fixture
def mock_user_inactive(self, monkeypatch):
def user_response(*args, **kwargs):
return {
"email": "demo@demo.ai",
"status": False,
"bot": "support",
"account": 2,
"is_integration_user": False
}
def bot_response(*args, **kwargs):
return {"name": "support", "status": True}
def account_response(*args, **kwargs):
return {"name": "paytm", "status": True}
monkeypatch.setattr(AccountProcessor, "get_user", user_response)
monkeypatch.setattr(AccountProcessor, "get_bot", bot_response)
monkeypatch.setattr(AccountProcessor, "get_account", account_response)
def test_get_user_details_user_inactive(self, mock_user_inactive):
with pytest.raises(ValidationError):
user_details = AccountProcessor.get_user_details("demo@demo.ai")
assert all(
user_details[key] is False
if key == "is_integration_user"
else user_details[key]
for key in user_details.keys()
)
@pytest.fixture
def mock_bot_inactive(self, monkeypatch):
def user_response(*args, **kwargs):
return {
"email": "demo@demo.ai",
"status": True,
"bot": "support",
"account": 2,
"is_integration_user": False
}
def bot_response(*args, **kwargs):
return {"name": "support", "status": False}
def account_response(*args, **kwargs):
return {"name": "paytm", "status": True}
monkeypatch.setattr(AccountProcessor, "get_user", user_response)
monkeypatch.setattr(AccountProcessor, "get_bot", bot_response)
monkeypatch.setattr(AccountProcessor, "get_account", account_response)
def test_get_user_details_bot_inactive(self, mock_bot_inactive):
with pytest.raises(ValidationError):
user_details = AccountProcessor.get_user_details("demo@demo.ai")
assert all(
user_details[key] is False
if key == "is_integration_user"
else user_details[key]
for key in AccountProcessor.get_user_details(
user_details["email"]
).keys()
)
@pytest.fixture
def mock_account_inactive(self, monkeypatch):
def user_response(*args, **kwargs):
return {
"email": "demo@demo.ai",
"status": True,
"bot": "support",
"account": 2,
"is_integration_user": False
}
def bot_response(*args, **kwargs):
return {"name": "support", "status": True}
def account_response(*args, **kwargs):
return {"name": "paytm", "status": False}
monkeypatch.setattr(AccountProcessor, "get_user", user_response)
monkeypatch.setattr(AccountProcessor, "get_bot", bot_response)
monkeypatch.setattr(AccountProcessor, "get_account", account_response)
def test_get_user_details_account_inactive(self, mock_account_inactive):
with pytest.raises(ValidationError):
user_details = AccountProcessor.get_user_details("demo@demo.ai")
assert all(
user_details[key] is False
if key == "is_integration_user"
else user_details[key]
for key in AccountProcessor.get_user_details(
user_details["email"]
).keys()
)
def test_get_integration_user(self):
integration_user = AccountProcessor.get_integration_user(
bot="support", account=2
)
assert integration_user["is_integration_user"]
assert all(integration_user[key] for key in integration_user.keys())
def test_account_setup_empty_values(self):
account = {}
with pytest.raises(AppException):
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.account_setup(account_setup=account, user="testAdmin"))
def test_account_setup_missing_account(self):
account = {
"bot": "Test",
"email": "demo@ac.in",
"first_name": "Test_First",
"last_name": "Test_Last",
"password": "welcome@1",
}
with pytest.raises(AppException):
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.account_setup(account_setup=account, user="testAdmin"))
def test_account_setup_missing_bot_name(self):
account = {
"account": "TestAccount",
"email": "demo@ac.in",
"first_name": "Test_First",
"last_name": "Test_Last",
"password": "Welcome@1",
}
with pytest.raises(AppException):
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.account_setup(account_setup=account, user="testAdmin"))
def test_account_setup_user_info(self):
account = {
"account": "Test_Account",
"bot": "Test",
"first_name": "Test_First",
"last_name": "Test_Last",
"password": SecretStr("Welcome@1"),
}
with pytest.raises(AppException):
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.account_setup(account_setup=account, user="testAdmin"))
def test_account_setup(self):
account = {
"account": "Test_Account",
"bot": "Test",
"email": "demo@ac.in",
"first_name": "Test_First",
"last_name": "Test_Last",
"password": SecretStr("Welcome@1"),
}
loop = asyncio.new_event_loop()
actual, mail, subject, body = loop.run_until_complete(AccountProcessor.account_setup(account_setup=account, user="testAdmin"))
assert actual["role"] == "admin"
assert actual["_id"]
assert actual["account"]
assert actual["bot"]
def test_default_account_setup(self):
loop = asyncio.new_event_loop()
actual, mail, subject, body = loop.run_until_complete(AccountProcessor.default_account_setup())
assert actual
async def mock_smtp(self, *args, **kwargs):
return None
def test_validate_and_send_mail(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
loop.run_until_complete(Utility.validate_and_send_mail('demo@ac.in',subject='test',body='test'))
assert True
def test_send_false_email_id(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(Utility.validate_and_send_mail('..',subject='test',body="test"))
def test_send_empty_mail_subject(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(Utility.validate_and_send_mail('demo@ac.in',subject=' ',body='test'))
def test_send_empty_mail_body(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(Utility.validate_and_send_mail('demo@ac.in',subject='test',body=' '))
def test_valid_token(self):
token = Utility.generate_token('integ1@gmail.com')
mail = Utility.verify_token(token)
assert mail
def test_invalid_token(self):
with pytest.raises(Exception):
Utility.verify_token('..')
def test_new_user_confirm(self,monkeypatch):
AccountProcessor.add_user(
email="integ2@gmail.com",
first_name="inteq",
last_name="2",
password='Welcome@1',
account=1,
bot=pytest.bot,
user="testAdmin",
)
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
token = Utility.generate_token('integ2@gmail.com')
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.confirm_email(token))
assert True
def test_user_already_confirmed(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
token = Utility.generate_token('integ2@gmail.com')
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.confirm_email(token))
def test_user_not_confirmed(self):
with pytest.raises(Exception):
AccountProcessor.is_user_confirmed('sd')
def test_user_confirmed(self):
AccountProcessor.is_user_confirmed('integ2@gmail.com')
assert True
def test_send_empty_token(self):
with pytest.raises(Exception):
Utility.verify_token(' ')
def test_reset_link_with_mail(self,monkeypatch):
AccountProcessor.EMAIL_ENABLED = True
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.send_reset_link('integ2@gmail.com'))
AccountProcessor.EMAIL_ENABLED = False
assert True
def test_reset_link_with_empty_mail(self,monkeypatch):
AccountProcessor.EMAIL_ENABLED = True
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.send_reset_link(''))
AccountProcessor.EMAIL_ENABLED = False
def test_reset_link_with_unregistered_mail(self, monkeypatch):
AccountProcessor.EMAIL_ENABLED = True
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.send_reset_link('sasha.41195@gmail.com'))
AccountProcessor.EMAIL_ENABLED = False
def test_reset_link_with_unconfirmed_mail(self, monkeypatch):
AccountProcessor.EMAIL_ENABLED = True
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.send_reset_link('integration@demo.ai'))
AccountProcessor.EMAIL_ENABLED = False
def test_overwrite_password_with_invalid_token(self,monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.overwrite_password('fgh',"asdfghj@1"))
def test_overwrite_password_with_empty_password_string(self, monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
loop = asyncio.new_event_loop()
with pytest.raises(Exception):
loop.run_until_complete(AccountProcessor.overwrite_password('eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJtYWlsX2lkIjoiaW50ZWcxQGdtYWlsLmNvbSJ9.Ycs1ROb1w6MMsx2WTA4vFu3-jRO8LsXKCQEB3fkoU20', " "))
def test_overwrite_password_with_valid_entries(self, monkeypatch):
monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
token = Utility.generate_token('integ2@gmail.com')
loop = asyncio.new_event_loop()
loop.run_until_complete(AccountProcessor.overwrite_password(token,"Welcome@3"))
assert True
def test_send_confirmation_link_with_valid_id(self, monkeypatch):
    """A freshly added, unconfirmed user can be sent a confirmation link."""
    AccountProcessor.add_user(
        email="integ3@gmail.com",
        first_name="inteq",
        last_name="3",
        password='Welcome@1',
        account=1,
        bot=pytest.bot,
        user="testAdmin",
    )
    AccountProcessor.EMAIL_ENABLED = True
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        # Success criterion: the coroutine completes without raising.
        loop.run_until_complete(AccountProcessor.send_confirmation_link('integ3@gmail.com'))
    finally:
        # Restore the flag even on failure and release the loop's resources.
        AccountProcessor.EMAIL_ENABLED = False
        loop.close()
def test_send_confirmation_link_with_confirmed_id(self, monkeypatch):
    """Re-sending a confirmation link to an already confirmed id must raise."""
    AccountProcessor.EMAIL_ENABLED = True
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        with pytest.raises(Exception):
            loop.run_until_complete(AccountProcessor.send_confirmation_link('integ1@gmail.com'))
    finally:
        # Always restore the flag and release the event loop's resources.
        AccountProcessor.EMAIL_ENABLED = False
        loop.close()
def test_send_confirmation_link_with_invalid_id(self, monkeypatch):
    """An empty mail id must make send_confirmation_link raise."""
    AccountProcessor.EMAIL_ENABLED = True
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        with pytest.raises(Exception):
            loop.run_until_complete(AccountProcessor.send_confirmation_link(''))
    finally:
        # Always restore the flag and release the event loop's resources.
        AccountProcessor.EMAIL_ENABLED = False
        loop.close()
def test_send_confirmation_link_with_unregistered_id(self, monkeypatch):
    """A mail id with no matching account must make send_confirmation_link raise."""
    AccountProcessor.EMAIL_ENABLED = True
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        with pytest.raises(Exception):
            loop.run_until_complete(AccountProcessor.send_confirmation_link('sasha.41195@gmail.com'))
    finally:
        # Always restore the flag and release the event loop's resources.
        AccountProcessor.EMAIL_ENABLED = False
        loop.close()
def test_reset_link_with_mail_not_enabled(self, monkeypatch):
    """send_reset_link must raise while EMAIL_ENABLED is left False."""
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        with pytest.raises(Exception):
            loop.run_until_complete(AccountProcessor.send_reset_link('integ1@gmail.com'))
    finally:
        loop.close()  # the original never closed the loop it created
def test_send_confirmation_link_with_mail_not_enabled(self, monkeypatch):
    """send_confirmation_link must raise while EMAIL_ENABLED is left False."""
    monkeypatch.setattr(Utility, 'trigger_smtp', self.mock_smtp)
    loop = asyncio.new_event_loop()
    try:
        with pytest.raises(Exception):
            loop.run_until_complete(AccountProcessor.send_confirmation_link('integration@demo.ai'))
    finally:
        loop.close()  # the original never closed the loop it created
| 37.500785
| 202
| 0.610223
| 2,484
| 23,888
| 5.595813
| 0.06562
| 0.032734
| 0.056403
| 0.040288
| 0.860072
| 0.81295
| 0.795468
| 0.778921
| 0.769424
| 0.74741
| 0
| 0.005639
| 0.287383
| 23,888
| 636
| 203
| 37.559748
| 0.810903
| 0
| 0
| 0.658802
| 0
| 0
| 0.108841
| 0.008163
| 0
| 0
| 0
| 0
| 0.056261
| 1
| 0.143376
| false
| 0.058076
| 0.016334
| 0.018149
| 0.181488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
9887093d8422c377b0fff5b2089e5ec16653b84a
| 82
|
py
|
Python
|
torchdiffeq/__init__.py
|
mcaldana/torchdiffeq
|
6bf78dc7b07f127d280dd266ace24d5543295d35
|
[
"MIT"
] | null | null | null |
torchdiffeq/__init__.py
|
mcaldana/torchdiffeq
|
6bf78dc7b07f127d280dd266ace24d5543295d35
|
[
"MIT"
] | null | null | null |
torchdiffeq/__init__.py
|
mcaldana/torchdiffeq
|
6bf78dc7b07f127d280dd266ace24d5543295d35
|
[
"MIT"
] | null | null | null |
# Public API of the torchdiffeq package: re-export the ODE solvers
# implemented in the private ._impl subpackage.
from ._impl import odeint
from ._impl import odeint_adjoint

# Package version string.
__version__ = "0.2.2"
| 20.5
| 33
| 0.780488
| 13
| 82
| 4.384615
| 0.615385
| 0.280702
| 0.491228
| 0.701754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042254
| 0.134146
| 82
| 3
| 34
| 27.333333
| 0.760563
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
98995aa8a2d8831a517f3b79dd85b6aef822be52
| 1,162
|
py
|
Python
|
tests/test_commodity.py
|
2000-ion/TIDPP-Lab3
|
3fc97e6214b6e51f40df39f1692d4deec4bb0cc2
|
[
"BSD-3-Clause"
] | 2,160
|
2016-01-24T05:08:59.000Z
|
2022-03-31T12:15:30.000Z
|
tests/test_commodity.py
|
2000-ion/TIDPP-Lab3
|
3fc97e6214b6e51f40df39f1692d4deec4bb0cc2
|
[
"BSD-3-Clause"
] | 455
|
2016-01-29T22:41:33.000Z
|
2022-03-23T08:28:01.000Z
|
tests/test_commodity.py
|
2000-ion/TIDPP-Lab3
|
3fc97e6214b6e51f40df39f1692d4deec4bb0cc2
|
[
"BSD-3-Clause"
] | 818
|
2016-02-01T15:09:07.000Z
|
2022-03-28T19:52:26.000Z
|
import pytest
from decimal import Decimal
from shop.money.money_maker import MoneyMaker
from testshop.models import Commodity

# Factory for euro-denominated money amounts used throughout the tests below.
EUR = MoneyMaker('EUR')
@pytest.mark.django_db
def test_field_filter(commodity_factory):
    """Money-field lookups accept plain strings, Decimals and EUR amounts alike.

    Exercises exact, __gt, __gte, __lt and __lte lookups against a single
    commodity priced at 12.34 and checks each query's result set.
    """
    commodity = commodity_factory(unit_price='12.34')
    # (filter kwargs, expected result list) — kept in the original order.
    cases = [
        ({'unit_price': '12.34'}, [commodity]),
        ({'unit_price': Decimal('12.34')}, [commodity]),
        ({'unit_price': EUR('12.34')}, [commodity]),
        ({'unit_price__gt': '12.33'}, [commodity]),
        ({'unit_price__gt': EUR('12.33')}, [commodity]),
        ({'unit_price__gt': '12.34'}, []),
        ({'unit_price__gte': '12.34'}, [commodity]),
        ({'unit_price__lt': '12.35'}, [commodity]),
        ({'unit_price__lt': EUR('12.35')}, [commodity]),
        ({'unit_price__lt': '12.34'}, []),
        ({'unit_price__lte': '12.34'}, [commodity]),
    ]
    for lookup, expected in cases:
        assert list(Commodity.objects.filter(**lookup)) == expected
| 44.692308
| 85
| 0.738382
| 158
| 1,162
| 5.21519
| 0.208861
| 0.131068
| 0.253641
| 0.347087
| 0.70267
| 0.70267
| 0.70267
| 0.70267
| 0.70267
| 0.543689
| 0
| 0.04611
| 0.104131
| 1,162
| 25
| 86
| 46.48
| 0.745437
| 0
| 0
| 0
| 0
| 0
| 0.054217
| 0
| 0
| 0
| 0
| 0
| 0.578947
| 1
| 0.052632
| false
| 0
| 0.210526
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
989ae99d127369ef5731c01c394a0396955b99c4
| 1,685
|
py
|
Python
|
python/phonenumbers/shortdata/region_TR.py
|
rodgar-nvkz/python-phonenumbers
|
4c7c4892211dbc9bc328bc3356b03853eaf993dc
|
[
"Apache-2.0"
] | 2,424
|
2015-01-05T05:34:45.000Z
|
2022-03-28T22:37:53.000Z
|
python/phonenumbers/shortdata/region_TR.py
|
rodgar-nvkz/python-phonenumbers
|
4c7c4892211dbc9bc328bc3356b03853eaf993dc
|
[
"Apache-2.0"
] | 166
|
2015-01-30T23:59:18.000Z
|
2022-03-14T21:08:42.000Z
|
python/phonenumbers/shortdata/region_TR.py
|
rodgar-nvkz/python-phonenumbers
|
4c7c4892211dbc9bc328bc3356b03853eaf993dc
|
[
"Apache-2.0"
] | 345
|
2015-01-02T00:33:27.000Z
|
2022-03-26T13:06:57.000Z
|
"""Auto-generated file, do not edit by hand. TR metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-number metadata for Turkey (region 'TR'): toll-free numbers,
# emergency numbers (112-family), general short codes, standard-rate and
# SMS-service number patterns.
# NOTE(review): this is generated data (see the module docstring) — fix
# issues in the upstream metadata and regenerate rather than editing the
# regex patterns here by hand.
PHONE_METADATA_TR = PhoneMetadata(id='TR', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='[1-9]\\d{2,4}', possible_length=(3, 4, 5)),
toll_free=PhoneNumberDesc(national_number_pattern='1(?:1[02]|22|3[126]|4[04]|5[15-9]|6[18]|77|83)', example_number='110', possible_length=(3,)),
emergency=PhoneNumberDesc(national_number_pattern='1(?:1[02]|55)', example_number='110', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='1(?:1(?:[02-79]|8(?:1[018]|2[0245]|3[2-4]|42|5[058]|6[06]|7[07]|8[01389]|9[089]))|3(?:37|[58]6|65)|471|5(?:07|78)|6(?:[02]6|99)|8(?:63|95))|2(?:077|268|4(?:17|23)|5(?:7[26]|82)|6[14]4|8\\d\\d|9(?:30|89))|3(?:0(?:05|72)|353|4(?:06|30|64)|502|674|747|851|9(?:1[29]|60))|4(?:0(?:25|3[12]|[47]2)|3(?:3[13]|[89]1)|439|5(?:43|55)|717|832)|5(?:145|290|[4-6]\\d\\d|772|833|9(?:[06]1|92))|6(?:236|6(?:12|39|8[59])|769)|7890|8(?:688|7(?:28|65)|85[06])|9(?:159|290)|1[2-9]\\d', example_number='110', possible_length=(3, 4, 5)),
standard_rate=PhoneNumberDesc(national_number_pattern='(?:285|542)0', example_number='2850', possible_length=(4,)),
sms_services=PhoneNumberDesc(national_number_pattern='1(?:3(?:37|[58]6|65)|4(?:4|71)|5(?:07|78)|6(?:[02]6|99)|8(?:3|63|95))|(?:2(?:07|26|4[12]|5[78]|6[14]|8\\d|9[38])|3(?:0[07]|[38]5|4[036]|50|67|74|9[16])|4(?:0[2-47]|3[389]|[48]3|5[45]|71)|5(?:14|29|[4-6]\\d|77|83|9[069])|6(?:23|6[138]|76)|789|8(?:68|7[26]|85)|9(?:15|29))\\d', example_number='144', possible_length=(3, 4)),
short_data=True)
| 140.416667
| 572
| 0.636202
| 341
| 1,685
| 3.046921
| 0.404692
| 0.13282
| 0.167469
| 0.207892
| 0.333013
| 0.228104
| 0.138595
| 0.023099
| 0
| 0
| 0
| 0.258524
| 0.04273
| 1,685
| 11
| 573
| 153.181818
| 0.385617
| 0.031454
| 0
| 0
| 1
| 0.333333
| 0.51599
| 0.48155
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f547ebe763c88824cc62ad6d4973b5cade2aa75
| 134
|
py
|
Python
|
Codewars/8kyu/repeatit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/repeatit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/repeatit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Codewars kata test cases: repeat_it(s, n) is expected to return the string
# s repeated n times. `Test` is supplied by the Codewars runner.
Test.assert_equals(repeat_it('*', 3), '***')
Test.assert_equals(repeat_it('Hello', 5), 'HelloHelloHelloHelloHello')
| 26.8
| 70
| 0.69403
| 18
| 134
| 4.944444
| 0.666667
| 0.224719
| 0.359551
| 0.494382
| 0.539326
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04065
| 0.08209
| 134
| 4
| 71
| 33.5
| 0.682927
| 0.104478
| 0
| 0
| 0
| 0
| 0.288136
| 0.211864
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f562d07780a40201a090716ef363f9f50f64fed
| 14,866
|
py
|
Python
|
GCN/code/gcn_train.py
|
anubratabhowmick/tf-BERT-GCN
|
123f251ac33b86b59c5a0a93b2025b4c107bab1b
|
[
"MIT"
] | 2
|
2021-02-16T09:09:07.000Z
|
2021-06-08T12:31:18.000Z
|
GCN/code/gcn_train.py
|
anubratabhowmick/tf-BERT-GCN
|
123f251ac33b86b59c5a0a93b2025b4c107bab1b
|
[
"MIT"
] | null | null | null |
GCN/code/gcn_train.py
|
anubratabhowmick/tf-BERT-GCN
|
123f251ac33b86b59c5a0a93b2025b4c107bab1b
|
[
"MIT"
] | null | null | null |
from __future__ import division
from __future__ import print_function
import pandas as pd
import numpy as np
import os
import random
import pickle
import scipy.sparse as sp
import tensorflow as tf
import time
from sklearn.metrics import roc_auc_score
from sklearn.metrics import average_precision_score
from itertools import combinations
random.seed(42)
from GCN.code.utils import load_data
from GCN.code.optimizer import OptimizerAE, OptimizerVAE
from GCN.code.model import GCNModelAE, GCNModelVAE
from GCN.code.preprocessing import preprocess_graph, construct_feed_dict, sparse_to_tuple, mask_test_edges
def paper_pretrain(data_name, gcn_model, gcn_epochs, gcn_lr, gcn_hidden1, gcn_hidden2, save_dir):
"""Pre-train a GCN (variational) auto-encoder on the paper citation graph.

Reads glue/ACRS/full_context_<data_name>.csv, builds the unique
(source_id, target_id) edge list, trains a featureless GCN auto-encoder
(gcn_model == 'AE') or variational auto-encoder ('VAE') on link prediction,
prints per-epoch/final ROC-AUC and average-precision scores, and pickles the
learned node embeddings plus the node-id map to
<save_dir>/<data_name>/<data_name>_gcn_pretrain.pkl.

NOTE(review): indentation appears to have been stripped from this block by
the extraction pipeline; tokens below are preserved verbatim.
"""
NOW_DIR = os.getcwd()
DATA_DIR = os.path.join(NOW_DIR, 'glue', 'ACRS', 'full_context_{}.csv'.format(data_name))
aan_data = pd.read_csv(DATA_DIR)
# Unique citation edges (one row per source/target paper pair).
graph_edge = aan_data[['source_id', 'target_id']].drop_duplicates(subset=['target_id', 'source_id'])
graph_edge.reset_index(inplace=True)
graph_edge.drop(labels=['index'], axis=1, inplace=True)
# De-duplicated paper metadata frames. NOTE(review): target_info/source_info
# are not used again below — presumably kept for inspection; confirm before
# removing.
target_info = aan_data[['target_id', 'target_author', 'target_title', 'target_venue', 'target_abstract']]
source_info = aan_data[['source_id', 'source_author', 'source_title', 'source_venue', 'source_abstract']]
target_info.drop_duplicates(subset=['target_id'], inplace=True)
source_info.drop_duplicates(subset=['source_id'], inplace=True)
target_info.reset_index(inplace=True)
target_info.drop(labels=['index'], axis=1, inplace=True)
source_info.reset_index(inplace=True)
source_info.drop(labels=['index'], axis=1, inplace=True)
# Persist the raw edge list so load_data can read it back from disk.
with open('{}/{}/{}.graph_edge_total.pkl'.format(save_dir, data_name, data_name), 'wb') as f:
pickle.dump(graph_edge.values, f)
features, adj, idx_map = load_data(path='{}/'.format(save_dir), edge_dataset='{}.graph_edge_total'.format(data_name),
feature_dataset=None, feature_less=True)
# Strip the diagonal (self-loops) from the adjacency; keep a pristine copy
# (adj_orig_orig) of the full graph for the final embedding pass.
adj_orig = adj
adj_orig = adj_orig - sp.dia_matrix((adj_orig.diagonal()[np.newaxis, :], [0]), shape=adj_orig.shape)
adj_orig.eliminate_zeros()
adj_orig_orig = adj_orig.copy()
# Hold out validation/test edge sets (plus sampled negatives) for the
# link-prediction evaluation; train only on the remaining edges.
adj_train, train_edges, val_edges, val_edges_false, test_edges, test_edges_false = mask_test_edges(adj)
adj = adj_train
features = sp.identity(adj.shape[0]) # featureless
adj_norm = preprocess_graph(adj)
# Define placeholders
placeholders = {
'features': tf.sparse_placeholder(tf.float32),
'adj': tf.sparse_placeholder(tf.float32),
'adj_orig': tf.sparse_placeholder(tf.float32),
'dropout': tf.placeholder_with_default(0., shape=())
}
features = sp.coo_matrix(features)
num_nodes = adj.shape[0]
features = sparse_to_tuple(features)
num_features = features[2][1]
features_nonzero = features[1].shape[0]
# Create model
if gcn_model == 'AE':
model = GCNModelAE(placeholders, num_features, features_nonzero, gcn_hidden1, gcn_hidden2)
elif gcn_model == 'VAE':
model = GCNModelVAE(placeholders, num_features, num_nodes, features_nonzero, gcn_hidden1, gcn_hidden2)
# Re-weight positive edges in the loss to compensate for graph sparsity.
pos_weight = float(adj.shape[0] * adj.shape[0] - adj.sum()) / adj.sum()
norm = adj.shape[0] * adj.shape[0] / float((adj.shape[0] * adj.shape[0] - adj.sum()) * 2)
# Optimizer
with tf.name_scope('optimizer'):
if gcn_model == 'AE':
opt = OptimizerAE(preds=model.reconstructions,
labels=tf.reshape(tf.sparse_tensor_to_dense(placeholders['adj_orig'],
validate_indices=False), [-1]),
pos_weight=pos_weight,
norm=norm,
learning_rate=gcn_lr)
elif gcn_model == 'VAE':
opt = OptimizerVAE(preds=model.reconstructions,
labels=tf.reshape(tf.sparse_tensor_to_dense(placeholders['adj_orig'],
validate_indices=False), [-1]),
model=model, num_nodes=num_nodes,
pos_weight=pos_weight,
norm=norm,
learning_rate=gcn_lr)
# Initialize session
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# Score link prediction on (positive, negative) edge sets; closes over
# sess/model/feed_dict/adj_orig from the enclosing scope.
def get_roc_score(edges_pos, edges_neg, emb=None):
if emb is None:
feed_dict.update({placeholders['dropout']: 0})
emb = sess.run(model.z_mean, feed_dict=feed_dict)
def sigmoid(x):
return 1 / (1 + np.exp(-x))
# Predict on test set of edges
adj_rec = np.dot(emb, emb.T)
preds = []
pos = []
for e in edges_pos:
preds.append(sigmoid(adj_rec[e[0], e[1]]))
pos.append(adj_orig[e[0], e[1]])
preds_neg = []
neg = []
for e in edges_neg:
preds_neg.append(sigmoid(adj_rec[e[0], e[1]]))
neg.append(adj_orig[e[0], e[1]])
preds_all = np.hstack([preds, preds_neg])
labels_all = np.hstack([np.ones(len(preds)), np.zeros(len(preds_neg))])
roc_score = roc_auc_score(labels_all, preds_all)
ap_score = average_precision_score(labels_all, preds_all)
return roc_score, ap_score, preds_all, labels_all
val_roc_score = []
# Reconstruction target: training adjacency with self-loops restored.
adj_label = adj_train + sp.eye(adj_train.shape[0])
adj_label = sparse_to_tuple(adj_label)
# Train model
for epoch in range(gcn_epochs):
t = time.time()
# Construct feed dictionary
feed_dict = construct_feed_dict(adj_norm, adj_label, features, placeholders)
feed_dict.update({placeholders['dropout']: 0.5})
# Run single weight update
outs = sess.run([opt.opt_op, opt.cost, opt.accuracy], feed_dict=feed_dict)
# Compute average loss
avg_cost = outs[1]
avg_accuracy = outs[2]
roc_curr, ap_curr, preds, labels = get_roc_score(val_edges, val_edges_false)
val_roc_score.append(roc_curr)
print("Epoch:", '%04d' % (epoch + 1), "train_loss=", "{:.5f}".format(avg_cost),
"train_acc=", "{:.5f}".format(avg_accuracy), "val_roc=", "{:.5f}".format(val_roc_score[-1]),
"val_ap=", "{:.5f}".format(ap_curr),
"time=", "{:.5f}".format(time.time() - t))
print("Optimization Finished!")
roc_score, ap_score, _, _ = get_roc_score(test_edges, test_edges_false)
print('Test ROC score: ' + str(roc_score))
print('Test AP score: ' + str(ap_score))
# Final pass: embed every node using the full (un-masked) graph.
origin_adj_norm = preprocess_graph(adj_orig_orig)
origin_adj_label = adj_orig_orig + sp.eye(adj_orig_orig.shape[0])
origin_adj_label = sparse_to_tuple(origin_adj_label)
origin_features = sp.identity(adj_orig_orig.shape[0]) # featureless
origin_features = sp.coo_matrix(origin_features)
origin_features = sparse_to_tuple(origin_features)
origin_feed = construct_feed_dict(origin_adj_norm, origin_adj_label, origin_features, placeholders)
# feature extraction
if gcn_model == 'AE':
encoded = sess.run(model.embeddings, feed_dict=origin_feed)
elif gcn_model == 'VAE':
encoded = sess.run(model.z, feed_dict=origin_feed)
# Two sequential pickle.dump calls: readers must load embeddings first,
# then the id map, from the same file handle.
with open('{}/{}/{}_gcn_pretrain.pkl'.format(save_dir, data_name, data_name), 'wb') as f:
pickle.dump(encoded, f)
pickle.dump(idx_map, f)
# Added by Anubrata Start
def coauthor_pretrain(data_name, gcn_model, gcn_epochs, gcn_lr, gcn_hidden1, gcn_hidden2, save_dir):
"""Pre-train a GCN (variational) auto-encoder on the co-authorship graph.

Builds author-pair edges from the ';'-separated source_author/target_author
columns of glue/ACRS/full_context_<data_name>.csv, then follows the same
training pipeline as paper_pretrain and pickles the author embeddings plus
id map to <save_dir>/<data_name>/<data_name>_gcn_pretrain_author.pkl.

NOTE(review): indentation appears to have been stripped from this block by
the extraction pipeline; tokens below are preserved verbatim.
"""
NOW_DIR = os.getcwd()
temp_temp_list = []
# NOTE(review): tup_list is never used below.
tup_list = []
tup_dict = dict()
i=0
DATA_DIR = os.path.join(NOW_DIR, 'glue', 'ACRS', 'full_context_{}.csv'.format(data_name))
df = pd.read_csv(DATA_DIR)
# Single-author papers become a self-pair [author, author]; multi-author
# papers contribute every 2-combination of their authors.
for word in df['source_author']:
temp_list = word.split(';')
if len(temp_list)==1:
temp_temp_list.append(temp_list*2)
else:
temp_comb = list(combinations(temp_list, 2))
temp_temp_list.append(temp_comb)
for word in df['target_author']:
temp_list = word.split(';')
if len(temp_list)==1:
temp_temp_list.append(temp_list*2)
else:
temp_comb = list(combinations(temp_list, 2))
temp_temp_list.append(temp_comb)
# Flatten pairs into rows; a bare string (self-pair case) maps to itself.
for val in temp_temp_list:
for tup in val:
if tup is not None:
if type(tup) == str:
# print(tup)
tup_dict[i] = {"source_author":tup, "co_author":tup}
else:
tup_dict[i] = {"source_author":tup[0], "co_author":tup[1]}
i+=1
df_author = pd.DataFrame.from_dict(tup_dict, "index")
df_author.drop_duplicates(subset=['source_author','co_author'],keep='last', inplace=True)
df_author.reset_index(inplace=True)
df_author.drop(labels=['index'], axis=1, inplace=True)
# Debug dump of the edge list to the working directory.
df_author.to_csv('out.csv')
with open('{}/{}/{}.df_author_total.pkl'.format(save_dir, data_name, data_name), 'wb') as f:
pickle.dump(df_author.values, f)
features, adj, idx_map = load_data(path='{}/'.format(save_dir), edge_dataset='{}.df_author_total'.format(data_name),
feature_dataset=None, feature_less=True)
# From here on the pipeline mirrors paper_pretrain (see its comments):
# strip self-loops, hold out eval edges, build the TF1 graph, train, score,
# and extract embeddings over the full graph.
adj_orig = adj
adj_orig = adj_orig - sp.dia_matrix((adj_orig.diagonal()[np.newaxis, :], [0]), shape=adj_orig.shape)
adj_orig.eliminate_zeros()
adj_orig_orig = adj_orig.copy()
adj_train, train_edges, val_edges, val_edges_false, test_edges, test_edges_false = mask_test_edges(adj)
adj = adj_train
features = sp.identity(adj.shape[0]) # featureless
adj_norm = preprocess_graph(adj)
# Define placeholders
placeholders = {
'features': tf.sparse_placeholder(tf.float32),
'adj': tf.sparse_placeholder(tf.float32),
'adj_orig': tf.sparse_placeholder(tf.float32),
'dropout': tf.placeholder_with_default(0., shape=())
}
features = sp.coo_matrix(features)
num_nodes = adj.shape[0]
features = sparse_to_tuple(features)
num_features = features[2][1]
features_nonzero = features[1].shape[0]
# Create model
if gcn_model == 'AE':
model = GCNModelAE(placeholders, num_features, features_nonzero, gcn_hidden1, gcn_hidden2)
elif gcn_model == 'VAE':
model = GCNModelVAE(placeholders, num_features, num_nodes, features_nonzero, gcn_hidden1, gcn_hidden2)
pos_weight = float(adj.shape[0] * adj.shape[0] - adj.sum()) / adj.sum()
norm = adj.shape[0] * adj.shape[0] / float((adj.shape[0] * adj.shape[0] - adj.sum()) * 2)
# Optimizer
with tf.name_scope('optimizer'):
if gcn_model == 'AE':
opt = OptimizerAE(preds=model.reconstructions,
labels=tf.reshape(tf.sparse_tensor_to_dense(placeholders['adj_orig'],
validate_indices=False), [-1]),
pos_weight=pos_weight,
norm=norm,
learning_rate=gcn_lr)
elif gcn_model == 'VAE':
opt = OptimizerVAE(preds=model.reconstructions,
labels=tf.reshape(tf.sparse_tensor_to_dense(placeholders['adj_orig'],
validate_indices=False), [-1]),
model=model, num_nodes=num_nodes,
pos_weight=pos_weight,
norm=norm,
learning_rate=gcn_lr)
# Initialize session
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# Link-prediction scorer; closes over sess/model/feed_dict/adj_orig.
def get_roc_score(edges_pos, edges_neg, emb=None):
if emb is None:
feed_dict.update({placeholders['dropout']: 0})
emb = sess.run(model.z_mean, feed_dict=feed_dict)
def sigmoid(x):
return 1 / (1 + np.exp(-x))
# Predict on test set of edges
adj_rec = np.dot(emb, emb.T)
preds = []
pos = []
for e in edges_pos:
preds.append(sigmoid(adj_rec[e[0], e[1]]))
pos.append(adj_orig[e[0], e[1]])
preds_neg = []
neg = []
for e in edges_neg:
preds_neg.append(sigmoid(adj_rec[e[0], e[1]]))
neg.append(adj_orig[e[0], e[1]])
preds_all = np.hstack([preds, preds_neg])
labels_all = np.hstack([np.ones(len(preds)), np.zeros(len(preds_neg))])
roc_score = roc_auc_score(labels_all, preds_all)
ap_score = average_precision_score(labels_all, preds_all)
return roc_score, ap_score, preds_all, labels_all
val_roc_score = []
adj_label = adj_train + sp.eye(adj_train.shape[0])
adj_label = sparse_to_tuple(adj_label)
# Train model
for epoch in range(gcn_epochs):
t = time.time()
# Construct feed dictionary
feed_dict = construct_feed_dict(adj_norm, adj_label, features, placeholders)
feed_dict.update({placeholders['dropout']: 0.5})
# Run single weight update
outs = sess.run([opt.opt_op, opt.cost, opt.accuracy], feed_dict=feed_dict)
# Compute average loss
avg_cost = outs[1]
avg_accuracy = outs[2]
roc_curr, ap_curr, preds, labels = get_roc_score(val_edges, val_edges_false)
val_roc_score.append(roc_curr)
print("Epoch:", '%04d' % (epoch + 1), "train_loss=", "{:.5f}".format(avg_cost),
"train_acc=", "{:.5f}".format(avg_accuracy), "val_roc=", "{:.5f}".format(val_roc_score[-1]),
"val_ap=", "{:.5f}".format(ap_curr),
"time=", "{:.5f}".format(time.time() - t))
print("Optimization Finished!")
roc_score, ap_score, _, _ = get_roc_score(test_edges, test_edges_false)
print('Test ROC score: ' + str(roc_score))
print('Test AP score: ' + str(ap_score))
origin_adj_norm = preprocess_graph(adj_orig_orig)
origin_adj_label = adj_orig_orig + sp.eye(adj_orig_orig.shape[0])
origin_adj_label = sparse_to_tuple(origin_adj_label)
origin_features = sp.identity(adj_orig_orig.shape[0]) # featureless
origin_features = sp.coo_matrix(origin_features)
origin_features = sparse_to_tuple(origin_features)
origin_feed = construct_feed_dict(origin_adj_norm, origin_adj_label, origin_features, placeholders)
# feature extraction
if gcn_model == 'AE':
encoded = sess.run(model.embeddings, feed_dict=origin_feed)
elif gcn_model == 'VAE':
encoded = sess.run(model.z, feed_dict=origin_feed)
# Embeddings first, then the id map, in the same pickle file.
with open('{}/{}/{}_gcn_pretrain_author.pkl'.format(save_dir, data_name, data_name), 'wb') as f:
pickle.dump(encoded, f)
pickle.dump(idx_map, f)
# Added by Anubrata End
| 40.840659
| 121
| 0.622225
| 1,971
| 14,866
| 4.39929
| 0.120751
| 0.027448
| 0.016607
| 0.013839
| 0.859993
| 0.840503
| 0.835198
| 0.828048
| 0.819975
| 0.819975
| 0
| 0.01152
| 0.25259
| 14,866
| 364
| 122
| 40.840659
| 0.768878
| 0.033096
| 0
| 0.777372
| 0
| 0
| 0.066425
| 0.007946
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021898
| false
| 0
| 0.062044
| 0.007299
| 0.09854
| 0.032847
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f7b4d0948f5f5794ba2367683175239a91df671
| 59
|
py
|
Python
|
dino/__init__.py
|
jedevc/docker-dino-demo
|
ad672889fc23ae56417f3f55fd388577153f330c
|
[
"Unlicense"
] | null | null | null |
dino/__init__.py
|
jedevc/docker-dino-demo
|
ad672889fc23ae56417f3f55fd388577153f330c
|
[
"Unlicense"
] | null | null | null |
dino/__init__.py
|
jedevc/docker-dino-demo
|
ad672889fc23ae56417f3f55fd388577153f330c
|
[
"Unlicense"
] | 1
|
2019-11-26T19:55:25.000Z
|
2019-11-26T19:55:25.000Z
|
# Re-export the app object from the .dino submodule. The second alias,
# `application`, is presumably for WSGI servers that look up that
# conventional module-level name — confirm against the deployment config.
from .dino import app
from .dino import app as application
| 19.666667
| 36
| 0.79661
| 10
| 59
| 4.7
| 0.6
| 0.340426
| 0.595745
| 0.723404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169492
| 59
| 2
| 37
| 29.5
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7f90822154f3f4f8f62d4e2d022e6ec4491ca924
| 30,737
|
py
|
Python
|
nova/tests/unit/test_rpc.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/test_rpc.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/test_rpc.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2016 IBM Corp.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
name|'import'
name|'copy'
newline|'\n'
nl|'\n'
name|'import'
name|'fixtures'
newline|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'import'
name|'oslo_messaging'
name|'as'
name|'messaging'
newline|'\n'
name|'from'
name|'oslo_serialization'
name|'import'
name|'jsonutils'
newline|'\n'
name|'import'
name|'testtools'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'rpc'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
nl|'\n'
nl|'\n'
comment|'# Make a class that resets all of the global variables in nova.rpc'
nl|'\n'
DECL|class|RPCResetFixture
name|'class'
name|'RPCResetFixture'
op|'('
name|'fixtures'
op|'.'
name|'Fixture'
op|')'
op|':'
newline|'\n'
DECL|member|_setUp
indent|' '
name|'def'
name|'_setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'trans'
op|'='
name|'copy'
op|'.'
name|'copy'
op|'('
name|'rpc'
op|'.'
name|'TRANSPORT'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'noti_trans'
op|'='
name|'copy'
op|'.'
name|'copy'
op|'('
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'noti'
op|'='
name|'copy'
op|'.'
name|'copy'
op|'('
name|'rpc'
op|'.'
name|'NOTIFIER'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'all_mods'
op|'='
name|'copy'
op|'.'
name|'copy'
op|'('
name|'rpc'
op|'.'
name|'ALLOWED_EXMODS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'ext_mods'
op|'='
name|'copy'
op|'.'
name|'copy'
op|'('
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'addCleanup'
op|'('
name|'self'
op|'.'
name|'_reset_everything'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_reset_everything
dedent|''
name|'def'
name|'_reset_everything'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'self'
op|'.'
name|'trans'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'='
name|'self'
op|'.'
name|'noti_trans'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'self'
op|'.'
name|'noti'
newline|'\n'
name|'rpc'
op|'.'
name|'ALLOWED_EXMODS'
op|'='
name|'self'
op|'.'
name|'all_mods'
newline|'\n'
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|'='
name|'self'
op|'.'
name|'ext_mods'
newline|'\n'
nl|'\n'
nl|'\n'
comment|"# We can't import nova.test.TestCase because that sets up an RPCFixture"
nl|'\n'
comment|'# that pretty much nullifies all of this testing'
nl|'\n'
DECL|class|TestRPC
dedent|''
dedent|''
name|'class'
name|'TestRPC'
op|'('
name|'testtools'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestRPC'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'RPCResetFixture'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'get_allowed_exmods'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'RequestContextSerializer'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_notification_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'Notifier'"
op|')'
newline|'\n'
DECL|member|test_init_unversioned
name|'def'
name|'test_init_unversioned'
op|'('
name|'self'
op|','
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
nl|'\n'
name|'mock_ser'
op|','
name|'mock_exmods'
op|')'
op|':'
newline|'\n'
comment|'# The expected call to get the legacy notifier will require no new'
nl|'\n'
comment|'# kwargs, and we expect the new notifier will need the noop driver'
nl|'\n'
indent|' '
name|'expected'
op|'='
op|'['
op|'{'
op|'}'
op|','
op|'{'
string|"'driver'"
op|':'
string|"'noop'"
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'_test_init'
op|'('
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
name|'mock_ser'
op|','
nl|'\n'
name|'mock_exmods'
op|','
string|"'unversioned'"
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'get_allowed_exmods'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'RequestContextSerializer'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_notification_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'Notifier'"
op|')'
newline|'\n'
DECL|member|test_init_both
name|'def'
name|'test_init_both'
op|'('
name|'self'
op|','
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
nl|'\n'
name|'mock_ser'
op|','
name|'mock_exmods'
op|')'
op|':'
newline|'\n'
indent|' '
name|'expected'
op|'='
op|'['
op|'{'
op|'}'
op|','
op|'{'
string|"'topics'"
op|':'
op|'['
string|"'versioned_notifications'"
op|']'
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'_test_init'
op|'('
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
name|'mock_ser'
op|','
nl|'\n'
name|'mock_exmods'
op|','
string|"'both'"
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'get_allowed_exmods'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'RequestContextSerializer'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_notification_transport'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'Notifier'"
op|')'
newline|'\n'
DECL|member|test_init_versioned
name|'def'
name|'test_init_versioned'
op|'('
name|'self'
op|','
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
nl|'\n'
name|'mock_ser'
op|','
name|'mock_exmods'
op|')'
op|':'
newline|'\n'
indent|' '
name|'expected'
op|'='
op|'['
op|'{'
string|"'driver'"
op|':'
string|"'noop'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'topics'"
op|':'
op|'['
string|"'versioned_notifications'"
op|']'
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'_test_init'
op|'('
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
name|'mock_ser'
op|','
nl|'\n'
name|'mock_exmods'
op|','
string|"'versioned'"
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cleanup_transport_null
dedent|''
name|'def'
name|'test_cleanup_transport_null'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'AssertionError'
op|','
name|'rpc'
op|'.'
name|'cleanup'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cleanup_notification_transport_null
dedent|''
name|'def'
name|'test_cleanup_notification_transport_null'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'AssertionError'
op|','
name|'rpc'
op|'.'
name|'cleanup'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cleanup_legacy_notifier_null
dedent|''
name|'def'
name|'test_cleanup_legacy_notifier_null'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cleanup_notifier_null
dedent|''
name|'def'
name|'test_cleanup_notifier_null'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'AssertionError'
op|','
name|'rpc'
op|'.'
name|'cleanup'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cleanup
dedent|''
name|'def'
name|'test_cleanup'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'trans_cleanup'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'not_trans_cleanup'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'.'
name|'cleanup'
op|'='
name|'trans_cleanup'
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|'.'
name|'cleanup'
op|'='
name|'not_trans_cleanup'
newline|'\n'
nl|'\n'
name|'rpc'
op|'.'
name|'cleanup'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'trans_cleanup'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'not_trans_cleanup'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'rpc'
op|'.'
name|'TRANSPORT'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'rpc'
op|'.'
name|'NOTIFICATION_TRANSPORT'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'rpc'
op|'.'
name|'NOTIFIER'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'set_transport_defaults'"
op|')'
newline|'\n'
DECL|member|test_set_defaults
name|'def'
name|'test_set_defaults'
op|'('
name|'self'
op|','
name|'mock_set'
op|')'
op|':'
newline|'\n'
indent|' '
name|'control_exchange'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'rpc'
op|'.'
name|'set_defaults'
op|'('
name|'control_exchange'
op|')'
newline|'\n'
nl|'\n'
name|'mock_set'
op|'.'
name|'assert_called_once_with'
op|'('
name|'control_exchange'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_add_extra_exmods
dedent|''
name|'def'
name|'test_add_extra_exmods'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
name|'rpc'
op|'.'
name|'add_extra_exmods'
op|'('
string|"'foo'"
op|','
string|"'bar'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'foo'"
op|','
string|"'bar'"
op|']'
op|','
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_clear_extra_exmods
dedent|''
name|'def'
name|'test_clear_extra_exmods'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|'='
op|'['
string|"'foo'"
op|','
string|"'bar'"
op|']'
newline|'\n'
nl|'\n'
name|'rpc'
op|'.'
name|'clear_extra_exmods'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'len'
op|'('
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_allowed_exmods
dedent|''
name|'def'
name|'test_get_allowed_exmods'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'ALLOWED_EXMODS'
op|'='
op|'['
string|"'foo'"
op|']'
newline|'\n'
name|'rpc'
op|'.'
name|'EXTRA_EXMODS'
op|'='
op|'['
string|"'bar'"
op|']'
newline|'\n'
nl|'\n'
name|'exmods'
op|'='
name|'rpc'
op|'.'
name|'get_allowed_exmods'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'foo'"
op|','
string|"'bar'"
op|']'
op|','
name|'exmods'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'TransportURL'"
op|')'
newline|'\n'
DECL|member|test_get_transport_url
name|'def'
name|'test_get_transport_url'
op|'('
name|'self'
op|','
name|'mock_url'
op|')'
op|':'
newline|'\n'
indent|' '
name|'conf'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'CONF'
op|'='
name|'conf'
newline|'\n'
name|'mock_url'
op|'.'
name|'parse'
op|'.'
name|'return_value'
op|'='
string|"'foo'"
newline|'\n'
nl|'\n'
name|'url'
op|'='
name|'rpc'
op|'.'
name|'get_transport_url'
op|'('
name|'url_str'
op|'='
string|"'bar'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'foo'"
op|','
name|'url'
op|')'
newline|'\n'
name|'mock_url'
op|'.'
name|'parse'
op|'.'
name|'assert_called_once_with'
op|'('
name|'conf'
op|','
string|"'bar'"
op|','
nl|'\n'
name|'rpc'
op|'.'
name|'TRANSPORT_ALIASES'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'TransportURL'"
op|')'
newline|'\n'
DECL|member|test_get_transport_url_null
name|'def'
name|'test_get_transport_url_null'
op|'('
name|'self'
op|','
name|'mock_url'
op|')'
op|':'
newline|'\n'
indent|' '
name|'conf'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'rpc'
op|'.'
name|'CONF'
op|'='
name|'conf'
newline|'\n'
name|'mock_url'
op|'.'
name|'parse'
op|'.'
name|'return_value'
op|'='
string|"'foo'"
newline|'\n'
nl|'\n'
name|'url'
op|'='
name|'rpc'
op|'.'
name|'get_transport_url'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'foo'"
op|','
name|'url'
op|')'
newline|'\n'
name|'mock_url'
op|'.'
name|'parse'
op|'.'
name|'assert_called_once_with'
op|'('
name|'conf'
op|','
name|'None'
op|','
nl|'\n'
name|'rpc'
op|'.'
name|'TRANSPORT_ALIASES'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'RequestContextSerializer'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'RPCClient'"
op|')'
newline|'\n'
DECL|member|test_get_client
name|'def'
name|'test_get_client'
op|'('
name|'self'
op|','
name|'mock_client'
op|','
name|'mock_ser'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'tgt'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'ser'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_client'
op|'.'
name|'return_value'
op|'='
string|"'client'"
newline|'\n'
name|'mock_ser'
op|'.'
name|'return_value'
op|'='
name|'ser'
newline|'\n'
nl|'\n'
name|'client'
op|'='
name|'rpc'
op|'.'
name|'get_client'
op|'('
name|'tgt'
op|','
name|'version_cap'
op|'='
string|"'1.0'"
op|','
name|'serializer'
op|'='
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_ser'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'foo'"
op|')'
newline|'\n'
name|'mock_client'
op|'.'
name|'assert_called_once_with'
op|'('
name|'rpc'
op|'.'
name|'TRANSPORT'
op|','
nl|'\n'
name|'tgt'
op|','
name|'version_cap'
op|'='
string|"'1.0'"
op|','
nl|'\n'
name|'serializer'
op|'='
name|'ser'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'client'"
op|','
name|'client'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'rpc'
op|','
string|"'RequestContextSerializer'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'messaging'
op|','
string|"'get_rpc_server'"
op|')'
newline|'\n'
DECL|member|test_get_server
name|'def'
name|'test_get_server'
op|'('
name|'self'
op|','
name|'mock_get'
op|','
name|'mock_ser'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'TRANSPORT'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'ser'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'tgt'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'ends'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_ser'
op|'.'
name|'return_value'
op|'='
name|'ser'
newline|'\n'
name|'mock_get'
op|'.'
name|'return_value'
op|'='
string|"'server'"
newline|'\n'
nl|'\n'
name|'server'
op|'='
name|'rpc'
op|'.'
name|'get_server'
op|'('
name|'tgt'
op|','
name|'ends'
op|','
name|'serializer'
op|'='
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_ser'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'foo'"
op|')'
newline|'\n'
name|'mock_get'
op|'.'
name|'assert_called_once_with'
op|'('
name|'rpc'
op|'.'
name|'TRANSPORT'
op|','
name|'tgt'
op|','
name|'ends'
op|','
nl|'\n'
name|'executor'
op|'='
string|"'eventlet'"
op|','
name|'serializer'
op|'='
name|'ser'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'server'"
op|','
name|'server'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_notifier
dedent|''
name|'def'
name|'test_get_notifier'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'.'
name|'return_value'
op|'='
string|"'notifier'"
newline|'\n'
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'.'
name|'prepare'
op|'='
name|'mock_prep'
newline|'\n'
nl|'\n'
name|'notifier'
op|'='
name|'rpc'
op|'.'
name|'get_notifier'
op|'('
string|"'service'"
op|','
name|'publisher_id'
op|'='
string|"'foo'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_prep'
op|'.'
name|'assert_called_once_with'
op|'('
name|'publisher_id'
op|'='
string|"'foo'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'notifier'
op|','
name|'rpc'
op|'.'
name|'LegacyValidatingNotifier'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'notifier'"
op|','
name|'notifier'
op|'.'
name|'notifier'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_notifier_null_publisher
dedent|''
name|'def'
name|'test_get_notifier_null_publisher'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'.'
name|'return_value'
op|'='
string|"'notifier'"
newline|'\n'
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|'.'
name|'prepare'
op|'='
name|'mock_prep'
newline|'\n'
nl|'\n'
name|'notifier'
op|'='
name|'rpc'
op|'.'
name|'get_notifier'
op|'('
string|"'service'"
op|','
name|'host'
op|'='
string|"'bar'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_prep'
op|'.'
name|'assert_called_once_with'
op|'('
name|'publisher_id'
op|'='
string|"'service.bar'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'notifier'
op|','
name|'rpc'
op|'.'
name|'LegacyValidatingNotifier'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'notifier'"
op|','
name|'notifier'
op|'.'
name|'notifier'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_versioned_notifier
dedent|''
name|'def'
name|'test_get_versioned_notifier'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'mock_prep'
op|'.'
name|'return_value'
op|'='
string|"'notifier'"
newline|'\n'
name|'rpc'
op|'.'
name|'NOTIFIER'
op|'.'
name|'prepare'
op|'='
name|'mock_prep'
newline|'\n'
nl|'\n'
name|'notifier'
op|'='
name|'rpc'
op|'.'
name|'get_versioned_notifier'
op|'('
string|"'service.foo'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_prep'
op|'.'
name|'assert_called_once_with'
op|'('
name|'publisher_id'
op|'='
string|"'service.foo'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'notifier'"
op|','
name|'notifier'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_init
dedent|''
name|'def'
name|'_test_init'
op|'('
name|'self'
op|','
name|'mock_notif'
op|','
name|'mock_noti_trans'
op|','
name|'mock_trans'
op|','
name|'mock_ser'
op|','
nl|'\n'
name|'mock_exmods'
op|','
name|'notif_format'
op|','
name|'expected_driver_topic_kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'legacy_notifier'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'notifier'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'notif_transport'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'transport'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'serializer'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'conf'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'conf'
op|'.'
name|'notification_format'
op|'='
name|'notif_format'
newline|'\n'
name|'mock_exmods'
op|'.'
name|'return_value'
op|'='
op|'['
string|"'foo'"
op|']'
newline|'\n'
name|'mock_trans'
op|'.'
name|'return_value'
op|'='
name|'transport'
newline|'\n'
name|'mock_noti_trans'
op|'.'
name|'return_value'
op|'='
name|'notif_transport'
newline|'\n'
name|'mock_ser'
op|'.'
name|'return_value'
op|'='
name|'serializer'
newline|'\n'
name|'mock_notif'
op|'.'
name|'side_effect'
op|'='
op|'['
name|'legacy_notifier'
op|','
name|'notifier'
op|']'
newline|'\n'
nl|'\n'
name|'rpc'
op|'.'
name|'init'
op|'('
name|'conf'
op|')'
newline|'\n'
nl|'\n'
name|'mock_exmods'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'mock_trans'
op|'.'
name|'assert_called_once_with'
op|'('
name|'conf'
op|','
nl|'\n'
name|'allowed_remote_exmods'
op|'='
op|'['
string|"'foo'"
op|']'
op|','
nl|'\n'
name|'aliases'
op|'='
name|'rpc'
op|'.'
name|'TRANSPORT_ALIASES'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNotNone'
op|'('
name|'rpc'
op|'.'
name|'TRANSPORT'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNotNone'
op|'('
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNotNone'
op|'('
name|'rpc'
op|'.'
name|'NOTIFIER'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'legacy_notifier'
op|','
name|'rpc'
op|'.'
name|'LEGACY_NOTIFIER'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'notifier'
op|','
name|'rpc'
op|'.'
name|'NOTIFIER'
op|')'
newline|'\n'
nl|'\n'
name|'expected_calls'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'kwargs'
name|'in'
name|'expected_driver_topic_kwargs'
op|':'
newline|'\n'
indent|' '
name|'expected_kwargs'
op|'='
op|'{'
string|"'serializer'"
op|':'
name|'serializer'
op|'}'
newline|'\n'
name|'expected_kwargs'
op|'.'
name|'update'
op|'('
name|'kwargs'
op|')'
newline|'\n'
name|'expected_calls'
op|'.'
name|'append'
op|'('
op|'('
op|'('
name|'notif_transport'
op|','
op|')'
op|','
name|'expected_kwargs'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'expected_calls'
op|','
name|'mock_notif'
op|'.'
name|'call_args_list'
op|','
nl|'\n'
string|'"The calls to messaging.Notifier() did not create "'
nl|'\n'
string|'"the legacy and versioned notifiers properly."'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestJsonPayloadSerializer
dedent|''
dedent|''
name|'class'
name|'TestJsonPayloadSerializer'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_serialize_entity
indent|' '
name|'def'
name|'test_serialize_entity'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'jsonutils'
op|','
string|"'to_primitive'"
op|')'
name|'as'
name|'mock_prim'
op|':'
newline|'\n'
indent|' '
name|'rpc'
op|'.'
name|'JsonPayloadSerializer'
op|'.'
name|'serialize_entity'
op|'('
string|"'context'"
op|','
string|"'entity'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_prim'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'entity'"
op|','
name|'convert_instances'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestRequestContextSerializer
dedent|''
dedent|''
name|'class'
name|'TestRequestContextSerializer'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestRequestContextSerializer'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mock_base'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'ser'
op|'='
name|'rpc'
op|'.'
name|'RequestContextSerializer'
op|'('
name|'self'
op|'.'
name|'mock_base'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'ser_null'
op|'='
name|'rpc'
op|'.'
name|'RequestContextSerializer'
op|'('
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_serialize_entity
dedent|''
name|'def'
name|'test_serialize_entity'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mock_base'
op|'.'
name|'serialize_entity'
op|'.'
name|'return_value'
op|'='
string|"'foo'"
newline|'\n'
nl|'\n'
name|'ser_ent'
op|'='
name|'self'
op|'.'
name|'ser'
op|'.'
name|'serialize_entity'
op|'('
string|"'context'"
op|','
string|"'entity'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mock_base'
op|'.'
name|'serialize_entity'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'context'"
op|','
nl|'\n'
string|"'entity'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'foo'"
op|','
name|'ser_ent'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_serialize_entity_null_base
dedent|''
name|'def'
name|'test_serialize_entity_null_base'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ser_ent'
op|'='
name|'self'
op|'.'
name|'ser_null'
op|'.'
name|'serialize_entity'
op|'('
string|"'context'"
op|','
string|"'entity'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'entity'"
op|','
name|'ser_ent'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deserialize_entity
dedent|''
name|'def'
name|'test_deserialize_entity'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mock_base'
op|'.'
name|'deserialize_entity'
op|'.'
name|'return_value'
op|'='
string|"'foo'"
newline|'\n'
nl|'\n'
name|'deser_ent'
op|'='
name|'self'
op|'.'
name|'ser'
op|'.'
name|'deserialize_entity'
op|'('
string|"'context'"
op|','
string|"'entity'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mock_base'
op|'.'
name|'deserialize_entity'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'context'"
op|','
nl|'\n'
string|"'entity'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'foo'"
op|','
name|'deser_ent'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_deserialize_entity_null_base
dedent|''
name|'def'
name|'test_deserialize_entity_null_base'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'deser_ent'
op|'='
name|'self'
op|'.'
name|'ser_null'
op|'.'
name|'deserialize_entity'
op|'('
string|"'context'"
op|','
string|"'entity'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'entity'"
op|','
name|'deser_ent'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_serialize_context
dedent|''
name|'def'
name|'test_serialize_context'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'context'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'ser'
op|'.'
name|'serialize_context'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'context'
op|'.'
name|'to_dict'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'context'
op|','
string|"'RequestContext'"
op|')'
newline|'\n'
DECL|member|test_deserialize_context
name|'def'
name|'test_deserialize_context'
op|'('
name|'self'
op|','
name|'mock_req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ser'
op|'.'
name|'deserialize_context'
op|'('
string|"'context'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_req'
op|'.'
name|'from_dict'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'context'"
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.344177
| 88
| 0.608517
| 4,509
| 30,737
| 4.043247
| 0.051896
| 0.194175
| 0.107509
| 0.069113
| 0.869398
| 0.830344
| 0.787011
| 0.745488
| 0.704405
| 0.668586
| 0
| 0.000468
| 0.096301
| 30,737
| 2,489
| 89
| 12.349136
| 0.655866
| 0
| 0
| 0.950181
| 0
| 0
| 0.361551
| 0.055145
| 0
| 0
| 0
| 0
| 0.02049
| 0
| null | null | 0
| 0.004018
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7fa71e9d7e561df38184fa3560670b33e5461e7b
| 13,186
|
py
|
Python
|
app/osm_observer/changes/test_changes.py
|
grischard/osm-observer
|
9e833e98696abc4a2aab942c8899aaf039166fc1
|
[
"MIT"
] | 4
|
2018-04-24T17:55:08.000Z
|
2021-02-18T00:52:04.000Z
|
app/osm_observer/changes/test_changes.py
|
grischard/osm-observer
|
9e833e98696abc4a2aab942c8899aaf039166fc1
|
[
"MIT"
] | 1
|
2021-02-08T20:30:42.000Z
|
2021-02-08T20:30:42.000Z
|
app/osm_observer/changes/test_changes.py
|
grischard/osm-observer
|
9e833e98696abc4a2aab942c8899aaf039166fc1
|
[
"MIT"
] | 2
|
2019-09-27T23:57:11.000Z
|
2020-09-19T19:01:37.000Z
|
import pytest
from osm_observer.changes.changes import collect_changeset
# #################################
# see conftest.py for conn fixture
# #################################
def test_missing_changeset(conn):
    """A changeset id that does not exist yields an empty result skeleton.

    The collector must not fail on unknown ids: it returns the requested
    id plus empty change lists and empty element maps.
    """
    result = collect_changeset(conn, 1)
    assert result == {
        'changeset': {'id': 1},
        'changes': {'nodes': [], 'ways': [], 'relations': []},
        'elements': {'nodes': {}, 'ways': {}, 'relations': {}},
    }
def test_multiple_node_versions(conn):
    """Changesets pick the node/way versions valid at their point in time.

    Fixture: node 10001 has versions 1-6 (created, modified four times,
    then deleted) spread over changesets 10090-10096; way 10050 has
    versions 1-3 referencing nodes 10000 and 10001. Each collected
    changeset must report only its own edits, with ``key``/``prevKey``
    version references resolved against the node versions current at
    that changeset's time.
    """
    # NOTE: literals below were Python-2 longs (e.g. 10050L); the suffix is
    # dropped for Python 3 compatibility — the values are identical (PEP 237).
    conn.execute('''
        INSERT INTO nodes VALUES (10000, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO nodes VALUES (10001, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.1), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"created"');
        INSERT INTO nodes VALUES (10001, false, true, false, 10092, ST_SetSRID(ST_MakePoint(0, 0.2), 4326), 'u1', 10080, '2018-11-15 17:22:04+00', 2, '"comment"=>"mod 2"');
        INSERT INTO nodes VALUES (10001, false, true, false, 10093, ST_SetSRID(ST_MakePoint(0, 0.3), 4326), 'u1', 10080, '2018-11-15 17:23:04+00', 3, '"comment"=>"mod 3"');
        INSERT INTO nodes VALUES (10001, false, true, false, 10094, ST_SetSRID(ST_MakePoint(0, 0.4), 4326), 'u1', 10080, '2018-11-15 17:24:04+00', 4, '"comment"=>"mod 4"');
        INSERT INTO nodes VALUES (10001, false, true, false, 10095, ST_SetSRID(ST_MakePoint(0, 0.5), 4326), 'u1', 10080, '2018-11-15 17:25:04+00', 5, '"comment"=>"mod 5"');
        INSERT INTO nodes VALUES (10001, false, false, true, 10096, ST_SetSRID(ST_MakePoint(0, 0.5), 4326), 'u1', 10080, '2018-11-15 17:26:04+00', 6, '"comment"=>"delete"');
        INSERT INTO ways VALUES (10050, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 1, 0, 10000);
        INSERT INTO nds VALUES (10050, 1, 1, 10001);
        INSERT INTO ways VALUES (10050, false, true, false, 10091, 'u1', 10080, '2018-11-15 17:20:04+00', 2, '"comment"=>"mod way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 2, 0, 10000);
        INSERT INTO nds VALUES (10050, 2, 1, 10001);
        INSERT INTO ways VALUES (10050, false, true, false, 10094, 'u1', 10080, '2018-11-15 17:20:04+00', 3, '"comment"=>"mod way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 3, 0, 10000);
        INSERT INTO nds VALUES (10050, 3, 1, 10001);
        INSERT INTO changesets VALUES (10090, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
        INSERT INTO changesets VALUES (10091, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
        INSERT INTO changesets VALUES (10094, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
        INSERT INTO changesets VALUES (10096, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
    ''', multi=True)

    # changeset 10090: way v1 created, referencing both nodes at version 1
    changes = collect_changeset(conn, 10090)
    assert changes['changes']['ways'] == [{'added': True, 'deleted': False, 'id': 10050, 'key': '10050-1', 'modified': False, 'prevKey': None, 'version': 1}]
    assert changes['elements']['ways']['10050-1']['nds'] == ['10000-1', '10001-1']

    # changeset 10091: way modified to v2; node versions unchanged
    changes = collect_changeset(conn, 10091)
    assert changes['changes']['ways'] == [{'added': False, 'deleted': False, 'id': 10050, 'key': '10050-2', 'modified': True, 'prevKey': '10050-1', 'version': 2}]
    assert changes['elements']['ways']['10050-1']['nds'] == ['10000-1', '10001-1']
    assert changes['elements']['ways']['10050-2']['nds'] == ['10000-1', '10001-1']  # nodes were not changed
    assert set(changes['elements']['nodes'].keys()) == set(['10000-1', '10001-1'])

    # changeset 10094: way v3 plus node 10001 v4 created in the same changeset
    changes = collect_changeset(conn, 10094)
    assert changes['changes']['ways'] == [{'added': False, 'deleted': False, 'id': 10050, 'key': '10050-3', 'modified': True, 'prevKey': '10050-2', 'version': 3}]
    assert changes['elements']['ways']['10050-2']['nds'] == ['10000-1', '10001-3']  # node changed in 10092 and 10093, old way now points to 10001-3
    assert changes['elements']['ways']['10050-3']['nds'] == ['10000-1', '10001-4']  # 10001 was changed with 10094
    assert set(changes['elements']['nodes'].keys()) == set(['10000-1', '10001-3', '10001-4'])

    # changeset 10096: node 10001 deleted (v6); previous version reported too
    changes = collect_changeset(conn, 10096)
    assert changes['changes']['nodes'] == [{'added': False, 'deleted': True, 'id': 10001, 'key': '10001-6', 'modified': False, 'prevKey': '10001-5', 'version': 6}]
    assert set(changes['elements']['nodes'].keys()) == set(['10001-5', '10001-6'])
def test_missing_nodes_of_way(conn):
    """A way referencing an absent node gets a placeholder '<id>-0' key.

    Way 10050 lists nodes 10000 and 10001, but only 10000 is inserted;
    the missing node must appear in the way's ``nds`` as '10001-0' and be
    absent from the collected node elements.
    """
    # NOTE: 10050 was a Python-2 long literal (10050L); the suffix is dropped
    # for Python 3 compatibility — the value is identical (PEP 237).
    conn.execute('''
        INSERT INTO nodes VALUES (10000, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO ways VALUES (10050, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 1, 0, 10000);
        INSERT INTO nds VALUES (10050, 1, 1, 10001);
        INSERT INTO changesets VALUES (10090, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
    ''', multi=True)

    changes = collect_changeset(conn, 10090)
    assert changes['changes']['ways'] == [{'added': True, 'deleted': False, 'id': 10050, 'key': '10050-1', 'modified': False, 'prevKey': None, 'version': 1}]
    assert changes['elements']['ways']['10050-1']['nds'] == ['10000-1', '10001-0']  # node 10001 is missing
    assert set(changes['elements']['nodes'].keys()) == set(['10000-1'])
def test_node_members_of_relation(conn):
    """Relation members are resolved; missing node members get '<id>-0'.

    Relation 10070 references node 10000 (present), node 10001 (absent)
    and way 10050 (complete). The missing node member must be keyed
    '10001-0'; nodes from both the member way and the direct node members
    must appear in the collected node elements.
    """
    # NOTE: 10070 was a Python-2 long literal (10070L); the suffix is dropped
    # for Python 3 compatibility — the value is identical (PEP 237).
    conn.execute('''
        INSERT INTO nodes VALUES (10000, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO nodes VALUES (10010, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO nodes VALUES (10011, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO ways VALUES (10050, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 1, 0, 10010);
        INSERT INTO nds VALUES (10050, 1, 1, 10011);
        INSERT INTO relations VALUES (10070, true, false, false, 10091, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10070, 1, 0, 'node', 'stop',10000, null, null);
        INSERT INTO members VALUES (10070, 1, 1, 'node', 'stop', 10001, null, null); -- missing
        INSERT INTO members VALUES (10070, 1, 2, 'way', '', null, 10050, null);
        INSERT INTO changesets VALUES (10091, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
    ''', multi=True)

    changes = collect_changeset(conn, 10091)
    # changeset 10091 contains only the relation; its member objects are
    # pulled in as elements, not reported as changes
    assert changes['changes']['ways'] == []
    assert changes['changes']['nodes'] == []
    assert changes['changes']['relations'] == [{'added': True, 'deleted': False, 'id': 10070, 'key': '10070-1', 'modified': False, 'prevKey': None, 'version': 1}]
    assert changes['elements']['relations']['10070-1']['members'] == [
        {'node': '10000-1', 'role': 'stop'},
        {'node': '10001-0', 'role': 'stop'},  # missing
        {'role': '', 'way': '10050-1'},
    ]
    assert changes['elements']['ways']['10050-1']['nds'] == ['10010-1', '10011-1']  # member way is complete
    assert set(changes['elements']['nodes'].keys()) == set(['10000-1', '10010-1', '10011-1'])  # nodes from way and relation member
def test_way_members_of_relation(conn):
    """Way members of a relation: a complete way, an incomplete way (one of
    its nodes is missing from the DB) and an entirely missing way must all be
    represented; only the relation version belonging to the collected
    changeset (10091) is returned, not the newer version 2.
    """
    conn.execute('''
        -- complete way
        INSERT INTO nodes VALUES (10000, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO nodes VALUES (10001, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO ways VALUES (10050, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 1, 0, 10000);
        INSERT INTO nds VALUES (10050, 1, 1, 10001);
        -- incomplete way
        INSERT INTO nodes VALUES (10002, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO ways VALUES (10051, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10051, 1, 0, 10002);
        INSERT INTO nds VALUES (10051, 1, 1, 10003); -- missing
        -- newer version, not to be returned
        INSERT INTO ways VALUES (10050, true, false, false, 10099, 'u1', 10080, '2018-11-15 17:20:04+00', 2, '"comment"=>"new way", "highway"=>"secondary"');
        INSERT INTO nds VALUES (10050, 2, 0, 10000);
        INSERT INTO nds VALUES (10050, 2, 1, 10001);
        -- relation with complete, incomplete and missing way members
        INSERT INTO relations VALUES (10070, true, false, false, 10091, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10070, 1, 0, 'way', '', null, 10050, null);
        INSERT INTO members VALUES (10070, 1, 1, 'way', '', null, 10051, null); -- incomplete
        INSERT INTO members VALUES (10070, 1, 2, 'way', '', null, 10052, null); -- missing
        -- newer version, not to be returned
        INSERT INTO relations VALUES (10070, true, false, false, 10099, 'u1', 10080, '2018-11-15 17:20:04+00', 2, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10070, 2, 0, 'way', '', null, 10051, null); -- incomplete
        INSERT INTO members VALUES (10070, 2, 2, 'way', '', null, 10050, null);
        INSERT INTO members VALUES (10070, 2, 1, 'way', '', null, 10052, null); -- missing
        INSERT INTO changesets VALUES (10091, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
    ''', multi=True)
    changes = collect_changeset(conn, 10091)
    assert changes['changes']['ways'] == []
    assert changes['changes']['nodes'] == []
    # Version 1 only -- version 2 of the relation belongs to changeset 10099.
    assert changes['changes']['relations'] == [{'added': True, 'deleted': False, 'id': 10070L, 'key': '10070-1', 'modified': False, 'prevKey': None, 'version': 1}]
    # Missing way 10052 shows up with version 0.
    assert changes['elements']['relations']['10070-1']['members'] == [
        {'way': '10050-1', 'role': ''},
        {'way': '10051-1', 'role': ''},
        {'way': '10052-0', 'role': ''},
    ]
    assert changes['elements']['ways']['10050-1']['nds'] == ['10000-1', '10001-1']
    # The incomplete way lists its missing node 10003 as version 0 ...
    assert changes['elements']['ways']['10051-1']['nds'] == ['10002-1', '10003-0']
    # ... and only actually-existing nodes appear in the elements map.
    assert set(changes['elements']['nodes'].keys()) == set(['10000-1', '10001-1', '10002-1'])
def test_relation_members_of_relation(conn):
    """Relation members that are themselves relations are listed as members
    but are NOT expanded recursively: neither relation 10070 nor its node
    member 10000 may appear in the collected elements.
    """
    conn.execute('''
        INSERT INTO nodes VALUES (10000, true, false, false, 10090, ST_SetSRID(ST_MakePoint(0, 0.0), 4326), 'u1', 10080, '2018-11-15 17:20:04+00', 1, '');
        INSERT INTO relations VALUES (10070, true, false, false, 10090, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10070, 1, 0, 'node', '', 10000, null, null);
        INSERT INTO relations VALUES (10071, true, false, false, 10091, 'u1', 10080, '2018-11-15 17:20:04+00', 1, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10071, 1, 0, 'relation', '', null, null, 10070);
        -- newer version, not to be returned
        INSERT INTO relations VALUES (10071, true, false, false, 10099, 'u1', 10080, '2018-11-15 17:20:04+00', 2, '"comment"=>"new way", "type"=>"route"');
        INSERT INTO members VALUES (10071, 2, 0, 'relation', '', null, null, 10070);
        INSERT INTO changesets VALUES (10091, '2018-11-15 17:20:04+00', '2018-11-15 17:20:04+00', 1, false, 'u1', 10080, '', ST_MakeEnvelope(0, 0, 0, 0, 4326));
    ''', multi=True)
    changes = collect_changeset(conn, 10091)
    assert changes['changes']['ways'] == []
    assert changes['changes']['nodes'] == []
    assert changes['changes']['relations'] == [{'added': True, 'deleted': False, 'id': 10071L, 'key': '10071-1', 'modified': False, 'prevKey': None, 'version': 1}]
    assert changes['elements']['relations']['10071-1']['members'] == [
        {'relation': '10070-1', 'role': ''},
    ]
    assert '10070-1' not in changes['elements']['relations'] # relation members are not added recursively!
    assert set(changes['elements']['relations'].keys()) == set(['10071-1'])
    assert set(changes['elements']['nodes'].keys()) == set()
| 66.93401
| 174
| 0.60003
| 1,894
| 13,186
| 4.140444
| 0.067582
| 0.082887
| 0.045907
| 0.057383
| 0.836266
| 0.816246
| 0.791125
| 0.765494
| 0.72303
| 0.681714
| 0
| 0.214953
| 0.180419
| 13,186
| 196
| 175
| 67.27551
| 0.510688
| 0.021235
| 0
| 0.412162
| 0
| 0.331081
| 0.743334
| 0.049665
| 0
| 0
| 0
| 0
| 0.243243
| 0
| null | null | 0
| 0.013514
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7fac3014f6fc852d365eef7cabc0153793b71897
| 20,675
|
py
|
Python
|
tests/test_simple.py
|
HBrendy/bifrostlib
|
979b7792b996630f3943208cf0a717ef9aece4af
|
[
"MIT"
] | null | null | null |
tests/test_simple.py
|
HBrendy/bifrostlib
|
979b7792b996630f3943208cf0a717ef9aece4af
|
[
"MIT"
] | null | null | null |
tests/test_simple.py
|
HBrendy/bifrostlib
|
979b7792b996630f3943208cf0a717ef9aece4af
|
[
"MIT"
] | null | null | null |
import pytest
from bifrostlib import datahandling
from bifrostlib import database_interface
from bifrostlib.datahandling import Category
from bifrostlib.datahandling import ComponentReference
from bifrostlib.datahandling import Component
from bifrostlib.datahandling import SampleReference
from bifrostlib.datahandling import Sample
from bifrostlib.datahandling import HostReference
from bifrostlib.datahandling import Host
from bifrostlib.datahandling import RunReference
from bifrostlib.datahandling import Run
from bifrostlib.datahandling import SampleComponentReference
from bifrostlib.datahandling import SampleComponent
from bifrostlib.datahandling import RunComponentReference
from bifrostlib.datahandling import RunComponent
import pymongo
import os
import time
@pytest.fixture
def test_connection():
    # NOTE(review): this is declared as a pytest *fixture*, yet it is named
    # like a test, contains only assertions, and no test in this file requests
    # it -- so these safety checks likely never execute. Confirm whether the
    # @pytest.fixture decorator was intended here.
    assert datahandling.has_a_database_connection()
    assert "TEST" in os.environ['BIFROST_DB_KEY'].upper()  # A very basic piece of protection ensuring the word test is in the DB
def test_load_schema():
    """The bundled bifrost schema must load and be non-None."""
    assert datahandling.load_schema() is not None
class TestComponents:
    """CRUD round-trip tests for Component against a live test MongoDB."""

    # Seed document inserted into the 'components' collection before the class runs.
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_component1"}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Start from a clean database, then insert the seed component.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["components"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_component_create(self):
        """Saving a brand-new Component must assign it an _id."""
        test_component = Component(name="test_component")
        print(test_component)
        test_component.save()
        assert "_id" in test_component.json

    def test_component_create_from_ref(self):
        """Delete the seed component, then recreate it from its JSON value."""
        _id = "000000000000000000000001"
        name = "test_component"
        component = Component.load(reference=ComponentReference(_id=_id, name=name))
        assert component.delete()
        test_component = Component(value=self.json_entries[0])
        test_component.save()
        # NOTE(review): this inspects the JSON of the previously *deleted*
        # object, not the newly saved one -- presumably the same document;
        # confirm that test_component.json was not intended here.
        json = component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]

    def test_component_load(self):
        """Component.load must resolve a reference by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_component1"
        # Test load on just _id
        reference = ComponentReference(_id=_id)
        component = Component.load(reference)
        json = component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del component
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = ComponentReference(name=name)
        component = Component.load(reference)
        json = component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del component
        # Test load on both _id and name
        reference = ComponentReference(_id=_id, name=name)
        component = Component.load(reference)
        json = component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del component
        # Test load on a reference built from the full JSON value
        reference = ComponentReference(value=self.json_entries[0])
        component = Component.load(reference)
        json = component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del component

    def test_component_delete(self):
        """Deleting a loaded component must report success."""
        _id = "000000000000000000000001"
        name = "test_component"
        component = Component.load(ComponentReference(_id=_id, name=name))
        assert component.delete()
class TestSamples:
    """CRUD round-trip tests for Sample against a live test MongoDB."""

    # Seed document inserted into the 'samples' collection before the class runs.
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_sample1", "components": [], "categories": {}}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Start from a clean database, then insert the seed sample.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["samples"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_sample_create(self):
        """Saving a brand-new Sample must assign it an _id."""
        test_sample = Sample(name="test_sample")
        test_sample.save()
        assert "_id" in test_sample.json

    def test_sample_create_from_ref(self):
        """Delete the seed sample, then recreate it from its JSON value."""
        _id = "000000000000000000000001"
        name = "test_sample"
        sample = Sample.load(SampleReference(_id=_id, name=name))
        assert sample.delete()
        test_sample = Sample(value=self.json_entries[0])
        test_sample.save()
        # NOTE(review): this inspects the JSON of the previously *deleted*
        # object, not the newly saved one; confirm test_sample.json was not
        # intended here.
        json = sample.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]

    def test_sample_load(self):
        """Sample.load must resolve a reference by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_sample1"
        # Test load on just _id
        reference = SampleReference(_id=_id)
        sample = Sample.load(reference)
        json = sample.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = SampleReference(name=name)
        sample = Sample.load(reference)
        json = sample.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample
        # Test load on both _id and name
        reference = SampleReference(_id=_id, name=name)
        sample = Sample.load(reference)
        json = sample.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample
        # Test load on a reference built from the full JSON value
        reference = SampleReference(value=self.json_entries[0])
        sample = Sample.load(reference)
        json = sample.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample

    def test_sample_delete(self):
        """Deleting a loaded sample must report success."""
        _id = "000000000000000000000001"
        name = "test_sample"
        sample = Sample.load(SampleReference(_id=_id, name=name))
        assert sample.delete()
class TestHosts:
    """CRUD round-trip tests for Host against a live test MongoDB."""

    # Seed document inserted into the 'hosts' collection before the class runs.
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_host1", "samples": []}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Start from a clean database, then insert the seed host.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["hosts"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_host_create(self):
        """Saving a brand-new Host must assign it an _id."""
        test_host = Host(name="test_host")
        test_host.save()
        assert "_id" in test_host.json

    def test_host_create_from_ref(self):
        """Delete the seed host, then recreate it from its JSON value."""
        _id = "000000000000000000000001"
        name = "test_host"
        host = Host.load(HostReference(_id=_id, name=name))
        assert host.delete()
        test_host = Host(value=self.json_entries[0])
        test_host.save()
        # NOTE(review): this inspects the JSON of the previously *deleted*
        # object, not the newly saved one; confirm test_host.json was not
        # intended here.
        json = host.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]

    def test_host_load(self):
        """Host.load must resolve a reference by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_host1"
        # Test load on just _id
        reference = HostReference(_id=_id)
        host = Host.load(reference)
        json = host.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del host
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = HostReference(name=name)
        host = Host.load(reference)
        json = host.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del host
        # Test load on both _id and name
        reference = HostReference(_id=_id, name=name)
        host = Host.load(reference)
        json = host.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del host
        # Test load on a reference built from the full JSON value
        reference = HostReference(value=self.json_entries[0])
        host = Host.load(reference)
        json = host.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del host

    def test_host_delete(self):
        """Deleting a loaded host must report success."""
        _id = "000000000000000000000001"
        name = "test_host"
        host = Host.load(HostReference(_id=_id, name=name))
        assert host.delete()
class TestRuns:
    """CRUD round-trip tests for Run against a live test MongoDB."""

    # Seed document inserted into the 'runs' collection before the class runs.
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_run1", "samples": [], "components": [], "hosts": []}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Start from a clean database, then insert the seed run.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["runs"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_run_create(self):
        """Saving a brand-new Run must assign it an _id."""
        test_run = Run(name="test_run")
        test_run.save()
        assert "_id" in test_run.json

    def test_run_create_from_ref(self):
        """Delete the seed run, then recreate it from its JSON value."""
        _id = "000000000000000000000001"
        name = "test_run"
        run = Run.load(RunReference(_id=_id, name=name))
        assert run.delete()
        test_run = Run(value=self.json_entries[0])
        test_run.save()
        # NOTE(review): this inspects the JSON of the previously *deleted*
        # object, not the newly saved one; confirm test_run.json was not
        # intended here.
        json = run.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]

    def test_run_load(self):
        """Run.load must resolve a reference by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_run1"
        # Test load on just _id
        reference = RunReference(_id=_id)
        run = Run.load(reference)
        json = run.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = RunReference(name=name)
        run = Run.load(reference)
        json = run.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run
        # Test load on both _id and name
        reference = RunReference(_id=_id, name=name)
        run = Run.load(reference)
        json = run.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run
        # Test load on a reference built from the full JSON value
        reference = RunReference(value=self.json_entries[0])
        run = Run.load(reference)
        json = run.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run

    def test_run_delete(self):
        """Deleting a loaded run must report success."""
        _id = "000000000000000000000001"
        name = "test_run"
        run = Run.load(RunReference(_id=_id, name=name))
        assert run.delete()
class TestSampleComponents:
    """CRUD tests for SampleComponent, the join of a sample and a component."""

    # Seed documents: the referenced sample and component, plus the
    # sample_component document that links them.
    json_entries_samples = [{"_id": {"$oid": "0000000000000000000000a1"}, "name": "test_sample1", "components": [], "categories": {}}]
    bson_entries_samples = [database_interface.json_to_bson(i) for i in json_entries_samples]
    json_entries_components = [{"_id": {"$oid": "0000000000000000000000b1"}, "name": "test_component1"}]
    bson_entries_components = [database_interface.json_to_bson(i) for i in json_entries_components]
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_sample_component1", "sample": {"_id": {"$oid": "0000000000000000000000a1"}, "name": "test_sample1"}, "component": {"_id": {"$oid": "0000000000000000000000b1"}, "name": "test_component1"}}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Clean DB, then seed samples, components and the linking document.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["samples"]
        col.insert_many(cls.bson_entries_samples)
        col = db["components"]
        col.insert_many(cls.bson_entries_components)
        col = db["sample_components"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_sample_component_create(self):
        """A SampleComponent built from two references gets an _id on save."""
        sample = Sample(value=self.json_entries_samples[0])
        component = Component(value=self.json_entries_components[0])
        test_sample_component = SampleComponent(sample_reference=sample.to_reference(), component_reference=component.to_reference())
        test_sample_component.save()
        assert "_id" in test_sample_component.json

    def test_sample_component_load(self):
        """SampleComponent.load must resolve by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_sample_component1"
        # Test load on just _id
        reference = SampleComponentReference(_id=_id)
        sample_component = SampleComponent.load(reference)
        json = sample_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample_component
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = SampleComponentReference(name=name)
        sample_component = SampleComponent.load(reference)
        json = sample_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample_component
        # Test load on both _id and name
        reference = SampleComponentReference(_id=_id, name=name)
        sample_component = SampleComponent.load(reference)
        json = sample_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample_component
        # Test load on a reference built from the full JSON value
        reference = SampleComponentReference(value=self.json_entries[0])
        sample_component = SampleComponent.load(reference)
        json = sample_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del sample_component

    def test_sample_component_delete(self):
        """Deleting a loaded sample_component must report success."""
        _id = "000000000000000000000001"
        name = "test_sample_component"
        sample_component = SampleComponent.load(SampleComponentReference(_id=_id, name=name))
        assert sample_component.delete()
class TestRunComponents:
    """CRUD tests for RunComponent, the join of a run and a component."""

    # Seed documents: the referenced run and component, plus the
    # run_component document that links them.
    json_entries_runs = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_run1", "samples": [], "components": [], "hosts": []}]
    bson_entries_runs = [database_interface.json_to_bson(i) for i in json_entries_runs]
    json_entries_components = [{"_id": {"$oid": "0000000000000000000000b1"}, "name": "test_component1"}]
    bson_entries_components = [database_interface.json_to_bson(i) for i in json_entries_components]
    # Fixed: the original seed was copy-pasted from TestSampleComponents and
    # still carried name "test_sample_component1" and a "sample" sub-document,
    # so the load-by-name case below could never match. NOTE(review): the
    # "run" key is assumed from RunComponent's run_reference parameter --
    # verify against the run_component schema.
    json_entries = [{"_id": {"$oid": "000000000000000000000001"}, "name": "test_run_component1", "run": {"_id": {"$oid": "000000000000000000000001"}, "name": "test_run1"}, "component": {"_id": {"$oid": "0000000000000000000000b1"}, "name": "test_component1"}}]
    bson_entries = [database_interface.json_to_bson(i) for i in json_entries]

    @classmethod
    def setup_class(cls):
        # Clean DB, then seed runs, components and the linking document.
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)
        col = db["runs"]
        col.insert_many(cls.bson_entries_runs)
        col = db["components"]
        col.insert_many(cls.bson_entries_components)
        col = db["run_components"]
        col.insert_many(cls.bson_entries)

    @classmethod
    def teardown_class(cls):
        client = pymongo.MongoClient(os.environ['BIFROST_DB_KEY'])
        db = client.get_database()
        cls.clear_all_collections(db)

    @staticmethod
    def clear_all_collections(db):
        # Drop every bifrost collection so no test sees leftover documents.
        db.drop_collection("components")
        db.drop_collection("hosts")
        db.drop_collection("run_components")
        db.drop_collection("runs")
        db.drop_collection("sample_components")
        db.drop_collection("samples")

    def test_run_component_create(self):
        """A RunComponent built from two references gets an _id on save."""
        run = Run(value=self.json_entries_runs[0])
        component = Component(value=self.json_entries_components[0])
        test_run_component = RunComponent(run_reference=run.to_reference(), component_reference=component.to_reference())
        test_run_component.save()
        assert "_id" in test_run_component.json

    def test_run_component_load(self):
        """RunComponent.load must resolve by _id, name, both, or value."""
        _id = "000000000000000000000001"
        name = "test_run_component1"
        # Test load on just _id
        reference = RunComponentReference(_id=_id)
        run_component = RunComponent.load(reference)
        json = run_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run_component
        # Test load on just name
        # (fixed: the original assigned to a misspelled 'refrence' and then
        # loaded the stale _id reference, so load-by-name was never exercised)
        reference = RunComponentReference(name=name)
        run_component = RunComponent.load(reference)
        json = run_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run_component
        # Test load on both _id and name
        reference = RunComponentReference(_id=_id, name=name)
        run_component = RunComponent.load(reference)
        json = run_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run_component
        # Test load on a reference built from the full JSON value
        reference = RunComponentReference(value=self.json_entries[0])
        run_component = RunComponent.load(reference)
        json = run_component.json
        json.pop("version", None)
        json.pop("metadata", None)
        assert json == self.json_entries[0]
        del run_component

    def test_run_component_delete(self):
        """Deleting a loaded run_component must report success."""
        _id = "000000000000000000000001"
        name = "test_run_component"
        run_component = RunComponent.load(RunComponentReference(_id=_id, name=name))
        assert run_component.delete()
| 37.659381
| 268
| 0.652479
| 2,392
| 20,675
| 5.409281
| 0.046823
| 0.052709
| 0.04869
| 0.04699
| 0.838473
| 0.827189
| 0.768143
| 0.716516
| 0.704305
| 0.685138
| 0
| 0.049231
| 0.238597
| 20,675
| 548
| 269
| 37.728102
| 0.77271
| 0.034341
| 0
| 0.731343
| 0
| 0
| 0.125182
| 0.040473
| 0
| 0
| 0
| 0
| 0.100213
| 1
| 0.089552
| false
| 0
| 0.040512
| 0
| 0.185501
| 0.002132
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f691f6667399115e7d7766b656097725e561db73
| 86
|
py
|
Python
|
examples/generate_examples.py
|
flaviusfetean/visualkeras
|
dabfc8f8680d538670b25c6e557c3fa094a95825
|
[
"MIT"
] | 148
|
2020-10-05T14:26:35.000Z
|
2022-03-31T20:44:19.000Z
|
examples/generate_examples.py
|
flaviusfetean/visualkeras
|
dabfc8f8680d538670b25c6e557c3fa094a95825
|
[
"MIT"
] | 21
|
2020-10-05T18:18:10.000Z
|
2022-02-27T03:43:23.000Z
|
examples/generate_examples.py
|
flaviusfetean/visualkeras
|
dabfc8f8680d538670b25c6e557c3fa094a95825
|
[
"MIT"
] | 23
|
2021-01-02T23:04:18.000Z
|
2022-02-24T08:50:49.000Z
|
from examples import dense
from examples import vgg16
from examples import autoencoder
| 28.666667
| 32
| 0.872093
| 12
| 86
| 6.25
| 0.5
| 0.48
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.127907
| 86
| 3
| 32
| 28.666667
| 0.973333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f6a4c09dbf95d36c3f3b7c1da6298b361f831bdb
| 370
|
py
|
Python
|
src/back/kite/threads.py
|
khamidou/kite
|
c049faf8522c8346c22c70f2a35a35db6b4a155d
|
[
"BSD-3-Clause"
] | 136
|
2015-01-06T01:14:35.000Z
|
2022-01-20T17:04:52.000Z
|
src/back/kite/threads.py
|
khamidou/kite
|
c049faf8522c8346c22c70f2a35a35db6b4a155d
|
[
"BSD-3-Clause"
] | 3
|
2016-01-14T21:37:10.000Z
|
2019-04-17T02:44:08.000Z
|
src/back/kite/threads.py
|
khamidou/kite
|
c049faf8522c8346c22c70f2a35a35db6b4a155d
|
[
"BSD-3-Clause"
] | 38
|
2015-02-28T14:12:26.000Z
|
2021-01-17T21:01:02.000Z
|
# threads management functions
import jsonfile
import datetime
import uuid
import base64
import os
def generate_random_id():
    """Return a random thread identifier: 32 urandom bytes, Base32-encoded."""
    # FIXME: maybe use better function ?
    raw = os.urandom(32)
    return base64.b32encode(raw)
def create_thread_structure():
    """Build a fresh, empty, unread thread record with a random id."""
    thread = {
        "date": datetime.datetime.utcnow(),
        "messages": [],
        "subject": "",
        "id": generate_random_id(),
        "unread": True,
    }
    return thread
| 26.428571
| 122
| 0.721622
| 45
| 370
| 5.8
| 0.688889
| 0.10728
| 0.122605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025478
| 0.151351
| 370
| 13
| 123
| 28.461538
| 0.805732
| 0.17027
| 0
| 0
| 1
| 0
| 0.088816
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 1
| 0.222222
| true
| 0
| 0.555556
| 0.222222
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f6b207a6d147275f13115378ddd38439d5a3ade8
| 2,325
|
py
|
Python
|
migrations/versions/c3374760a005_.py
|
m20-sch57/Fejudge
|
3eba1879be608e119b7891a1dbe7c9e450cc3914
|
[
"MIT"
] | 2
|
2019-12-25T14:06:09.000Z
|
2020-04-02T22:06:28.000Z
|
migrations/versions/c3374760a005_.py
|
m20-sch57/Fejudge
|
3eba1879be608e119b7891a1dbe7c9e450cc3914
|
[
"MIT"
] | 9
|
2020-05-14T16:15:29.000Z
|
2020-09-01T14:17:19.000Z
|
migrations/versions/c3374760a005_.py
|
m20-sch57/Fejudge
|
3eba1879be608e119b7891a1dbe7c9e450cc3914
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: c3374760a005
Revises: 46e2c7e5111a
Create Date: 2020-08-10 20:19:39.710729
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c3374760a005'
down_revision = '46e2c7e5111a'
branch_labels = None
depends_on = None
def upgrade():
    """Widen each affected column from VARCHAR(16) to String(32)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Same columns, same order as the generated version -- just table-driven.
    widened_columns = [
        ('contest', 'contest_type'),
        ('problem', 'problem_type'),
        ('problem', 'status'),
        ('submission', 'language'),
        ('submission', 'status'),
    ]
    for table_name, column_name in widened_columns:
        op.alter_column(table_name, column_name,
                        existing_type=sa.VARCHAR(length=16),
                        type_=sa.String(length=32),
                        existing_nullable=True)
    # ### end Alembic commands ###
def downgrade():
    """Shrink each affected column back from String(32) to VARCHAR(16)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Exact reverse order of upgrade(), as the generated version had it.
    narrowed_columns = [
        ('submission', 'status'),
        ('submission', 'language'),
        ('problem', 'status'),
        ('problem', 'problem_type'),
        ('contest', 'contest_type'),
    ]
    for table_name, column_name in narrowed_columns:
        op.alter_column(table_name, column_name,
                        existing_type=sa.String(length=32),
                        type_=sa.VARCHAR(length=16),
                        existing_nullable=True)
    # ### end Alembic commands ###
| 34.701493
| 65
| 0.603011
| 258
| 2,325
| 5.251938
| 0.22093
| 0.088561
| 0.095941
| 0.140221
| 0.816236
| 0.816236
| 0.789668
| 0.789668
| 0.782288
| 0.761624
| 0
| 0.056973
| 0.275269
| 2,325
| 66
| 66
| 35.227273
| 0.747181
| 0.126882
| 0
| 0.833333
| 0
| 0
| 0.097341
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.041667
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63d81c901804b159e7953179c43297a7b57f9992
| 7,394
|
py
|
Python
|
tests/integration/supersim/v1/test_usage_record.py
|
ashish-s/twilio-python
|
5462b05af0906a1464b1e95a56a1f15afddc3b8c
|
[
"MIT"
] | 1
|
2020-10-29T19:28:25.000Z
|
2020-10-29T19:28:25.000Z
|
tests/integration/supersim/v1/test_usage_record.py
|
CostantiniMatteo/twilio-python
|
9eee1ca9e73790b12678e9a5660206ea44948d00
|
[
"MIT"
] | 1
|
2021-08-21T22:54:01.000Z
|
2021-08-23T19:39:42.000Z
|
tests/integration/supersim/v1/test_usage_record.py
|
team-telnyx/twexit-python
|
69e11c5c2b5681f9bc410795dda0cf8942219e6f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class UsageRecordTestCase(IntegrationTestCase):
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.supersim.v1.usage_records.list()
self.holodeck.assert_has_request(Request(
'get',
'https://supersim.twilio.com/v1/UsageRecords',
))
    def test_read_all_response(self):
        """A 200 list payload with one monthly usage record parses into a
        non-None result."""
        # Canned API payload served by the holodeck mock instead of the live API.
        self.holodeck.mock(Response(
            200,
            '''
            {
                "usage_records": [
                    {
                        "period": {
                            "start_time": "2015-05-01T20:00:00Z",
                            "end_time": "2015-06-01T20:00:00Z"
                        },
                        "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "data_upload": 1000,
                        "data_download": 1000,
                        "data_total": 2000,
                        "sim_sid": null
                    }
                ],
                "meta": {
                    "first_page_url": "https://supersim.twilio.com/v1/UsageRecords?PageSize=50&Page=0",
                    "key": "usage_records",
                    "next_page_url": null,
                    "page": 0,
                    "page_size": 50,
                    "previous_page_url": null,
                    "url": "https://supersim.twilio.com/v1/UsageRecords?PageSize=50&Page=0"
                }
            }
            '''
        ))

        actual = self.client.supersim.v1.usage_records.list()

        self.assertIsNotNone(actual)
    def test_read_all_day_response(self):
        """A 200 list payload with day-granularity records (two periods)
        parses into a non-None result."""
        # Canned API payload served by the holodeck mock instead of the live API.
        self.holodeck.mock(Response(
            200,
            '''
            {
                "usage_records": [
                    {
                        "period": {
                            "start_time": "2019-05-01T00:00:00Z",
                            "end_time": "2019-05-03T00:00:00Z"
                        },
                        "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "data_upload": 1000,
                        "data_download": 1000,
                        "data_total": 2000,
                        "sim_sid": null
                    },
                    {
                        "period": {
                            "start_time": "2019-05-03T00:00:00Z",
                            "end_time": "2019-05-04T00:00:00Z"
                        },
                        "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                        "data_upload": 1000,
                        "data_download": 1000,
                        "data_total": 2000,
                        "sim_sid": null
                    }
                ],
                "meta": {
                    "first_page_url": "https://supersim.twilio.com/v1/UsageRecords?Granularity=day&PageSize=50&Page=0",
                    "key": "usage_records",
                    "next_page_url": null,
                    "page": 0,
                    "page_size": 50,
                    "previous_page_url": null,
                    "url": "https://supersim.twilio.com/v1/UsageRecords?Granularity=day&PageSize=50&Page=0"
                }
            }
            '''
        ))

        actual = self.client.supersim.v1.usage_records.list()

        self.assertIsNotNone(actual)
def test_read_all_hour_response(self):
self.holodeck.mock(Response(
200,
'''
{
"usage_records": [
{
"period": {
"start_time": "2019-05-01T00:00:00Z",
"end_time": "2019-05-01T01:00:00Z"
},
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"data_upload": 1000,
"data_download": 1000,
"data_total": 2000,
"sim_sid": null
},
{
"period": {
"start_time": "2019-05-01T01:00:00Z",
"end_time": "2019-05-01T02:00:00Z"
},
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"data_upload": 1000,
"data_download": 1000,
"data_total": 2000,
"sim_sid": null
}
],
"meta": {
"first_page_url": "https://supersim.twilio.com/v1/UsageRecords?Granularity=hour&PageSize=50&Page=0",
"key": "usage_records",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://supersim.twilio.com/v1/UsageRecords?Granularity=hour&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.supersim.v1.usage_records.list()
self.assertIsNotNone(actual)
def test_read_day_sim_filter_response(self):
self.holodeck.mock(Response(
200,
'''
{
"usage_records": [
{
"period": {
"start_time": "2019-05-01T00:00:00Z",
"end_time": "2019-05-03T00:00:00Z"
},
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"data_upload": 1000,
"data_download": 1000,
"data_total": 2000,
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
},
{
"period": {
"start_time": "2019-05-03T00:00:00Z",
"end_time": "2019-05-04T00:00:00Z"
},
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"data_upload": 1000,
"data_download": 1000,
"data_total": 2000,
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
],
"meta": {
"first_page_url": "https://supersim.twilio.com/v1/UsageRecords?Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&Granularity=day&PageSize=50&Page=0",
"key": "usage_records",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://supersim.twilio.com/v1/UsageRecords?Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&Granularity=day&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.supersim.v1.usage_records.list()
self.assertIsNotNone(actual)
| 36.97
| 158
| 0.419124
| 560
| 7,394
| 5.321429
| 0.160714
| 0.02349
| 0.040268
| 0.066443
| 0.870134
| 0.860738
| 0.839262
| 0.839262
| 0.825839
| 0.825839
| 0
| 0.089053
| 0.469976
| 7,394
| 199
| 159
| 37.155779
| 0.671345
| 0.014742
| 0
| 0.552632
| 1
| 0
| 0.031967
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.131579
| false
| 0
| 0.105263
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63ed7bf9aecba5e2267fcb961e9bbe9a6fe03358
| 29,982
|
py
|
Python
|
tests/test_pmft.py
|
phmalek/freud
|
cb0781f2009758638cd79a0bb6d44801e5473774
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_pmft.py
|
phmalek/freud
|
cb0781f2009758638cd79a0bb6d44801e5473774
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_pmft.py
|
phmalek/freud
|
cb0781f2009758638cd79a0bb6d44801e5473774
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import numpy.testing as npt
import freud
import unittest
import warnings
import util
class TestPMFTR12(unittest.TestCase):
    """Tests for freud.pmft.PMFTR12 (PMFT binned in r, theta1, theta2)."""

    def test_box(self):
        """accumulate() stores the 2D box; a 3D (cube) box raises ValueError."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_equal(myPMFT.box, freud.box.Box.square(boxSize))

        # Ensure expected errors are raised
        box = freud.box.Box.cube(boxSize)
        with self.assertRaises(ValueError):
            myPMFT.accumulate(box, points, angles, points, angles)

    def test_r_cut(self):
        """The cutoff radius equals the maximum radial extent maxR."""
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)
        npt.assert_allclose(myPMFT.r_cut, maxR, atol=1e-6)

    def test_bins(self):
        """Bin centers and the inverse Jacobian match a direct recomputation."""
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        dr = (maxR / float(nbinsR))
        dT1 = (2.0 * np.pi / float(nbinsT1))
        dT2 = (2.0 * np.pi / float(nbinsT2))

        # make sure the radius for each bin is generated correctly
        listR = np.zeros(nbinsR, dtype=np.float32)
        listT1 = np.zeros(nbinsT1, dtype=np.float32)
        listT2 = np.zeros(nbinsT2, dtype=np.float32)

        for i in range(nbinsR):
            r = float(i) * dr
            nextr = float(i + 1) * dr
            # Area-weighted radial bin center, not the arithmetic midpoint.
            listR[i] = 2.0/3.0 * (
                nextr*nextr*nextr - r*r*r)/(nextr*nextr - r*r)

        for i in range(nbinsT1):
            t = float(i) * dT1
            nextt = float(i + 1) * dT1
            listT1[i] = ((t + nextt) / 2.0)

        for i in range(nbinsT2):
            t = float(i) * dT2
            nextt = float(i + 1) * dT2
            listT2[i] = ((t + nextt) / 2.0)

        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)

        # Compare expected bins to the info from pmft
        npt.assert_allclose(myPMFT.R, listR, atol=1e-3)
        npt.assert_allclose(myPMFT.T1, listT1, atol=1e-3)
        npt.assert_allclose(myPMFT.T2, listT2, atol=1e-3)

        npt.assert_equal(nbinsR, myPMFT.n_bins_R)
        npt.assert_equal(nbinsT1, myPMFT.n_bins_T1)
        npt.assert_equal(nbinsT2, myPMFT.n_bins_T2)

        inverse_jacobian = np.array(
            [[[1/(R*dr*dT1*dT2)
               for T1 in listT1] for T2 in listT2] for R in listR])
        npt.assert_allclose(myPMFT.inverse_jacobian, inverse_jacobian,
                            atol=1e-5)

    def test_attribute_access(self):
        """Result attributes raise AttributeError before any compute and
        become available after accumulate()/compute(); reset() clears them."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.1, 0.0]],
                          dtype=np.float32)
        points.flags['WRITEABLE'] = False
        angles = np.array([0.0, np.pi/2], dtype=np.float32)
        angles.flags['WRITEABLE'] = False
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.accumulate(box, points, angles, points, angles)

        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box
        npt.assert_equal(myPMFT.bin_counts.shape, (nbinsR, nbinsT2, nbinsT1))
        npt.assert_equal(myPMFT.PCF.shape, (nbinsR, nbinsT2, nbinsT1))
        npt.assert_equal(myPMFT.PMFT.shape, (nbinsR, nbinsT2, nbinsT1))

        myPMFT.reset()

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.compute(box, points, angles, points, angles)
        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box

    def test_two_particles(self):
        """Two particles land in the bins predicted by a direct calculation."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.1, 0.0]],
                          dtype=np.float32)
        points.flags['WRITEABLE'] = False
        angles = np.array([0.0, np.pi/2], dtype=np.float32)
        angles.flags['WRITEABLE'] = False
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        dr = (maxR / float(nbinsR))
        dT1 = (2.0 * np.pi / float(nbinsT1))
        dT2 = (2.0 * np.pi / float(nbinsT2))

        # calculation for array idxs
        def get_bin(point_i, point_j, angle_i, angle_j):
            delta_x = point_j - point_i
            r_bin = np.floor(np.linalg.norm(delta_x)/dr)
            delta_t1 = np.arctan2(delta_x[1], delta_x[0])
            delta_t2 = np.arctan2(-delta_x[1], -delta_x[0])
            t1_bin = np.floor(((angle_i - delta_t1) % (2. * np.pi))/dT1)
            t2_bin = np.floor(((angle_j - delta_t2) % (2. * np.pi))/dT2)
            # NOTE: returned order is (r, t2, t1) to match the array layout.
            return np.array([r_bin, t2_bin, t1_bin], dtype=np.int32)

        correct_bin_counts = np.zeros(shape=(nbinsR, nbinsT2, nbinsT1),
                                      dtype=np.int32)
        bins = get_bin(points[0], points[1], angles[0], angles[1])
        correct_bin_counts[bins[0], bins[1], bins[2]] = 1
        bins = get_bin(points[1], points[0], angles[1], angles[0])
        correct_bin_counts[bins[0], bins[1], bins[2]] = 1
        absoluteTolerance = 0.1

        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        myPMFT.reset()
        myPMFT.compute(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

        # Omitting the second point set should default to the first.
        myPMFT.compute(box, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

    def test_repr(self):
        """repr() round-trips through eval() to an equivalent object."""
        maxR = 5.23
        nbinsR = 10
        nbinsT1 = 20
        nbinsT2 = 30
        myPMFT = freud.pmft.PMFTR12(maxR, nbinsR, nbinsT1, nbinsT2)
        self.assertEqual(str(myPMFT), str(eval(repr(myPMFT))))

    def test_ref_points_ne_points(self):
        """Distinct ref_points/points on an alternating lattice give the
        expected count of finite PMFT voxels and distinct values."""
        r_max = 2.3
        n_r = 10
        n_t1 = 10
        n_t2 = 10
        pmft = freud.pmft.PMFTR12(r_max, n_r, n_t1, n_t2)

        lattice_size = 10
        box = freud.box.Box.square(lattice_size*5)

        ref_points, points = util.make_alternating_lattice(
            lattice_size, 0.01, 2)
        ref_orientations = np.array([0]*len(ref_points))
        orientations = np.array([0]*len(points))

        pmft.compute(box, ref_points, ref_orientations, points, orientations)

        self.assertEqual(np.count_nonzero(np.isinf(pmft.PMFT) == 0), 12)
        self.assertEqual(len(np.unique(pmft.PMFT)), 3)
class TestPMFTXYT(unittest.TestCase):
    """Tests for freud.pmft.PMFTXYT (PMFT binned in x, y, theta)."""

    def test_box(self):
        """accumulate() stores the 2D box; a 3D (cube) box raises ValueError."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_equal(myPMFT.box, freud.box.Box.square(boxSize))

        # Ensure expected errors are raised
        box = freud.box.Box.cube(boxSize)
        with self.assertRaises(ValueError):
            myPMFT.accumulate(box, points, angles, points, angles)

    def test_r_cut(self):
        """The cutoff radius is the diagonal norm of the (maxX, maxY) window."""
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)
        npt.assert_allclose(myPMFT.r_cut,
                            np.linalg.norm([maxX, maxY]), atol=1e-6)

    def test_bins(self):
        """Bin centers and the Jacobian match a direct recomputation."""
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))
        dT = (2.0 * np.pi / float(nbinsT))

        # make sure the center for each bin is generated correctly
        listX = np.zeros(nbinsX, dtype=np.float32)
        listY = np.zeros(nbinsY, dtype=np.float32)
        listT = np.zeros(nbinsT, dtype=np.float32)

        for i in range(nbinsX):
            x = float(i) * dx
            nextX = float(i + 1) * dx
            listX[i] = -maxX + ((x + nextX) / 2.0)

        for i in range(nbinsY):
            y = float(i) * dy
            nextY = float(i + 1) * dy
            listY[i] = -maxY + ((y + nextY) / 2.0)

        for i in range(nbinsT):
            t = float(i) * dT
            nextt = float(i + 1) * dT
            listT[i] = ((t + nextt) / 2.0)

        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)

        # Compare expected bins to the info from pmft
        npt.assert_allclose(myPMFT.X, listX, atol=1e-3)
        npt.assert_allclose(myPMFT.Y, listY, atol=1e-3)
        npt.assert_allclose(myPMFT.T, listT, atol=1e-3)

        npt.assert_equal(nbinsX, myPMFT.n_bins_X)
        npt.assert_equal(nbinsY, myPMFT.n_bins_Y)
        npt.assert_equal(nbinsT, myPMFT.n_bins_T)

        npt.assert_allclose(myPMFT.jacobian, dx*dy*dT)

    def test_attribute_access(self):
        """Result attributes raise AttributeError before any compute and
        become available after accumulate()/compute(); reset() clears them."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.1, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, np.pi/2], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.accumulate(box, points, angles, points, angles)

        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box
        npt.assert_equal(myPMFT.bin_counts.shape, (nbinsT, nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PCF.shape, (nbinsT, nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PMFT.shape, (nbinsT, nbinsY, nbinsX))

        myPMFT.reset()

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.compute(box, points, angles, points, angles)
        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box

    def test_two_particles(self):
        """Two particles land in the bins predicted by a direct calculation
        (rotation into the reference particle frame plus angle binning)."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.1, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, np.pi/2], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))
        dT = (2.0 * np.pi / float(nbinsT))

        # calculation for array idxs
        def get_bin(point_i, point_j, angle_i, angle_j):
            delta_x = point_j - point_i
            # Rotate the separation into particle i's frame before binning.
            rot_mat = np.array([[np.cos(-angle_i), -np.sin(-angle_i)],
                                [np.sin(-angle_i), np.cos(-angle_i)]])
            rot_delta_x = np.matmul(rot_mat, delta_x[:2])
            xy_bins = np.floor((rot_delta_x + [maxX, maxY]) /
                               [dx, dy]).astype(np.int32)
            angle_bin = np.floor(
                ((angle_j - np.arctan2(-delta_x[1], -delta_x[0])) %
                 (2. * np.pi)) / dT).astype(np.int32)
            return [xy_bins[0], xy_bins[1], angle_bin]

        correct_bin_counts = np.zeros(shape=(nbinsT, nbinsY, nbinsX),
                                      dtype=np.int32)
        bins = get_bin(points[0], points[1], angles[0], angles[1])
        correct_bin_counts[bins[2], bins[1], bins[0]] = 1
        bins = get_bin(points[1], points[0], angles[1], angles[0])
        correct_bin_counts[bins[2], bins[1], bins[0]] = 1
        absoluteTolerance = 0.1

        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        myPMFT.reset()
        myPMFT.compute(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

        # Omitting the second point set should default to the first.
        myPMFT.compute(box, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

    def test_repr(self):
        """repr() round-trips through eval() to an equivalent object."""
        maxX = 3.0
        maxY = 4.0
        nbinsX = 20
        nbinsY = 30
        nbinsT = 40
        myPMFT = freud.pmft.PMFTXYT(maxX, maxY, nbinsX, nbinsY, nbinsT)
        self.assertEqual(str(myPMFT), str(eval(repr(myPMFT))))

    def test_ref_points_ne_points(self):
        """Distinct ref_points/points on an alternating lattice populate the
        expected number of finite voxels per angular slice."""
        x_max = 2.5
        y_max = 2.5
        n_x = 10
        n_y = 10
        n_t = 4
        pmft = freud.pmft.PMFTXYT(x_max, y_max, n_x, n_y, n_t)

        lattice_size = 10
        box = freud.box.Box.square(lattice_size*5)

        ref_points, points = util.make_alternating_lattice(
            lattice_size, 0.01, 2)
        ref_orientations = np.array([0]*len(ref_points))
        orientations = np.array([0]*len(points))

        pmft.compute(box, ref_points, ref_orientations, points, orientations)

        # when rotated slightly, for each ref point, each quadrant
        # (corresponding to two consecutive bins) should contain 3 points.
        for i in range(n_t):
            self.assertEqual(np.count_nonzero(np.isinf(pmft.PMFT[i]) == 0), 3)
        self.assertEqual(len(np.unique(pmft.PMFT)), 2)
class TestPMFTXY2D(unittest.TestCase):
    """Tests for freud.pmft.PMFTXY2D (PMFT binned in x, y)."""

    def test_box(self):
        """accumulate() stores the 2D box; a 3D (cube) box raises ValueError."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 100
        nbinsY = 110
        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_equal(myPMFT.box, freud.box.Box.square(boxSize))

        # Ensure expected errors are raised
        box = freud.box.Box.cube(boxSize)
        with self.assertRaises(ValueError):
            myPMFT.accumulate(box, points, angles, points, angles)

    def test_r_cut(self):
        """The cutoff radius is the diagonal norm of the (maxX, maxY) window."""
        maxX = 3.6
        maxY = 4.2
        nbinsX = 100
        nbinsY = 110
        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)
        npt.assert_allclose(myPMFT.r_cut,
                            np.linalg.norm([maxX, maxY]), atol=1e-6)

    def test_bins(self):
        """Bin centers and the Jacobian match a direct recomputation."""
        maxX = 3.6
        maxY = 4.2
        nbinsX = 20
        nbinsY = 30
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))

        # make sure the center for each bin is generated correctly
        listX = np.zeros(nbinsX, dtype=np.float32)
        listY = np.zeros(nbinsY, dtype=np.float32)

        for i in range(nbinsX):
            x = float(i) * dx
            nextX = float(i + 1) * dx
            listX[i] = -maxX + ((x + nextX) / 2.0)

        for i in range(nbinsY):
            y = float(i) * dy
            nextY = float(i + 1) * dy
            listY[i] = -maxY + ((y + nextY) / 2.0)

        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)

        # Compare expected bins to the info from pmft
        npt.assert_allclose(myPMFT.X, listX, atol=1e-3)
        npt.assert_allclose(myPMFT.Y, listY, atol=1e-3)

        npt.assert_equal(nbinsX, myPMFT.n_bins_X)
        npt.assert_equal(nbinsY, myPMFT.n_bins_Y)

        npt.assert_allclose(myPMFT.jacobian, dx*dy)

    def test_attribute_access(self):
        """Result attributes raise AttributeError before any compute and
        become available after accumulate()/compute(); reset() clears them."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 100
        nbinsY = 110
        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.accumulate(box, points, angles, points, angles)

        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box
        npt.assert_equal(myPMFT.bin_counts.shape, (nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PCF.shape, (nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PMFT.shape, (nbinsY, nbinsX))

        myPMFT.reset()

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.compute(box, points, angles, points, angles)
        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box

    def test_two_particles(self):
        """Two particles land in the bins predicted by a direct calculation."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 100
        nbinsY = 110
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))

        correct_bin_counts = np.zeros(shape=(nbinsY, nbinsX), dtype=np.int32)

        # calculation for array idxs
        def get_bin(point_i, point_j):
            # z is ignored; pad the offset/step vectors so shapes match.
            return np.floor((point_i - point_j + [maxX, maxY, 0]) /
                            [dx, dy, 1]).astype(np.int32)[:2]

        bins = get_bin(points[0], points[1])
        correct_bin_counts[bins[1], bins[0]] = 1
        bins = get_bin(points[1], points[0])
        correct_bin_counts[bins[1], bins[0]] = 1
        absoluteTolerance = 0.1

        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)
        myPMFT.accumulate(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        myPMFT.reset()
        myPMFT.compute(box, points, angles, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

        # Omitting the second point set should default to the first.
        myPMFT.compute(box, points, angles)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

    def test_repr(self):
        """repr() round-trips through eval() to an equivalent object."""
        maxX = 3.0
        maxY = 4.0
        nbinsX = 100
        nbinsY = 110
        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)
        self.assertEqual(str(myPMFT), str(eval(repr(myPMFT))))

    def test_repr_png(self):
        """plot() raises before compute; _repr_png_ is None before and works
        after accumulate()."""
        boxSize = 16.0
        box = freud.box.Box.square(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        angles = np.array([0.0, 0.0], dtype=np.float32)
        maxX = 3.6
        maxY = 4.2
        nbinsX = 100
        nbinsY = 110
        myPMFT = freud.pmft.PMFTXY2D(maxX, maxY, nbinsX, nbinsY)

        with self.assertRaises(AttributeError):
            myPMFT.plot()
        self.assertEqual(myPMFT._repr_png_(), None)

        myPMFT.accumulate(box, points, angles, points, angles)
        myPMFT._repr_png_()

    def test_ref_points_ne_points(self):
        """Distinct ref_points/points on an alternating lattice give the
        expected count of finite PMFT voxels and distinct values."""
        x_max = 2.5
        y_max = 2.5
        n_x = 20
        n_y = 20
        pmft = freud.pmft.PMFTXY2D(x_max, y_max, n_x, n_y)

        lattice_size = 10
        box = freud.box.Box.square(lattice_size*5)

        ref_points, points = util.make_alternating_lattice(
            lattice_size, 0.01, 2)
        ref_orientations = np.array([0]*len(ref_points))
        orientations = np.array([0]*len(points))

        pmft.compute(box, ref_points, ref_orientations, points, orientations)

        self.assertEqual(np.count_nonzero(np.isinf(pmft.PMFT) == 0), 12)
        self.assertEqual(len(np.unique(pmft.PMFT)), 2)
class TestPMFTXYZ(unittest.TestCase):
    """Tests for freud.pmft.PMFTXYZ (3D PMFT binned in x, y, z)."""

    def test_box(self):
        """accumulate() stores the 3D box; a 2D (square) box raises ValueError."""
        boxSize = 25.0
        box = freud.box.Box.cube(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        orientations = np.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=np.float32)
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)
        myPMFT.accumulate(box, points, orientations, points, orientations)
        npt.assert_equal(myPMFT.box, freud.box.Box.cube(boxSize))

        # Ensure expected errors are raised
        box = freud.box.Box.square(boxSize)
        with self.assertRaises(ValueError):
            myPMFT.accumulate(box, points, orientations, points, orientations)

    def test_r_cut(self):
        """The cutoff radius is the diagonal norm of (maxX, maxY, maxZ)."""
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)
        r_cut = np.linalg.norm([maxX, maxY, maxZ])
        npt.assert_allclose(myPMFT.r_cut, r_cut, atol=1e-6)

    def test_bins(self):
        """Bin centers and the Jacobian match a direct recomputation."""
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))
        dz = (2.0 * maxZ / float(nbinsZ))

        listX = np.zeros(nbinsX, dtype=np.float32)
        listY = np.zeros(nbinsY, dtype=np.float32)
        listZ = np.zeros(nbinsZ, dtype=np.float32)

        for i in range(nbinsX):
            x = float(i) * dx
            nextX = float(i + 1) * dx
            listX[i] = -maxX + ((x + nextX) / 2.0)

        for i in range(nbinsY):
            y = float(i) * dy
            nextY = float(i + 1) * dy
            listY[i] = -maxY + ((y + nextY) / 2.0)

        for i in range(nbinsZ):
            z = float(i) * dz
            nextZ = float(i + 1) * dz
            listZ[i] = -maxZ + ((z + nextZ) / 2.0)

        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)

        # Compare expected bins to the info from pmft
        npt.assert_allclose(myPMFT.X, listX, atol=1e-3)
        npt.assert_allclose(myPMFT.Y, listY, atol=1e-3)
        npt.assert_allclose(myPMFT.Z, listZ, atol=1e-3)

        npt.assert_equal(nbinsX, myPMFT.n_bins_X)
        npt.assert_equal(nbinsY, myPMFT.n_bins_Y)
        npt.assert_equal(nbinsZ, myPMFT.n_bins_Z)

        npt.assert_allclose(myPMFT.jacobian, dx*dy*dz)

    def test_attribute_access(self):
        """Result attributes raise AttributeError before any compute and
        become available after accumulate()/compute(); reset() clears them."""
        boxSize = 25.0
        box = freud.box.Box.cube(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        orientations = np.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=np.float32)
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.accumulate(box, points, orientations, points, orientations)

        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box
        npt.assert_equal(myPMFT.bin_counts.shape, (nbinsZ, nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PCF.shape, (nbinsZ, nbinsY, nbinsX))
        npt.assert_equal(myPMFT.PMFT.shape, (nbinsZ, nbinsY, nbinsX))

        myPMFT.reset()

        with self.assertRaises(AttributeError):
            myPMFT.PCF
        with self.assertRaises(AttributeError):
            myPMFT.bin_counts
        with self.assertRaises(AttributeError):
            myPMFT.box
        with self.assertRaises(AttributeError):
            myPMFT.PMFT

        myPMFT.compute(box, points, orientations, points, orientations)
        myPMFT.PCF
        myPMFT.bin_counts
        myPMFT.PMFT
        myPMFT.box

    def test_two_particles(self):
        """Two particles land in the bins predicted by a direct calculation;
        also exercises the accepted face_orientations array shapes."""
        boxSize = 25.0
        box = freud.box.Box.cube(boxSize)
        points = np.array([[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                          dtype=np.float32)
        orientations = np.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=np.float32)
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        dx = (2.0 * maxX / float(nbinsX))
        dy = (2.0 * maxY / float(nbinsY))
        dz = (2.0 * maxZ / float(nbinsZ))

        correct_bin_counts = np.zeros(shape=(nbinsZ, nbinsY, nbinsX),
                                      dtype=np.int32)

        # calculation for array idxs
        def get_bin(point_i, point_j):
            return np.floor((point_i - point_j + [maxX, maxY, maxZ]) /
                            [dx, dy, dz]).astype(np.int32)

        bins = get_bin(points[0], points[1])
        correct_bin_counts[bins[2], bins[1], bins[0]] = 1
        bins = get_bin(points[1], points[0])
        correct_bin_counts[bins[2], bins[1], bins[0]] = 1
        absoluteTolerance = 0.1

        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)
        myPMFT.accumulate(box, points, orientations, points, orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        myPMFT.reset()
        myPMFT.compute(box, points, orientations, points, orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

        # Test face orientations, shape (N_faces, 4)
        face_orientations = np.array([[1., 0., 0., 0.]])
        myPMFT.compute(box, points, orientations,
                       face_orientations=face_orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        # Test face orientations, shape (1, N_faces, 4)
        face_orientations = np.array([[[1., 0., 0., 0.]]])
        myPMFT.compute(box, points, orientations,
                       face_orientations=face_orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)
        # Test face orientations, shape (N_particles, N_faces, 4)
        face_orientations = np.array([[[1., 0., 0., 0.]], [[1., 0., 0., 0.]]])
        myPMFT.compute(box, points, orientations,
                       face_orientations=face_orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

        myPMFT.compute(box, points, orientations)
        npt.assert_allclose(myPMFT.bin_counts, correct_bin_counts,
                            atol=absoluteTolerance)

    def test_shift_two_particles_dead_pixel(self):
        """The shiftvec moves the binning window: without a shift the second
        particle falls outside the window; with shiftvec=[1,1,1] exactly one
        voxel is finite."""
        points = np.array([[1, 1, 1], [0, 0, 0]], dtype=np.float32)
        orientations = np.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=np.float32)
        noshift = freud.pmft.PMFTXYZ(0.5, 0.5, 0.5, 3, 3, 3,
                                     shiftvec=[0, 0, 0])
        shift = freud.pmft.PMFTXYZ(0.5, 0.5, 0.5, 3, 3, 3,
                                   shiftvec=[1, 1, 1])

        for pm in [noshift, shift]:
            pm.compute(freud.box.Box.cube(3), points, orientations,
                       points, orientations, face_orientations=None)

        # Ignore warnings about NaNs
        warnings.simplefilter("ignore", category=RuntimeWarning)

        # Non-shifted pmft should have no non-inf valued voxels,
        # since the other point is outside the x/y/z max
        infcheck_noshift = np.isfinite(noshift.PMFT).sum()
        # Shifted pmft should have one non-inf valued voxel
        infcheck_shift = np.isfinite(shift.PMFT).sum()

        npt.assert_equal(infcheck_noshift, 0)
        npt.assert_equal(infcheck_shift, 1)

    def test_repr(self):
        """repr() round-trips through eval() to an equivalent object."""
        maxX = 5.23
        maxY = 6.23
        maxZ = 7.23
        nbinsX = 100
        nbinsY = 110
        nbinsZ = 120
        myPMFT = freud.pmft.PMFTXYZ(maxX, maxY, maxZ, nbinsX, nbinsY, nbinsZ)
        self.assertEqual(str(myPMFT), str(eval(repr(myPMFT))))
if __name__ == '__main__':
unittest.main()
| 35.523697
| 79
| 0.565439
| 3,879
| 29,982
| 4.267337
| 0.058005
| 0.01764
| 0.016674
| 0.012082
| 0.879055
| 0.875128
| 0.857911
| 0.838035
| 0.796653
| 0.784148
| 0
| 0.051453
| 0.313521
| 29,982
| 843
| 80
| 35.565836
| 0.752794
| 0.034554
| 0
| 0.806452
| 0
| 0
| 0.001729
| 0
| 0
| 0
| 0
| 0
| 0.162757
| 1
| 0.048387
| false
| 0
| 0.008798
| 0.002933
| 0.068915
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
63f7a76882e89085791588eb7b92c91db3ba5402
| 1,488
|
py
|
Python
|
data/dataset/NNtest.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
data/dataset/NNtest.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
data/dataset/NNtest.py
|
NEUSoftGreenAI/NeurstrucEnergy
|
94c5c2f4796382f37e0f2f77a4f6484c0e5f2260
|
[
"MIT"
] | null | null | null |
import time
import torch
import torch.nn as nn
class NN_chain(nn.Module):
    """Benchmark model: 51 1x1 convolutions applied sequentially (a chain).

    Each layer consumes the previous layer's output, forming a linear chain
    of dependencies (contrast with NN_DAG, which fans out from one tensor).
    """

    def __init__(self):
        super(NN_chain, self).__init__()
        # Bug fix: the original kept the layers in a plain Python list, so
        # they were never registered as submodules -- parameters() returned
        # nothing and .to()/.cuda() would not move them. nn.ModuleList
        # registers them properly.
        self.conv_list = nn.ModuleList(
            nn.Conv2d(64, 64, 1, 1, 0) for _ in range(51))
        # Per-layer activations, kept so the benchmark retains every
        # intermediate tensor (mirrors the original structure).
        self.x_list = [0 for _ in range(51)]

    def forward(self, x):
        """Run the 51-conv chain; returns the final layer's output.

        Bug fix: the original returned the untouched input ``x``.
        The output shape matches the input (64 -> 64 channels, 1x1 kernel).
        """
        self.x_list[0] = self.conv_list[0](x)
        for i in range(1, 51):
            self.x_list[i] = self.conv_list[i](self.x_list[i - 1])
        return self.x_list[50]
class NN_DAG(nn.Module):
    """Benchmark model: 51 1x1 convolutions fanning out from one tensor.

    Layers 1..50 all consume layer 0's output (a DAG fan-out), so unlike
    NN_chain there is no long dependency chain between them.
    """

    def __init__(self):
        super(NN_DAG, self).__init__()
        # Bug fix: use nn.ModuleList instead of a plain list so the conv
        # layers are registered as submodules (visible to parameters(),
        # moved by .to()/.cuda(), saved by state_dict()).
        self.conv_list = nn.ModuleList(
            nn.Conv2d(64, 64, 1, 1, 0) for _ in range(51))
        # Per-layer activations, kept to mirror the original structure.
        self.x_list = [0 for _ in range(51)]

    def forward(self, x):
        """Apply conv 0 to ``x``, then convs 1..50 each to conv 0's output.

        Bug fix: the original returned the untouched input ``x``; return the
        last branch's output instead (same shape as the input).
        """
        self.x_list[0] = self.conv_list[0](x)
        for i in range(1, 51):
            self.x_list[i] = self.conv_list[i](self.x_list[0])
        return self.x_list[50]
# Micro-benchmark: compare the mean forward-pass latency of a sequential
# conv chain (NN_chain) against a fan-out/DAG arrangement (NN_DAG).
inp = torch.rand(1, 64, 28, 28)
NNchain = NN_chain()
NNDAG = NN_DAG()


def _benchmark(model, label, n_iter=1000):
    """Time n_iter forward passes of `model` on `inp`; print the mean seconds.

    Timing accumulates only the forward-call duration, matching the
    original per-iteration start/stop measurement.
    """
    total = 0.0
    for _ in range(n_iter):
        start = time.time()
        model(inp)
        total += time.time() - start
    print(label, total / n_iter)


# Run the chain/DAG pair twice, as the original script did (the second
# round reflects steady-state timing after warm-up).
for _ in range(2):
    _benchmark(NNchain, 'NNchain')
    _benchmark(NNDAG, 'NNDAG')
| 27.054545
| 68
| 0.612903
| 259
| 1,488
| 3.335907
| 0.142857
| 0.111111
| 0.069444
| 0.127315
| 0.851852
| 0.851852
| 0.851852
| 0.791667
| 0.791667
| 0.791667
| 0
| 0.07099
| 0.233199
| 1,488
| 54
| 69
| 27.555556
| 0.68624
| 0
| 0
| 0.76
| 0
| 0
| 0.016129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.06
| 0
| 0.22
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
126c4d43a033c03a3494b33575fca58d12daf23b
| 68,615
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_ml/ratio_based_results/cmp_radiosity/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.100564,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.281676,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.559272,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.421662,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.730166,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.418771,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.5706,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.331051,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.521,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.105659,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0152856,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.147525,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.113046,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.253184,
'Execution Unit/Register Files/Runtime Dynamic': 0.128332,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.384032,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.895616,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.2816,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00312827,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00312827,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00274298,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00107184,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00162392,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0106234,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0293411,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.108674,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.405788,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.369107,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.923533,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00386787,
'L2/Runtime Dynamic': 0.00103178,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.6015,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.61705,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.108846,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.108846,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.11759,
'Load Store Unit/Runtime Dynamic': 2.26268,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.268395,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.53679,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0952543,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0953077,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0665359,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.72317,
'Memory Management Unit/Runtime Dynamic': 0.161844,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.8961,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.368618,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0259972,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.21024,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.604856,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.23555,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0539097,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.245032,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.296981,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.189692,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.305966,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.154441,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.6501,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.171421,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.67823,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.056106,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00795654,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0774806,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0588435,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.133587,
'Execution Unit/Register Files/Runtime Dynamic': 0.0668,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.17671,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.41254,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.77985,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00178146,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00178146,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00160682,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000652198,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000845291,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00601503,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0151095,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0565677,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.5982,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.210191,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.192129,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.99134,
'Instruction Fetch Unit/Runtime Dynamic': 0.480013,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00220876,
'L2/Runtime Dynamic': 0.000597741,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.99759,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.846233,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0569554,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0569553,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.26655,
'Load Store Unit/Runtime Dynamic': 1.18407,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.140442,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.280884,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0498434,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0498737,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.223723,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0344659,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.465454,
'Memory Management Unit/Runtime Dynamic': 0.0843396,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.9932,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.147589,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0103545,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0929388,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.250882,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.77975,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.055987,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.246663,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.305578,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.190602,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.307434,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.155183,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.653219,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.171144,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.69434,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0577302,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00799471,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.078641,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0591258,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.136371,
'Execution Unit/Register Files/Runtime Dynamic': 0.0671205,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.179674,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.417983,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.79036,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00176798,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00176798,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00159381,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000646467,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000849347,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00597913,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0150256,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0568392,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.61546,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.210303,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.193051,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.00944,
'Instruction Fetch Unit/Runtime Dynamic': 0.481198,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00223929,
'L2/Runtime Dynamic': 0.000677326,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.01441,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.854451,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0574994,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0574995,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.28593,
'Load Store Unit/Runtime Dynamic': 1.19552,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.141784,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.283568,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0503195,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0503506,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.224796,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0344834,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.467345,
'Memory Management Unit/Runtime Dynamic': 0.0848339,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.0488,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.151862,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0104476,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0932938,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.255604,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.80819,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0532824,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.244539,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.293456,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.187407,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.30228,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.152581,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.642267,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.169347,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.66803,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0554402,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00786067,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0765581,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0581345,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.131998,
'Execution Unit/Register Files/Runtime Dynamic': 0.0659952,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.17461,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.407492,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.76567,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00176393,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00176393,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00159165,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000646388,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000835107,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00595461,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0149374,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0558862,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.55484,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.207332,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.189815,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.94588,
'Instruction Fetch Unit/Runtime Dynamic': 0.473925,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.00264135,
'L2/Runtime Dynamic': 0.000805662,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.97464,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.835484,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0562129,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0562129,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.24009,
'Load Store Unit/Runtime Dynamic': 1.16892,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.138611,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.277223,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0491936,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0492294,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.221027,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0340002,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.461642,
'Memory Management Unit/Runtime Dynamic': 0.0832295,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.9078,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.145838,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0102301,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0918155,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.247884,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.74043,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.2319471136709964,
'Runtime Dynamic': 0.2319471136709964,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0142455,
'Runtime Dynamic': 0.00857813,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 79.8601,
'Peak Power': 112.972,
'Runtime Dynamic': 18.5725,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 79.8458,
'Total Cores/Runtime Dynamic': 18.5639,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0142455,
'Total L3s/Runtime Dynamic': 0.00857813,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.071116
| 124
| 0.68211
| 8,082
| 68,615
| 5.785078
| 0.067558
| 0.123538
| 0.112929
| 0.093423
| 0.93913
| 0.931259
| 0.91787
| 0.887092
| 0.86286
| 0.842306
| 0
| 0.132046
| 0.22431
| 68,615
| 914
| 125
| 75.071116
| 0.746411
| 0
| 0
| 0.642232
| 0
| 0
| 0.657354
| 0.048094
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d62e275bf1bd8318cc2e3ba425ab14735f9c9d93
| 152,816
|
py
|
Python
|
testcenter_singlethread/subdialog/Ui_testcommandillustration.py
|
kelakty/Testcenter
|
a82e3b6b5733f94b789c6e2e83ed6c159b21b2a0
|
[
"MIT"
] | null | null | null |
testcenter_singlethread/subdialog/Ui_testcommandillustration.py
|
kelakty/Testcenter
|
a82e3b6b5733f94b789c6e2e83ed6c159b21b2a0
|
[
"MIT"
] | null | null | null |
testcenter_singlethread/subdialog/Ui_testcommandillustration.py
|
kelakty/Testcenter
|
a82e3b6b5733f94b789c6e2e83ed6c159b21b2a0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'd:\testcenter20190116备份(增加qtextedit中进行发送命令)\testcenter20190102备份\testcenter\testcenter_pyqt5\subdialog\testcommandillustration.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_TestCommandIllustration(object):
def setupUi(self, TestCommandIllustration):
TestCommandIllustration.setObjectName("TestCommandIllustration")
TestCommandIllustration.resize(675, 541)
self.textBrowser_testcommand = QtWidgets.QTextBrowser(TestCommandIllustration)
self.textBrowser_testcommand.setGeometry(QtCore.QRect(0, 40, 671, 501))
self.textBrowser_testcommand.setObjectName("textBrowser_testcommand")
self.lineEdit_searchexpression = QtWidgets.QLineEdit(TestCommandIllustration)
self.lineEdit_searchexpression.setGeometry(QtCore.QRect(10, 10, 311, 20))
self.lineEdit_searchexpression.setStatusTip("")
self.lineEdit_searchexpression.setAccessibleName("")
self.lineEdit_searchexpression.setInputMask("")
self.lineEdit_searchexpression.setText("")
self.lineEdit_searchexpression.setPlaceholderText("")
self.lineEdit_searchexpression.setObjectName("lineEdit_searchexpression")
self.pushButton_search = QtWidgets.QPushButton(TestCommandIllustration)
self.pushButton_search.setGeometry(QtCore.QRect(330, 10, 75, 23))
self.pushButton_search.setObjectName("pushButton_search")
self.radioButton = QtWidgets.QRadioButton(TestCommandIllustration)
self.radioButton.setGeometry(QtCore.QRect(480, 10, 89, 21))
self.radioButton.setObjectName("radioButton")
self.radioButton_2 = QtWidgets.QRadioButton(TestCommandIllustration)
self.radioButton_2.setGeometry(QtCore.QRect(560, 10, 89, 21))
self.radioButton_2.setChecked(True)
self.radioButton_2.setObjectName("radioButton_2")
self.label_searchcount = QtWidgets.QLabel(TestCommandIllustration)
self.label_searchcount.setGeometry(QtCore.QRect(410, 10, 71, 21))
self.label_searchcount.setText("")
self.label_searchcount.setObjectName("label_searchcount")
self.retranslateUi(TestCommandIllustration)
QtCore.QMetaObject.connectSlotsByName(TestCommandIllustration)
def retranslateUi(self, TestCommandIllustration):
_translate = QtCore.QCoreApplication.translate
TestCommandIllustration.setWindowTitle(_translate("TestCommandIllustration", "Form"))
self.textBrowser_testcommand.setHtml(_translate("TestCommandIllustration", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">---------------------------------------------------------------------</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">锐捷网络股份有限公司 园区与城域网交换机事业部 研发1部 冯久奎 整理笔记</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">【声明:请遵循公司信息安全规定!未经同意,请勿复制或转发本文档内容】</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">交换机调试工具SecureCRT 新建一个连接时需要选择串口serial才能添加com </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">使用前必须点击会话选项--串行选项--流控,关闭RTS/CTS选项!!!</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">交换机软件主程序位置Y:\\Design Document\\BAN\\SWITCH\\S6510-4C\\Software</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">交换机自动保存会话: 文件--会话日志 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">注意关闭覆盖文件: 选项-会话选项-日志文件- 选追加到文件</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">EEPROM的烧写:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">一般</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">下载程序:使用Tftpd32程序 设置程序安装bin文件的目录,切记目录必须为中文路径否则无效。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">设置完后用将计算机网口通过网线与交换机MGMT口连接。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">将串口线与电脑串口连接,打开secureCRT软件,设置serial串口通信模式。设置通信波特率(115200或者9600)及端口。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">打开交换机,按ctrl+c 进入启动菜单界面,</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">选4.scattered utilities 3.advanced settings 4.format flash filesystem </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">选0.Tftp utilities,1.upgrade kernel and rootfs by install package---输入与本机同一网段的任意IP地址--输入本机IP地址。输入需要下载的程序bin文件。此时Tftpd32软件会自动传送并安装。 如果一直不会自动传送则重启Tftpd32软件。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">注意启动程序和主程序所用的串口波特率可能不一样。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">修改主程序中串口波特率命令:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">line 0</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">speed 115200(将这个波特率设置成起机时能够正常打印的那个波特率)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">end</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">wr 写入</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2.run main 运行主程序后,输入en 可进入命令行界面</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入con 进入配置命令行</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">DOA时效 清除上电时间 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">uptime clear</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">end</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示历史开机</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show uptime history</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示端口状态:show int sta = show interface sta</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">重启: reload</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看接口设置和统计信息。 show interface </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show interface ? 可以查看有哪些速率的端口可设置</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置端口速率模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int range tfg3/1-24</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port speed 10G</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int tfgigabitethernet 3/1 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int range tfg4/1-24</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port speed 10G</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int tfgigabitethernet 4/1 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置端口速率</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int rang hundredgigabitethernet 1/1-8 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">speed 40g</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int rang hundredgigabitethernet 2/1-8 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">speed 40g</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看端口速率</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int hundredgigabitethernet 1/1 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int hundredgigabitethernet 2/1</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示软件信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show ver detail </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cpld显示信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show cpld version </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">12.X:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show cpld-fpga version</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cpld在线升级,拷贝rpm格式文件到u盘根目录,在en下执行下列命令</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">upgrade usb0:/s6k_firmware-cpld-1.0.0.44f1786-1802021940.mips.rpm</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">如果主程序没有firmware_driver.ko 文件的,需要拷贝firmware_driver.ko firmware_upgrade 到sbin目录下 拷贝s57h_cpld_1_cpld_v0.vme 到根目录下。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">命令为:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ls 显示当前目录</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cd /mnt/usb0 进入当前usb0 文件夹目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cd .. 返回上一级目录</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cp /mnt/usb0/firmware_driver.ko /sbin 复制usb0中firware_driver.ko 文件到sbin目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cp /mnt/usb0/firmware_upgrade /sbin 复制usb0中firmware_upgrade文件到sbin目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cp /mnt/usb0/s57h_cpld_1_cpld_v0.vme / 复制usb0中s57h_cpld_1_cpld_v0.vme文件到根目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">insmod /sbin/firmware_driver.ko</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">./sbin/firmware_upgrade s57h_cpld_1_cpld_v0.vme cpld 0 cpld</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">测试cpld在线升级通路:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">按上面操作方式将文件复制到/sbin目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">之后输入加载insmod /sbin/firmware_driver.ko</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">运行sbin目录下的firmware_upgrade,命令: ./sbin/firmware_upgrade cpld test</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">通过运行结果判断cpu与cpld通讯管脚是否正常,提示FAIL表示通讯存在异常问题,提示PASS表示通讯正常。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">另一升级方式(不兼容,不可加载insmod /sbin/firmware_driver.ko,否则需要使用rmmod /sbin/firmware_driver.ko将模块卸载)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">到run-system-shell下/sbin/里查看是否有firmware_driver.ko、firmware_upgrade </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">/ # cd sbin</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">/sbin # ls -l firm*</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">-rw-r--r-- 1 guest guest 651944 Aug 24 16:50 firmware_driver.ko</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">-rwxr-xr-x 1 guest guest 167700 Aug 24 16:50 firmware_upgrade</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">在sbin里,调用firmware_upgrde升级cpld。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ./firmware_upgrade /data/S6120_20XS4VS2QXS_0730_for_1.0B_header.vme cpld 1 cpld </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">此时不会自动打印信息。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">若想看调试信息,请在根目录下创建文件,命令:touch .firmware_upgrade_debug,再通过vi .firmware_upgrade_debug命令写入内容,内容为3。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">touch /.firmware_upgrade_debug</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">echo 3 > /.firmware_upgrade_debug</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">打开log开关</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">强制风扇风速</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">fan mode defined speed-level 2</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">风扇有三种工作模式,分别在conf里面进行设置</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">1)配置风扇转速 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">N18K-X(config)#fan mode defined speed-level 1 --可以手动设置1~8种转速模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">N18K-X(config)#fan mode quiet --设置为静音模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">N18K-X(config)#fan mode normal --设为正常模式,这也是出厂的缺省模式</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">每种模式下,show fan detail 显示 信息是不一样的</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">进入uboot BootLoader</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">开机按ctrl+c进入菜单命令-按ctrl+q进入BootLoader命令--输入ubootui回车--按ctrl+P(3秒内) 进入ubootui命令行 输入mmc erase 0 0 0 e90000 //格式化EMMC</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">升级uboot</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">env set ipaddr 192.168.193.55 //交换机ip,需要使用和服务器同网段的无人占用的ip</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">env set serverip 192.168.193.234 //服务器ip,即开启tftp服务器设备的ip</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">env set ethact octrgmii0 //设置接口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">tftp $(loadaddr) u-boot-spi.rom; 确认命令成功后再执行下一步操作</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sspi 1 24 005700;sf probe;sf update 0x20000000 0x0 0x1000000 //升级第一片FLASH</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sspi 1 24 005701;sf probe;sf update 0x20000000 0x0 0x1000000 //升级第二片FLASH</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">退出bootloader界面</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">simpleui -menu</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">uboot下删除配置文件,删除密码</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ctrl+C进入uboot</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ctrl+Q 后再输入ubootui</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">回车后按一次ctrl+p 3s内</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入setenv runlevel2 设置linux运行级别</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入run linux </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入在该模式下删除文件,需要输入sync进行同步配置</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">日志自动打印</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">log on 或者logging on 或者 logging console</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示日志</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show logging</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭日志打印</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no log on</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">usb复制文件</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">copy flash:/XX.txt usb0:/XXX.txt //从flash复制文件到usb0</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">部分机子是在en下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">copy flash:/XX.txt usb0</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除usb中文件</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">del usb0:/XXX.txt</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">移除usb设备</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">usb remove 0</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示usb文件信息 在en下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show usb </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">或show usb0 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">或者其他</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">dir usb0 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">设置系统时间:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#clock set hh:mm:ss month day year //设置系统的日期和时钟</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show clock //查看系统时间</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">clock update-calendar //用软件时钟来更新硬件时钟</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">保存配置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#write</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置vlan </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r gi0/3-4</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r gi0/3,0/4</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 3</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show vlan </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">vlan配置完如果会一直自动变动的需要在con下输入</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no zam</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">setmac mac物理地址为12位 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">序列号setsn为13位 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示不同槽位板卡信息 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sh manu </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示风扇信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show fan</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示风扇速度</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show fan speed</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示温度</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show temp</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示电源信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show power</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">设备上的ID灯用于现场定位用,如果很多台设备不知道哪台是自己要找的,此时led enable 亮蓝色灯 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">寄存器基线制作只需要与cpu相关的寄存器(nandflash,norflash,emmc,dram,pcie,usb)(FPGA,CPLD不需要)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">寄存器基线测试</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">insmod /sbin/dram_hwtest.ko</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">hw_test.bin reg_rd64 0x0001180000000000 后面为需要读的寄存器地址</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">exit退出shell命令行</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">光模块查询 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show interfaces tenGigabitEthernet 1/51 transceiver</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show interfaces transceiver alarm</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show interfaces te0/1 transceiver manuinfo </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看节点温度信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">入风口温度air_inlet / board1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">出风口温度board / board2</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">MAC的节点温度 SDK下获取:MAX(MON_RESULT0..7)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show temp</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa进不去一直有一个exit退出</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">可以试一下:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">1) 按Ctrl+] 会进入telnet程序的配置模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2) 输入set crlf on 回车,再回车</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">端口预加重测试</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">千兆端口默认没有预加重</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入ps 可查看端口状态信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy diag xe0 dsc</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">读预加重值</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">linkscan off</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe0 0x1f 0x81f0;phy int xe0 0x12 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe0 0x1f 0x8060;phy int xe0 0x17 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe0 0x1f 0xffd0;phy int xe0 0x1e 0x0;phy int xe0 0x1f 0x82e0;phy int xe0 0x12 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe1 0x1f 0x8070;phy int xe1 0x17 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe1 0x1f 0xffd0;phy int xe1 0x1e 0x1;phy int xe1 0x1f 0x82e0;phy int xe1 0x12 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe2 0x1f 0x8080;phy int xe2 0x17 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe2 0x1f 0xffd0;phy int xe2 0x1e 0x2;phy int xe2 0x1f 0x82e0;phy int xe2 0x12 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe3 0x1f 0x8090;phy int xe3 0x17 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy int xe3 0x1f 0xffd0;phy int xe3 0x1e 0x3;phy int xe3 0x1f 0x82e0;phy int xe3 0x12 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">linkscan on</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">PCIE的配置空间可能不能这样读</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ctrl+】 退出</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">exit 退出shell命令 </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看交换机被配置过的信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show run</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">连通性测试:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">连通性测试需要关闭LLDP功能。 //协议包关闭</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no lldp enable</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">end</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa 看管理板</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show c</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa 22 看FE卡 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show c </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">特殊时候需要关闭stp协议,通常默认已经是关闭的</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭广播</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">先spanning-tree</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">然后在no spanning-tree</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">有时还需关闭</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no zam</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭广播风暴:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">interface+具体接口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no storm-control unicast </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no storm-control broadcast </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">打流流通性测试:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">端口自环</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ruijie#Confi</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ruijie(Config)# No span</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ruijie(Config)#No lldp enable</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">步骤3:打广播报文命令执行:在shell界面下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">modprobe cpsp_prot_dbg_ko.ko test_pkt=1 type=1 modid=27 port=6 vid=1 efg=1 prot_err=1</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">流跑起来端口灯就闪,之后show int sta、show int c r、clear c、show int c e这样跑一会再show一下,就结束了</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int co rate 查看端口收发速率</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int c s 查看端口收发计数</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show int c e 查看端口错包数</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">clear c 清空端口计数</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示光模块信息:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# show interfaces +具体端口+transceiver // 显示指定接口光模块基本信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show interfaces +具体端口+transceiver alarm //显示指定接口光模块当前故障,无故障显示none</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除文件:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie>en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#delete +文件名</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">重命名文件:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie>en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#rename +旧文件名+新文件名 //有些版本rename +flash:旧文件名+flash:新文件名</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">POE相关配置功能:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">交换机面板前的POE按钮代表端口点灯为POE状态灯或者交换指示灯</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">PD分级过程:在PD端芯片上,有个管脚上接Rclass电阻,PSE芯片发出18V左右的电压,根据I=V/R,PSE端根据采集到的电流,识别对端PD是什么等级的。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie> en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# conf</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#interface+ 具体接口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if-range)# poe enable //打开POE</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if-range)# no poe enable //关闭POE</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if-range)#end //退回特权模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# show poe powersupply //查看POE供电情况</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# show poe interfaces //查看POE供电接口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show poe interfaces status 查看所有端口状态</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# poe mode //poe模式,节能,静态,自动</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">进入端口下 show this可以查看poe最大功耗</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">控制台速率配置</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#configure terminal //进入全局配置模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#line console 0 //进入控制台线路配置模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-line)#speed 9600 //设置控制台速率为9600</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-line)#end //回到特权模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show line console 0 //查看控制台配置</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">重启</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#reload //直接重启命令</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#reload at hh:mm month day [year] //设置重启系统时间和日期</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# show reload //确认修改重启时间生效</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#reload cancel //删除已设置的重启计划</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">简单IP配置、路由设置、连通性测试:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置Routed port</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#no switchport //将该接口shut down 并转换成三层模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#ip address +ip +子网掩码 //配置ip地址和子网掩码。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#ping +ip地址 //ping 指定ip</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">部分主程序在零配置下会启动ZAM功能,此功能影响主程序端口测试,设备每次重启需要将此功能进行关闭,具体关闭命令如下:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#configure </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# no zam</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# default int range tf 0/1-48</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# default int range hu 0/49-56</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# exit</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Chassis交换机特有命令,Box交换机不涉及:</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示网络设备当前的插槽和模块信息: </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show version slots</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">强制上电、下电:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#no power enable 1 module +槽位 //强制对应槽位下电。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#power enable 1 module +槽位 //强制对应槽位上电。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#power off slot 1 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">power off slot fe3</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">power on slot fe3</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">reset module fe3</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2.2.3、主从引擎切换: </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#redundancy forceswitch //强制主从引擎切换。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Proceed with switchover to standby PRE? [N/y]y</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">解除槽位冲突:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#remove configuration module+具体槽位 //清除槽位信息</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">卸载和安装线卡:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# no install slot+具体槽位 //在某个槽位卸载模块</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# install slot+具体槽位 //在某个槽位安装指定的模块</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">电源冗余控制的相关命令</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#power redundancy 1 enable 让一个电源冗余</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no power redundancy 无冗余电源</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">打开从管理板串口:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">按键盘:@@@@+c</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">@@@@+i</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">esc esc esc +i </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">主程序强制升级boot、ctrl版本号:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie>en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# debug support</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#(support)# upgrade force slot all</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看交换机底层背板hg接口状态</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa 4 //板卡号</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ps hg </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除show int sta里面及show ver slot里保存的已经拔掉的板卡信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">remove configuration slot 2</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">断开VSU VSL链路</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switch convert mode standalone</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">将配置信息保存即使断电重启也不会消失</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入wr</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">poe使能必须先选中poe的端口 例如:int r mt0/1-12 之后输入</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">poe enable 或者 no poe enable</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示poe电源</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sh poe pow</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示poe端口poe信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show poe interfaces configuration </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置poe mode为静态时,alloc power分配的功率默认为0,需要配置功率才能供电。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">进入端口下poe alloc-power 30</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">退出生测模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">按四次esc + b 就可以重启</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">vlan配置</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/1,2/3</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 5</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/5,2/7</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 6</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/9,2/11</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 7</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/13,2/15</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 8</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/17,2/19</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 9</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/21,2/23</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 10</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/4,2/6</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 11</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/8,2/10</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 12</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/12,2/14</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 13</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/16,2/18</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 14</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/20,2/22</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 15</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int r te2/24,2/2</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switchport access vlan 16</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看mac芯片信息,机箱管理板查看</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug ssp enable slot all</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sd</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">cons on</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ps</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">线卡直接查看pcie通路</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">~ # lspci</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">正常会显示如下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">01:00.0 Ethernet controller: Broadcom Corporation: Unknown device b640 (rev 11)</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">交换机40G端口一分四为4个10G</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#split int for 0/25 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#no split int for 0/25 恢复</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">选择接口介质类型:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# configure</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# interface range接口ID </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#medium-type +fiber或copper //设置端口的介质类型</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#end</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">设置接口的双工模式:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# configure</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# interface range接口ID </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#duplex+auto或full或half //设置端口的双工模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#end</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">设置接口的流控模式:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie# configure</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)# interface range接口ID </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#flowcontrol+auto或on或off //设置接口的流控模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if)#end</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">显示接口信息:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#show interface status</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">重命名文件:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie>en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#rename +旧文件名+新文件名 //有些版本rename +flash:旧文件名+flash:新文件名</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭mac地址学习</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">mac-address-learning disable</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">主程序界面下打开线卡的DEBUG命令:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ruijie# debug ssp enable slot all // BCM厂家芯片: </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ruijie# debug ssp rem MARVERL厂家芯片:</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">DEBUG常用命令:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sd 进入ngsa-cli</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.1、打开串口:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>console on</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.2、打开槽位或双MAC芯片: </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ngsa-cli>su+unit具体槽位 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">//Chassis产品 unit<10 本地设备,Unit>=10 ,则十位数代表线卡号,个位数代表线卡上的unit号。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ngsa-cli>su+unit (其中unit=0或1)//box产品或模块类型卡双MAC方案</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.3、显示各端口状态</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>ps</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.4、打开端口使能:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>port all en=1 //en=1表示打开,en=0表示关闭</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.5、自协商设置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ngsa-cli>port all an=0 //an=0表示自动协商关闭 an=1表示自动协商打开。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">千兆以上速率都没有自协商功能,底层SDK下看ps 端口状态自协商都为no</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">speed auto是软件功能</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">千兆口及以上的速率 需要两端相同速率才能up</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.6、配置端口速度:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> ngsa-cli>port all(或ge或fa或hg或具体端口号) sp=10(或100或1000或10000或12000或21000等)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.7、光电口切换:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>combo+具体端口号+fiber或 copper+en=1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"> //也可以在en=1前添加force_speed=具体速度 force_duplex=1 autoneg_enable=0 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.8、计数器清零:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>clear counters </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.9、查看计数器,判断收发帧是否异常:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>show counters </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.10、开启自环:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>port xe/ge/fe? lb=phy/mac //xe/ge/fe表示万兆口/千兆口/百兆口;?代表端口号,</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.11、关闭自环:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>port xe/ge/fe? lb=none </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.12、发包测试,数值可以自选:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>tx 10000 pbm= xe/ge/xe? </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.13、读取外部Phy寄存器:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>phy xe/ge/xe? </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.14、读取内部Phy寄存器</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>phy int xe/ge/xe? ---</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.15、擦写:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>errerase</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.16、双工设置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> port+端口号+ fullduplex=false //表示关闭双工</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.17、流控设置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> port all TPAU=OFF RPAU=OFF //表示关闭所有口的流控</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.18、VLAN配置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>vlan show //查看VLAN信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>vlan create +数字 //创建VLAN</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>vlan add 数字 PBM=端口号 UBM=端口号 //端口添加vlan</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> pvlan set+地址+数字 //设置vlan</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> pvlan //查看设置vlan情况</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> vlan 数字 remove PBM=端口号 UBM=端口号 //删除VLAN</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.19、查看各端口计数信息:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>G grpkt //或者G rpkt</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>G gtpkt //或者G tpkt</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>G grfcs //或者G rfcs</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>G gtfcs //或者G tfcs</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>show c ed // 可以显示所有错误帧</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.20、CPU发包设置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>tx 1 pbm=fe?/ge?/xe?</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.21、配置1000base-t测试(以5464为例)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli>phy ge 0x00 0x0040</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> phy ge 0x18 0xf067</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> phy ge 0x09 0x3d00 //模式一测试</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> phy ge 0x09 0x5d00 //模式二测试</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> phy ge 0x09 0x9d00 //模式四测试</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3.22、MAC寄存器的读取和设置:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> getreg +具体寄存器的名称 //读取</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-cli> setreg +具体寄存器的名称 //设置</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">通过MGMT口以及TFTP软件传送文件到交换机</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int mgmt0</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ip address 192.168.193.55/24 设置同一局域网段下任意IP</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">gateway 192.168.193.1 设置网关</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">end</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ping oob 192.168.193.234 ping一下看是否能与电脑ping通ip为电脑端IP</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">升级cpld</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">upgrade download oob_tftp://192.168.193.234/s6k_firmware-cpld-1.0.0.08fb9c8-1808241625.mips.rpm </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭web-server</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#no enable service web-server http</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#no enable service web-server https</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭vlan ip</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int vlan 1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no ip address</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">S6120端口预加重读取</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy diag xe dsc</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">MGMT口自环在生测--进入调试程序界面--输入shell--进入shell界面下 输入</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ethtools test phy_loopback mgmt00 1000 1000</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ethtools test phy_loopback mgmt00 100 1000</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">进行测试,结果pass则通过</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置端口为25G端口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port xe0-xe3 if=sfi sp=10000 an=0 配置为光模块模式SFI</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port xe0-xe3 if=cr sp=10000 an=0 配置为铜缆模式CR</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">线卡格式化:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ubootui下敲mmc erase 0 e90000,然后reset</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">管理板和FE卡格式化:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">emmc erase 0 sec_erase</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">退出拷机程序:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">esc</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">更新生测程序:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生产测试菜单--- 6. 进入调试程序界面---</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Factory> shell 进入shell界面</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ls -l /mnt/usb0</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">exit</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Factory> upgrade /mnt/usb0/\"N18000-X_RGOS11.3(1)B6P17_CMXII-CB_install.bin\" plug</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">factory get pkg /mnt/usb0/N18000-X_RGOS11.3(1)B6P17_CMXII-CB_install.bin</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">等全部升级完。exit到菜单,选择全局复位,整机重启。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">更改接口类型</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">en</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port xe0 speed=10000 an=off if=sfi/sr/gmii</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">未知功能</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">phy prbs xe0 set mode=hc p=3</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">选择字符|用来快速显示需要看的端口号</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config-if-range)#sh in st | include 7/19</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">GigabitEthernet 7/19 down routed Unknown Unknown copper</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show card voltage //查看板卡电压</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show power //查看电源信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show power version //查看电源版本信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show power priority //查看各板卡上电优先级</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">power priority slot 3 5 //设置线卡3上电优先级为5</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show fan //查看风扇信息</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show fan detail</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show fan version</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除机箱某槽位的线卡</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">config</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">remove config module 3</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">在生测升级模式下或者生测退出后再回到生测菜单项</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">~ # switch_</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">switch_mode switch_root</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">主程序下打开从管理板串口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">shift+@@@@c</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">关闭某个vlan</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie(config)#no vlan 101</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)#duplex full 配置端口为全双工模式,可选full( 全双工),half( 半双式),auto( 自</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">适应)</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)#shutdown 关闭该端口</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)#no shutdown 开启该端口</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">聚合端口的创建</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)# interface aggregateport 1 创建聚合接口AG1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)# switchport mode trunk 配置并保证AG1 为trunk 模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#int f0/23-24</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if-range)#port-group 1 将端口(端口组)划入聚合端口AG1 中</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生成树</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#spanning-tree 开启生成树协议</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#spanning-tree mode stp 指定生成树类型为stp</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">可选模式stp , rstp , mstp</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#spanning-tree priority 4096 设置交换机的优先级为4096 , 优先级值小为高。优</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">先级可选值为0,4096,8192,,, ,为4096 的倍数。交换机默认值为32768</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">VLAN 的基本配置</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#vlan 10 创建VLAN10</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-vlan)#name vlanname 命名VLAN 为vlanname</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)#switchport access vlan 10 将该端口划入VLAN10 中</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">某端口的接口配置模式下进行</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config)#interface vlan 10 进入VLAN 10 的虚拟端口配置模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">(config-if)# ip address 192.168.1.1 255.255.255.0 为VLAN10 的虚拟端口配置IP 及掩码,</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">二层交换机只能配置一个IP,此IP 是作为管理IP 使用, 例如, 使用Telnet 的方式登录的IP</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">地址</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看交换机动态学习到的MAC 地址表</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show mac-address-table dynamic</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sdk下开关流控,pause帧</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">port xe TxPAUse=on RxPAUse=on</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看交换机的arp 表</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show arp</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">禁止端口号为135 的应用。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-ext-nacl)# deny tcp any any eq 135</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">禁止协议为www 的应用。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-ext-nacl)#deny udp any any eq www</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">允许所有行为。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch(config-ext-nacl)# permit ip any any</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">将ACL 应用到具体的接口上:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config)#interface range f 0/1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">把名为ruijie 的ACL 应用到端口f 0/1 上。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-if)#ip access-group ruijie in</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">从接口去除ACL 。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-if)#no ip access-group ruijie in</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除ACL:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">删除名为ruijie 的ACL 。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch(config)#no Ip access-list exten ruijie</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">增加ACE 项后,是增加到ACL 最后,不可以中间插入,如果要调整ACE 的顺序,必须整</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">个删除ACL 后再重新配置。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置交换机Telnet 功能</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置远程登陆密码</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch(config)#enable secret level 1 0 ruijie</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">配置进入特权模式密码</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config)#enable secret level 15 0 ruijie</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">为交换机配置管理IP</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config)#interface vlan 1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-if)#no shutdown</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-if)#ip address 192.168.1.1 255.255.255.0</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Switch (config-if)#end</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">光模块假模块识别与log提示,需要开启命令才会自动识别</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">fiber antifake enable</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">故障线缆检测短路断路</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">con</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">int gi0/1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">line-detect</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看端口IP</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sh ip interface brief</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看HG是否正常</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Ruijie#run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">~#debug-ssa</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">SDK>ps</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">从管理板hg</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">~#debug-ssa 31</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">SDK>ps</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sh ex sl 1 机架式 查看slot1槽位死机信息</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">从从boot启动</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">开机按ctrl +s</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">网关类产品端口运行在3层模式下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">切换到网关模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">asme1000(config)#sys-mode-ace gateway </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">切换完需要wr保存后重启整机</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">网关模式切换回 网桥模式 </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">no sys-mode gateway</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">切换完需要wr保存后重启整机</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">网关模式下才能对端口配IP</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">在SDK下切换mac0、mac1、mac2等</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sdk.0> 1: 切换到mac1的sdk </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sdk.1> 0: 切换到mac0的sdk</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">10G铜缆向下兼容1000M、100M</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">1000M光模块向下兼容100M</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">端口link不上时首先考虑自协商问题。在底层查看link状态以及自协商状态。保证两端的自协商必须一致。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">管理板工装机箱生测</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">故障插入测试</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">管理板I2C的SDA信号短接到地。注意通常风扇、电源、线卡为一路I2C。LM75和FE卡为一路I2C。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">二、强制从 “从FLASH”启动:bit2 ――(验证OK)</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">1、mw.b 0x14000019 0x5c 配置CPLD寄存器,强制从”从FLASH启动”</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">2、制造热复位:必需是TPS3823芯片送出RST信号的热复位,比如停狗复位,又比如CM可用前面板按钮,FE可通过CM复位它write64b 0x14000001 0x00 或本板自己复位自己write64b 0x14000040 0xFE</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">3、复位之后,进入ubootui确认启动位置,md.b 0x14000019 1</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">结果:0X9C,表明从“从FLASH”启动成功</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">12.X机架式系统,修改自动同步路径</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">upgrade auto-sync package flash:upgrade/</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看自动同步路径:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show upgrade auto-sync </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看升级状态</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show upgrade status</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">开启自动同步升级功能:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">upgrade sync-server open </p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">11.X(8612E N18012) 升级步骤:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">线卡TFTP方式升级生测,之后进入ctrl+u生测升级模式,将生测程序文件拷贝进入U盘,输入命令:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Factory> upgrade /mnt/usb0/"S8600E_RGOS11.0(4)B13_CM_install.bin" plug</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">需要按两次回车</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生测升级主程序</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Factory> shell进入shell命令行下。输入</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">fac_upgrade /mnt/usb0/S8600E_RGOS11.0\\(4\\)B13P1_CM_install.bin </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">输入:show upgrade status查看升级状态。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">进入uboot下管理板切换RGOS模式到user模式,进入主程序后进入底层sdk下,输入:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">factory_mode status查看线卡状态是否为主程序,输入:factory_mode check进行切换到主程序。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生测下返回上级菜单 按z 或ctrl+z</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">12.X系统生测相关:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">跳过等待设备初始化:CTRL+Q 进入板卡防呆检测;CTRL+D进入生测调试界面;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">跳过RTC时间检测: 第一级菜单下ctrL+Q;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生测版本不包含智能监控组件:show power 等智能监控的命令无效;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">升级(非正规生产流程升级方式):</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">i. 整框板卡都是生测版本情况:起机进入生测菜单后,退出生产测试,进入cli特权模式,cli下升级板卡(同主程序升级);或采用如下的ii方式亦可;</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ii.主管理板为生测,框中混杂有主程序版本板卡时:起机,输入三次CTR+U进入升级模式,进入cli特权模式,cli下升级板卡(同主程序升级);</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ps:如果升级后的版本是生测版本,且要进行拷机测试等,最好(特别是在产线或测试处,进行非正规流程的升级后要进行验证回归等)起机一次进入菜单清除下生测数据(为避免由于之前存在太多的生测数据导致后续由于空间不足,测试log存储不全,最好要做下这个操作,将data数据清空);</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">退出生产测试 后一直没有反应:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">A. 1. 修改波特率;2. 确认波特率正确却进不去cli,就是业务有问题,卡住进不去cli, @@@@ +i 查看未就绪组件,找对应业务组件支持,或找软件项目组;</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生测调试下:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Factory> power [on/off] slot_id - 上下电对应的板卡</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">线卡升级uboot,使用bin或者rom文件:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">将文件rom文件拷贝到管理板的flash:upgrade/ 目录下</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">查看show upgrade auto-sync 自动同步升级目录是否正确</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">线卡单独接串口进入uboot,选择4.burn the total flashROM by this download file进行升级,输入local ip:192.168.64.5 Remote ip:192.168.64.1</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">测试出现问题检查以下:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">生测测试时端口收发帧测试全部丢包,可能是测试之前进行了XXX测试</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">主程序连通性测试打流时出现vlan配置正确但是流打不通,到某个位置过不去,可以show arp看下端口学到的mac地址,可以关闭地址学习再进行测试mac-address-learning disable。因为可能是打单播流时,某个口有对应的地址,则直接转发到相应的端口而不再按着广播的方式走。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">连通性测试打流时出现丢包,当使用的是双片以上的mac芯片的方案时,交换机配置的vlan走的全部是整机的hg口,不管是否是单片mac内。所以如果整机配置蛇形流时,可能hg带宽不足就会导致丢包,这是正常的限制。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">S2910C-24GT2XS-HP-E扩展卡不支持热拔插。</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">RG-S6000C-48GT4XS-E扩展卡不支持热拔插。</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">工业交换机IS2700G升级方式:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ctrl+C进入ctrl菜单项或者命令模式</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">如果是命令模式,则升级指令如下:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">tftp 192.168.213.123 192.168.213.154 ngsa-factory-s26i.bin_ -main</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">ngsa-factory-s26i.bin_为生测文件名</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">盛科芯片命令</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">run-system-shell</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">debug-ssa </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">sdk</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">show port mac-link</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">华为机子默认密码</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">admin </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">admin@huawei.com</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">---------------------------------------------------------------------</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">锐捷网络股份有限公司 园区与城域网交换机事业部 研发1部 冯久奎 整理笔记</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">【声明:请遵循公司信息安全规定!未经同意,请勿复制或转发本文档内容】</p></body></html>"))
self.lineEdit_searchexpression.setToolTip(_translate("TestCommandIllustration", "<html><head/><body><p>请在此输入搜索内容</p></body></html>"))
self.pushButton_search.setText(_translate("TestCommandIllustration", "搜索"))
self.radioButton.setText(_translate("TestCommandIllustration", "上一个结果"))
self.radioButton_2.setText(_translate("TestCommandIllustration", "下一个结果"))
| 152.358923
| 282
| 0.703159
| 26,315
| 152,816
| 4.078548
| 0.052176
| 0.238235
| 0.061765
| 0.158823
| 0.887055
| 0.883692
| 0.882844
| 0.88208
| 0.88208
| 0.88208
| 0
| 0.049041
| 0.065157
| 152,816
| 1,002
| 283
| 152.510978
| 0.702224
| 0.001963
| 0
| 0.334343
| 1
| 0.044444
| 0.241791
| 0.048483
| 0
| 0
| 0.002374
| 0
| 0
| 1
| 0.00202
| false
| 0.00202
| 0.00101
| 0
| 0.00404
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c3eeb73932268f35d693f09f2b1809693406b143
| 1,748
|
py
|
Python
|
bx_django_utils/models/queryset_utils.py
|
boxine/bx_django_utils
|
1864cb062ecb7570954c8142ef6765af25931760
|
[
"MIT"
] | 7
|
2021-05-21T08:52:30.000Z
|
2022-02-23T09:06:06.000Z
|
bx_django_utils/models/queryset_utils.py
|
boxine/bx_django_utils
|
1864cb062ecb7570954c8142ef6765af25931760
|
[
"MIT"
] | 3
|
2021-06-11T11:27:43.000Z
|
2022-02-08T17:30:19.000Z
|
bx_django_utils/models/queryset_utils.py
|
boxine/bx_django_utils
|
1864cb062ecb7570954c8142ef6765af25931760
|
[
"MIT"
] | null | null | null |
from copy import deepcopy
from typing import Type
from django.db.models import Model, Q, QuerySet
from django.db.models.sql.where import NothingNode
def remove_filter(queryset: QuerySet, lookup: str) -> QuerySet:
    """
    Remove an applied .filter() from a QuerySet.

    Returns a copy of *queryset* whose WHERE tree no longer contains
    conditions on the column that *lookup* resolves to. The original
    QuerySet object is not modified.
    """
    if queryset.query.is_empty():
        # Nothing to remove if queryset is empty e.g.: ...objects.none()
        return queryset

    queryset = deepcopy(queryset)  # also drops the QuerySet's result cache
    query = queryset.query

    # Resolve the lookup into a clause once, so we can compare its target
    # column against the existing WHERE children.
    # NOTE(review): Query._add_q() is a private Django API — re-verify on
    # Django upgrades.
    clause, _ = query._add_q(Q(**{lookup: None}), query.used_aliases)
    # Hoisted: the resolved target column is invariant over the whole walk.
    target = clause.children[0].lhs.target

    def keep(node):
        # Return True if *node* should stay in the WHERE tree.
        if hasattr(node, 'lhs'):
            # Leaf lookup: drop it when it points at the removed column.
            return node.lhs.target != target
        if isinstance(node, NothingNode):
            return False
        # Compound node: keep it only when every child survives
        # (all() short-circuits, unlike the len(list(filter(...))) form).
        return all(keep(child) for child in node.children)

    query.where.children = [child for child in query.where.children if keep(child)]
    return queryset
def remove_model_filter(queryset: QuerySet, model: Type[Model]) -> QuerySet:
    """
    Remove an applied .filter() from a QuerySet if it contains references
    to the specified model.

    Returns a copy of *queryset*; the given QuerySet is not modified.
    """
    if queryset.query.is_empty():
        # Nothing to remove if queryset is empty e.g.: ...objects.none()
        return queryset

    queryset = deepcopy(queryset)  # also drops the QuerySet's result cache
    query = queryset.query

    def keep(node):
        # Return True if *node* should stay in the WHERE tree.
        if hasattr(node, 'lhs'):
            # Leaf lookup: drop it when its target column belongs to *model*.
            return node.lhs.target.model != model
        if isinstance(node, NothingNode):
            return False
        # Compound node: keep it only when every child survives
        # (all() short-circuits, unlike the len(list(filter(...))) form).
        return all(keep(child) for child in node.children)

    query.where.children = [child for child in query.where.children if keep(child)]
    return queryset
| 29.627119
| 96
| 0.669908
| 221
| 1,748
| 5.230769
| 0.253394
| 0.067474
| 0.058824
| 0.079585
| 0.716263
| 0.716263
| 0.716263
| 0.716263
| 0.716263
| 0.641869
| 0
| 0.000736
| 0.22254
| 1,748
| 58
| 97
| 30.137931
| 0.84989
| 0.182494
| 0
| 0.709677
| 0
| 0
| 0.004301
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.129032
| false
| 0
| 0.129032
| 0
| 0.580645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
c3fa7b1cb22c37634348c5f0ae514c289a5d1d9b
| 17,702
|
py
|
Python
|
Mqtt/connect_device_package/aws-iot-device-sdk-python/test/core/jobs/test_thing_job_manager.py
|
SimoneABNto/My-Code-Py
|
47276c1d69a92aa284685c9f148c1bd960147f7f
|
[
"MIT"
] | 2
|
2022-03-07T12:25:38.000Z
|
2022-03-10T09:58:48.000Z
|
Mqtt/connect_device_package/aws-iot-device-sdk-python/test/core/jobs/test_thing_job_manager.py
|
SimoneABNto/My-Code-Py
|
47276c1d69a92aa284685c9f148c1bd960147f7f
|
[
"MIT"
] | null | null | null |
Mqtt/connect_device_package/aws-iot-device-sdk-python/test/core/jobs/test_thing_job_manager.py
|
SimoneABNto/My-Code-Py
|
47276c1d69a92aa284685c9f148c1bd960147f7f
|
[
"MIT"
] | 1
|
2022-03-09T07:10:20.000Z
|
2022-03-09T07:10:20.000Z
|
# Test thingJobManager behavior
from AWSIoTPythonSDK.core.jobs.thingJobManager import thingJobManager as JobManager
from AWSIoTPythonSDK.core.jobs.thingJobManager import jobExecutionTopicType
from AWSIoTPythonSDK.core.jobs.thingJobManager import jobExecutionTopicReplyType
from AWSIoTPythonSDK.core.jobs.thingJobManager import jobExecutionStatus
import time
import json
from mock import MagicMock
#asserts based on this documentation: https://docs.aws.amazon.com/iot/latest/developerguide/jobs-api.html
class TestThingJobManager:
thingName = 'testThing'
clientTokenValue = "testClientToken123"
thingJobManager = JobManager(thingName, clientTokenValue)
noClientTokenJobManager = JobManager(thingName)
jobId = '8192'
statusDetailsMap = {'testKey':'testVal'}
def test_pending_topics(self):
    """GET_PENDING topics are thing-scoped; a job id is never valid."""
    topic_type = jobExecutionTopicType.JOB_GET_PENDING_TOPIC
    get_topic = self.thingJobManager.getJobTopic
    base = '$aws/things/' + self.thingName + '/jobs/get'
    expectations = [
        (jobExecutionTopicReplyType.JOB_REQUEST_TYPE, base),
        (jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE, base + '/accepted'),
        (jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE, base + '/rejected'),
        (jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE, base + '/#'),
    ]
    for reply_type, expected in expectations:
        assert expected == get_topic(topic_type, reply_type)
        # Supplying a job id must be rejected for every reply type.
        assert get_topic(topic_type, reply_type, self.jobId) is None
def test_start_next_topics(self):
    """START_NEXT topics are thing-scoped; reply topics reject a job id."""
    topic_type = jobExecutionTopicType.JOB_START_NEXT_TOPIC
    get_topic = self.thingJobManager.getJobTopic
    base = '$aws/things/' + self.thingName + '/jobs/start-next'
    assert base == get_topic(topic_type, jobExecutionTopicReplyType.JOB_REQUEST_TYPE)
    for suffix, reply_type in (
        ('/accepted', jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE),
        ('/rejected', jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE),
        ('/#', jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE),
    ):
        assert base + suffix == get_topic(topic_type, reply_type)
        # Reply topics must not be built when a job id is supplied.
        assert get_topic(topic_type, reply_type, self.jobId) is None
def test_describe_topics(self):
    """DESCRIBE topics are job-scoped: the job id is mandatory."""
    topic_type = jobExecutionTopicType.JOB_DESCRIBE_TOPIC
    get_topic = self.thingJobManager.getJobTopic
    base = '$aws/things/' + self.thingName + '/jobs/' + str(self.jobId) + '/get'
    for suffix, reply_type in (
        ('', jobExecutionTopicReplyType.JOB_REQUEST_TYPE),
        ('/accepted', jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE),
        ('/rejected', jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE),
        ('/#', jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE),
    ):
        assert base + suffix == get_topic(topic_type, reply_type, self.jobId)
        # Without a job id there is no describe topic at all.
        assert get_topic(topic_type, reply_type) is None
def test_update_topics(self):
    """UPDATE topics are job-scoped: the job id is mandatory."""
    topic_type = jobExecutionTopicType.JOB_UPDATE_TOPIC
    get_topic = self.thingJobManager.getJobTopic
    base = '$aws/things/' + self.thingName + '/jobs/' + str(self.jobId) + '/update'
    for suffix, reply_type in (
        ('', jobExecutionTopicReplyType.JOB_REQUEST_TYPE),
        ('/accepted', jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE),
        ('/rejected', jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE),
        ('/#', jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE),
    ):
        assert base + suffix == get_topic(topic_type, reply_type, self.jobId)
        # Without a job id there is no update topic at all.
        assert get_topic(topic_type, reply_type) is None
def test_notify_topics(self):
    """NOTIFY is a request-only, thing-scoped topic."""
    topic_type = jobExecutionTopicType.JOB_NOTIFY_TOPIC
    get_topic = self.thingJobManager.getJobTopic
    expected = '$aws/things/' + self.thingName + '/jobs/notify'
    assert expected == get_topic(topic_type, jobExecutionTopicReplyType.JOB_REQUEST_TYPE)
    for reply_type in (
        jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE,
        jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE,
        jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE,
    ):
        # The notify topic has no accepted/rejected/wildcard variants,
        # neither with nor without a job id.
        assert get_topic(topic_type, reply_type) is None
        assert get_topic(topic_type, reply_type, self.jobId) is None
def test_notify_next_topics(self):
    """notify-next supports only the bare request form; reply variants and job-specific forms are None."""
    topicType = jobExecutionTopicType.JOB_NOTIFY_NEXT_TOPIC
    expected = '$aws/things/' + self.thingName + '/jobs/notify-next'
    allReplyTypes = (jobExecutionTopicReplyType.JOB_REQUEST_TYPE,
                     jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE,
                     jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE,
                     jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE)
    assert expected == self.thingJobManager.getJobTopic(topicType, jobExecutionTopicReplyType.JOB_REQUEST_TYPE)
    # Reply variants without a job id are invalid for notify-next.
    for replyType in allReplyTypes[1:]:
        assert self.thingJobManager.getJobTopic(topicType, replyType) is None
    # Supplying a job id invalidates every form, including the request form.
    for replyType in allReplyTypes:
        assert self.thingJobManager.getJobTopic(topicType, replyType, self.jobId) is None
def test_wildcard_topics(self):
    """The jobs wildcard topic is the same string regardless of the reply type."""
    expected = '$aws/things/' + self.thingName + '/jobs/#'
    for replyType in (jobExecutionTopicReplyType.JOB_REQUEST_TYPE,
                      jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE):
        assert expected == self.thingJobManager.getJobTopic(jobExecutionTopicType.JOB_WILDCARD_TOPIC, replyType)
def test_thingless_topics(self):
    """Without a thing name no topic can be built: every topic type maps to None."""
    thinglessJobManager = JobManager(None)
    for topicType in (jobExecutionTopicType.JOB_GET_PENDING_TOPIC,
                      jobExecutionTopicType.JOB_START_NEXT_TOPIC,
                      jobExecutionTopicType.JOB_DESCRIBE_TOPIC,
                      jobExecutionTopicType.JOB_UPDATE_TOPIC,
                      jobExecutionTopicType.JOB_NOTIFY_TOPIC,
                      jobExecutionTopicType.JOB_NOTIFY_NEXT_TOPIC,
                      jobExecutionTopicType.JOB_WILDCARD_TOPIC):
        assert thinglessJobManager.getJobTopic(topicType) is None
def test_unrecognized_topics(self):
    """An unrecognized topic type never yields a topic, with or without reply type or job id."""
    topicType = jobExecutionTopicType.JOB_UNRECOGNIZED_TOPIC
    assert self.thingJobManager.getJobTopic(topicType) is None
    for replyType in (jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE):
        assert self.thingJobManager.getJobTopic(topicType, replyType) is None
    for replyType in (jobExecutionTopicReplyType.JOB_REQUEST_TYPE,
                      jobExecutionTopicReplyType.JOB_ACCEPTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_REJECTED_REPLY_TYPE,
                      jobExecutionTopicReplyType.JOB_WILDCARD_REPLY_TYPE):
        assert self.thingJobManager.getJobTopic(topicType, replyType, self.jobId) is None
def test_serialize_client_token(self):
    """With a client token the payload is a one-key JSON object; without one it is "{}"."""
    expected = '{"clientToken": "%s"}' % self.clientTokenValue
    assert expected == self.thingJobManager.serializeClientTokenPayload()
    assert "{}" == self.noClientTokenJobManager.serializeClientTokenPayload()
def test_serialize_start_next_pending_job_execution(self):
    """Payload carries the client token (when configured) plus the optional statusDetails map."""
    withToken = {'clientToken': self.clientTokenValue}
    assert withToken == json.loads(self.thingJobManager.serializeStartNextPendingJobExecutionPayload())
    assert {} == json.loads(self.noClientTokenJobManager.serializeStartNextPendingJobExecutionPayload())
    withToken['statusDetails'] = self.statusDetailsMap
    assert withToken == json.loads(self.thingJobManager.serializeStartNextPendingJobExecutionPayload(self.statusDetailsMap))
    assert {'statusDetails': self.statusDetailsMap} == json.loads(
        self.noClientTokenJobManager.serializeStartNextPendingJobExecutionPayload(self.statusDetailsMap))
def test_serialize_describe_job_execution(self):
    """Describe payload defaults includeJobDocument to True; executionNumber and the client token are added as supplied."""
    for manager, base in ((self.noClientTokenJobManager, {}),
                          (self.thingJobManager, {'clientToken': self.clientTokenValue})):
        expected = dict(base, includeJobDocument=True)
        assert expected == json.loads(manager.serializeDescribeJobExecutionPayload())
        expected['executionNumber'] = 1
        assert expected == json.loads(manager.serializeDescribeJobExecutionPayload(1))
        expected['includeJobDocument'] = False
        assert expected == json.loads(manager.serializeDescribeJobExecutionPayload(1, False))
def test_serialize_job_execution_update(self):
assert None == self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_STATUS_NOT_SET)
assert None == self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_UNKNOWN_STATUS)
assert None == self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_STATUS_NOT_SET)
assert None == self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_UNKNOWN_STATUS)
payload = {'status':'IN_PROGRESS'}
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_IN_PROGRESS))
payload.update({'status':'FAILED'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_FAILED))
payload.update({'status':'SUCCEEDED'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_SUCCEEDED))
payload.update({'status':'CANCELED'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_CANCELED))
payload.update({'status':'REJECTED'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_REJECTED))
payload.update({'status':'QUEUED'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED))
payload.update({'statusDetails': self.statusDetailsMap})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap))
payload.update({'expectedVersion': '1'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1))
payload.update({'executionNumber': '1'})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1))
payload.update({'includeJobExecutionState': True})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1, True))
payload.update({'includeJobDocument': True})
assert payload == json.loads(self.noClientTokenJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1, True, True))
payload = {'status':'IN_PROGRESS', 'clientToken': self.clientTokenValue}
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_IN_PROGRESS))
payload.update({'status':'FAILED'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_FAILED))
payload.update({'status':'SUCCEEDED'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_SUCCEEDED))
payload.update({'status':'CANCELED'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_CANCELED))
payload.update({'status':'REJECTED'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_REJECTED))
payload.update({'status':'QUEUED'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED))
payload.update({'statusDetails': self.statusDetailsMap})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap))
payload.update({'expectedVersion': '1'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1))
payload.update({'executionNumber': '1'})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1))
payload.update({'includeJobExecutionState': True})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1, True))
payload.update({'includeJobDocument': True})
assert payload == json.loads(self.thingJobManager.serializeJobExecutionUpdatePayload(jobExecutionStatus.JOB_EXECUTION_QUEUED, self.statusDetailsMap, 1, 1, True, True))
| 92.197917
| 209
| 0.786521
| 1,609
| 17,702
| 8.470479
| 0.062772
| 0.107345
| 0.127669
| 0.16597
| 0.922371
| 0.892655
| 0.853474
| 0.831682
| 0.771003
| 0.750459
| 0
| 0.001982
| 0.116653
| 17,702
| 191
| 210
| 92.680628
| 0.869604
| 0.007513
| 0
| 0.350877
| 0
| 0
| 0.062906
| 0.005579
| 0
| 0
| 0
| 0
| 0.602339
| 1
| 0.076023
| false
| 0
| 0.040936
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
61b545a81d87374b7095a8d117a28ba7f9d6332b
| 136
|
py
|
Python
|
app/player/routes.py
|
franc01s/castrcv
|
d0898c417f24120882114a7d0266db155f2267a3
|
[
"Apache-2.0"
] | null | null | null |
app/player/routes.py
|
franc01s/castrcv
|
d0898c417f24120882114a7d0266db155f2267a3
|
[
"Apache-2.0"
] | null | null | null |
app/player/routes.py
|
franc01s/castrcv
|
d0898c417f24120882114a7d0266db155f2267a3
|
[
"Apache-2.0"
] | null | null | null |
from flask import render_template
from . import player
@player.route('/')
def root():
    """Render the player page at the blueprint's root URL."""
    return render_template('player/player.html')
| 19.428571
| 48
| 0.742647
| 18
| 136
| 5.5
| 0.611111
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132353
| 136
| 6
| 49
| 22.666667
| 0.838983
| 0
| 0
| 0
| 0
| 0
| 0.139706
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f60da21a0b36e9adef1949edfda2a66e65d4616e
| 409
|
py
|
Python
|
tests/internal/burstable_performance/test_burstable_performance_true_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/burstable_performance/test_burstable_performance_true_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/burstable_performance/test_burstable_performance_true_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module burstable_performance.true
import pytest
import ec2_compare.internal.burstable_performance.true
def test_get_internal_data_burstable_performance_true_get_instances_list():
    """The generated burstable_performance=true dataset exposes a non-empty instance list."""
    assert len(ec2_compare.internal.burstable_performance.true.get_instances_list()) > 0
def test_get_internal_data_burstable_performance_true_get():
    """The module-level `get` container must be non-empty as well (presumably the raw generated data — verify in the generator)."""
    assert len(ec2_compare.internal.burstable_performance.true.get) > 0
| 40.9
| 86
| 0.870416
| 56
| 409
| 5.910714
| 0.339286
| 0.362538
| 0.435045
| 0.326284
| 0.827795
| 0.827795
| 0.622357
| 0.622357
| 0.622357
| 0
| 0
| 0.013055
| 0.06357
| 409
| 9
| 87
| 45.444444
| 0.851175
| 0.100245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
f615eabcdc34b6e21150c113ad666a783a62f8ca
| 183
|
py
|
Python
|
train/tasks/semantic/script.py
|
tomvdon/lidar-bonnetal
|
0bb78eb9a731e98e6f3b893d735b6c3ca96cb0e8
|
[
"MIT"
] | null | null | null |
train/tasks/semantic/script.py
|
tomvdon/lidar-bonnetal
|
0bb78eb9a731e98e6f3b893d735b6c3ca96cb0e8
|
[
"MIT"
] | null | null | null |
train/tasks/semantic/script.py
|
tomvdon/lidar-bonnetal
|
0bb78eb9a731e98e6f3b893d735b6c3ca96cb0e8
|
[
"MIT"
] | null | null | null |
import subprocess

# Launch semantic-segmentation training with the darknet53 arch / quad label configs.
#
# Bug fix: the command was previously passed as ONE string without shell=True,
# which subprocess treats as a single executable name — on POSIX that raises
# FileNotFoundError before training ever starts. Passing an argument list runs
# the interpreter directly (no shell involved, no quoting pitfalls).
subprocess.run(
    [
        "python", "./train.py",
        "-d", "../../../Training_Tom/",
        "-ac", "./config/arch/darknet53.yaml",
        "-dc", "./config/labels/quad.yaml",
        "-l", "../../../log/",
        "-p", "../../../darknet53/",
    ],
    check=True,  # surface a non-zero exit from train.py instead of ignoring it
)
| 61
| 164
| 0.63388
| 24
| 183
| 4.791667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023669
| 0.076503
| 183
| 2
| 165
| 91.5
| 0.656805
| 0
| 0
| 0
| 0
| 0.5
| 0.797814
| 0.409836
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
9cddbd108857e933ebcc24eba3179498234bc6c3
| 105
|
py
|
Python
|
conf/script/src/build_system/cmd/setup/_priv/meson/__init__.py
|
benoit-dubreuil/template-repo-cpp-full-ecosystem
|
f506dd5e2a61cdd311b6a6a4be4abc59567b4b20
|
[
"MIT"
] | null | null | null |
conf/script/src/build_system/cmd/setup/_priv/meson/__init__.py
|
benoit-dubreuil/template-repo-cpp-full-ecosystem
|
f506dd5e2a61cdd311b6a6a4be4abc59567b4b20
|
[
"MIT"
] | 113
|
2021-02-15T19:22:36.000Z
|
2021-05-07T15:17:42.000Z
|
conf/script/src/build_system/cmd/setup/_priv/meson/__init__.py
|
benoit-dubreuil/template-repo-cpp-full-ecosystem
|
f506dd5e2a61cdd311b6a6a4be4abc59567b4b20
|
[
"MIT"
] | null | null | null |
from .find_meson_machine_file import *
from .meson_machine_file_args import *
from .meson_utils import *
| 26.25
| 38
| 0.828571
| 16
| 105
| 5
| 0.5
| 0.3
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 105
| 3
| 39
| 35
| 0.860215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
14388bbbad695efc5cebaa348c88e544fd5eeb88
| 42
|
py
|
Python
|
python/const.py
|
10sr/junks
|
638f75a2b592e60460f1bb680de1de8ef6a99519
|
[
"CC0-1.0"
] | null | null | null |
python/const.py
|
10sr/junks
|
638f75a2b592e60460f1bb680de1de8ef6a99519
|
[
"CC0-1.0"
] | 24
|
2016-07-19T04:59:00.000Z
|
2021-09-30T02:10:06.000Z
|
python/const.py
|
10sr/junks
|
638f75a2b592e60460f1bb680de1de8ef6a99519
|
[
"CC0-1.0"
] | 1
|
2018-05-21T06:35:00.000Z
|
2018-05-21T06:35:00.000Z
|
# Define two sequential constants via tuple-unpacking a range, then show them.
A, B = range(2)
print("{}, {}".format(A, B))
| 10.5
| 24
| 0.404762
| 9
| 42
| 1.888889
| 0.666667
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.238095
| 42
| 3
| 25
| 14
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
146a3ae5c930d46689a3bb4d53af79fce7030bee
| 3,632
|
py
|
Python
|
leprikon/migrations/0046_attachment_events.py
|
leprikon-cz/leprikon
|
b1bec36fb6bcf0220bffccca53b6f200f9e95910
|
[
"BSD-3-Clause"
] | 4
|
2018-10-29T17:46:09.000Z
|
2021-12-16T08:57:48.000Z
|
leprikon/migrations/0046_attachment_events.py
|
leprikon-cz/leprikon
|
b1bec36fb6bcf0220bffccca53b6f200f9e95910
|
[
"BSD-3-Clause"
] | 68
|
2016-07-11T07:48:54.000Z
|
2022-03-18T01:32:06.000Z
|
leprikon/migrations/0046_attachment_events.py
|
leprikon-cz/leprikon
|
b1bec36fb6bcf0220bffccca53b6f200f9e95910
|
[
"BSD-3-Clause"
] | 2
|
2016-07-12T20:39:53.000Z
|
2020-10-10T03:14:42.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-02-13 16:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import multiselectfield.db.fields
class Migration(migrations.Migration):
    """Add per-attachment notification settings (auto-generated by Django 1.11.27).

    Adds `events` (multi-select of registration/payment events that trigger
    sending the attachment) and `public` (available before registration) to
    both subjectattachment and subjecttypeattachment, then alters `events`
    to drop the initial 'registration_received' default.
    """

    dependencies = [
        ('leprikon', '0045_registration_slug'),
    ]

    operations = [
        # New fields first created with default=['registration_received'] so
        # existing rows get a sensible backfill value...
        migrations.AddField(
            model_name='subjectattachment',
            name='events',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('registration_received', 'registration received'), ('registration_approved', 'registration approved'), ('registration_refused', 'registration refused'), ('registration_payment_request', 'payment requested'), ('registration_canceled', 'registration canceled'), ('discount_granted', 'discount granted'), ('payment_received', 'payment received')], default=['registration_received'], help_text='The attachment will be sent with notification on selected events.', max_length=149, verbose_name='send when'),
        ),
        migrations.AddField(
            model_name='subjectattachment',
            name='public',
            field=models.BooleanField(default=True, help_text='The attachment will be available before registration.', verbose_name='public'),
        ),
        migrations.AddField(
            model_name='subjecttypeattachment',
            name='events',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('registration_received', 'registration received'), ('registration_approved', 'registration approved'), ('registration_refused', 'registration refused'), ('registration_payment_request', 'payment requested'), ('registration_canceled', 'registration canceled'), ('discount_granted', 'discount granted'), ('payment_received', 'payment received')], default=['registration_received'], help_text='The attachment will be sent with notification on selected events.', max_length=149, verbose_name='send when'),
        ),
        migrations.AddField(
            model_name='subjecttypeattachment',
            name='public',
            field=models.BooleanField(default=True, help_text='The attachment will be available before registration.', verbose_name='public'),
        ),
        # ...then altered to default=[] so NEW rows start with no events selected.
        migrations.AlterField(
            model_name='subjectattachment',
            name='events',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('registration_received', 'registration received'), ('registration_approved', 'registration approved'), ('registration_refused', 'registration refused'), ('registration_payment_request', 'payment requested'), ('registration_canceled', 'registration canceled'), ('discount_granted', 'discount granted'), ('payment_received', 'payment received')], default=[], help_text='The attachment will be sent with notification on selected events.', max_length=149, verbose_name='send when'),
        ),
        migrations.AlterField(
            model_name='subjecttypeattachment',
            name='events',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('registration_received', 'registration received'), ('registration_approved', 'registration approved'), ('registration_refused', 'registration refused'), ('registration_payment_request', 'payment requested'), ('registration_canceled', 'registration canceled'), ('discount_granted', 'discount granted'), ('payment_received', 'payment received')], default=[], help_text='The attachment will be sent with notification on selected events.', max_length=149, verbose_name='send when'),
        ),
    ]
| 75.666667
| 585
| 0.713932
| 353
| 3,632
| 7.167139
| 0.212465
| 0.079051
| 0.101186
| 0.049802
| 0.89249
| 0.89249
| 0.867194
| 0.867194
| 0.867194
| 0.867194
| 0
| 0.011086
| 0.155562
| 3,632
| 47
| 586
| 77.276596
| 0.813825
| 0.018998
| 0
| 0.75
| 1
| 0
| 0.48764
| 0.137921
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14704e720b93f655a8eb2f08c4aa23542590ed50
| 99
|
py
|
Python
|
set_pw.py
|
tillhanke/fix-ssh-on-pi
|
2c73d2ef9e32322f9ce2cce51bfb7d09a3499df4
|
[
"MIT"
] | null | null | null |
set_pw.py
|
tillhanke/fix-ssh-on-pi
|
2c73d2ef9e32322f9ce2cce51bfb7d09a3499df4
|
[
"MIT"
] | null | null | null |
set_pw.py
|
tillhanke/fix-ssh-on-pi
|
2c73d2ef9e32322f9ce2cce51bfb7d09a3499df4
|
[
"MIT"
] | null | null | null |
from passlib.hash import sha512_crypt
import getpass

# Prompt for a password without echoing it, then print its SHA-512 crypt hash.
password = getpass.getpass()
print(sha512_crypt.hash(password))
| 16.5
| 43
| 0.818182
| 14
| 99
| 5.642857
| 0.571429
| 0.278481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.090909
| 99
| 5
| 44
| 19.8
| 0.811111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
dcbd3ea9c7a284c9943d67622282c881b8d8d1f6
| 14,957
|
py
|
Python
|
legion/cli/tests/parsers/test_packaging_integration.py
|
legion-platform/legion
|
db3a1d99f1005cb881b16af6075a806725123031
|
[
"ECL-2.0",
"Apache-2.0"
] | 19
|
2018-05-20T17:06:55.000Z
|
2022-01-04T14:15:09.000Z
|
legion/cli/tests/parsers/test_packaging_integration.py
|
legion-platform/legion
|
db3a1d99f1005cb881b16af6075a806725123031
|
[
"ECL-2.0",
"Apache-2.0"
] | 917
|
2018-05-18T18:54:54.000Z
|
2021-09-01T10:41:56.000Z
|
legion/cli/tests/parsers/test_packaging_integration.py
|
legion-platform/legion
|
db3a1d99f1005cb881b16af6075a806725123031
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2018-07-23T18:09:51.000Z
|
2019-08-05T15:37:30.000Z
|
#
# Copyright 2019 EPAM Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import http
import json
import pathlib
import pytest
from click.testing import CliRunner
from legion.cli.parsers import packaging_integration
from legion.cli.parsers.packaging_integration import ID_AND_FILE_MISSED_ERROR_MESSAGE, IGNORE_NOT_FOUND_ERROR_MESSAGE
from legion.cli.utils.output import JSON_OUTPUT_FORMAT, JSONPATH_OUTPUT_FORMAT
from legion.sdk.clients.edi import WrongHttpStatusCode
from legion.sdk.clients.packaging_integration import PackagingIntegrationClient
from legion.sdk.models import PackagingIntegration, PackagingIntegrationSpec
PI_ID = 'some-id'
@pytest.fixture
def pi_client() -> PackagingIntegrationClient:
    """Fresh packaging-integration EDI client (no network call is made here)."""
    return PackagingIntegrationClient()
@pytest.fixture
def pi() -> PackagingIntegration:
    """Minimal valid PackagingIntegration entity used as the round-trip payload."""
    return PackagingIntegration(
        id=PI_ID,
        spec=PackagingIntegrationSpec(
            default_image="mock-image",
            entrypoint="default-entrypoint",
        ),
    )
@pytest.fixture
def cli_runner() -> CliRunner:
    """Click test runner used to invoke the CLI in-process."""
    return CliRunner()
def test_get(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient, pi: PackagingIntegration):
    """`get --id` with JSON output prints the entity as a one-element JSON list."""
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'get', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['get', '--id', PI_ID, '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    client_mock.assert_called_once_with(PI_ID)
    assert result.exit_code == 0
    assert json.loads(result.output) == [pi.to_dict()]
def test_get_all(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient, pi: PackagingIntegration):
    """`get` without an id delegates to get_all and lists every integration."""
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'get_all', return_value=[pi])
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['get', '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    client_mock.assert_called_once_with()
    assert result.exit_code == 0
    assert json.loads(result.output) == [pi.to_dict()]
def test_get_jsonpath(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                      pi: PackagingIntegration):
    """jsonpath output format extracts just the requested field (here: the id)."""
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'get', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['get', '--id', PI_ID, '-o', f'{JSONPATH_OUTPUT_FORMAT}=[*].id'],
                               obj=pi_client)
    client_mock.assert_called_once_with(PI_ID)
    assert result.exit_code == 0
    assert result.output.strip() == PI_ID
def test_get_default_output_format(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                                   pi: PackagingIntegration):
    """Omitting -o still succeeds and mentions the id somewhere in the default rendering."""
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'get', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['get', '--id', PI_ID],
                               obj=pi_client)
    client_mock.assert_called_once_with(PI_ID)
    assert result.exit_code == 0
    assert PI_ID in result.stdout
def test_get_wrong_output_format(cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """An unsupported -o value is rejected by click's choice validation before any client call."""
    wrong_format = 'wrong-format'
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['get', '--id', PI_ID, '-o', wrong_format],
                               obj=pi_client)
    assert result.exit_code != 0
    assert f'invalid choice: {wrong_format}' in result.output
def test_edit(tmp_path: pathlib.Path, mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
              pi: PackagingIntegration):
    """`edit -f` reads the manifest file, calls client.edit with the parsed entity, and echoes it back."""
    ti_file = tmp_path / "ti.yaml"
    # Manifest is the entity dict plus the kind discriminator the CLI dispatches on.
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'edit', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['edit', '-f', ti_file, '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert json.loads(result.output) == [pi.to_dict()]
def test_edit_jsonpath(mocker, tmp_path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                       pi: PackagingIntegration):
    """`edit` honours the jsonpath output format, printing only the id."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'edit', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['edit', '-f', ti_file, '-o', f'{JSONPATH_OUTPUT_FORMAT}=[*].id'],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert result.output.strip() == PI_ID
def test_edit_default_output_format(mocker, tmp_path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                                    pi: PackagingIntegration):
    """`edit` without -o succeeds and the default rendering contains the id."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'edit', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['edit', '-f', ti_file],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert PI_ID in result.stdout
def test_edit_wrong_kind(tmp_path: pathlib.Path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                         pi: PackagingIntegration):
    """A manifest whose kind discriminator is not PackagingIntegration is rejected."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'Wrong'}}))
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['edit', '-f', ti_file, '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    assert result.exit_code != 0
    assert "Unknown kind of object: 'Wrong'" in str(result.exception)
def test_edit_wrong_output_format(cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """`edit` with an unsupported -o value is rejected by click's choice validation."""
    wrong_format = 'wrong-format'
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['edit', '--id', PI_ID, '-o', wrong_format],
                               obj=pi_client)
    assert result.exit_code != 0
    assert f'invalid choice: {wrong_format}' in result.output
def test_create(tmp_path: pathlib.Path, mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                pi: PackagingIntegration):
    """`create -f` parses the manifest, calls client.create with the entity, and echoes it back."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'create', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['create', '-f', ti_file, '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert json.loads(result.output) == [pi.to_dict()]
def test_create_wrong_kind(tmp_path: pathlib.Path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                           pi: PackagingIntegration):
    """`create` rejects a manifest with a wrong kind discriminator."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'Wrong'}}))
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['create', '-f', ti_file, '-o', JSON_OUTPUT_FORMAT],
                               obj=pi_client)
    assert result.exit_code != 0
    assert "Unknown kind of object: 'Wrong'" in str(result.exception)
def test_create_wrong_output_format(cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """`create` with an unsupported -o value must be rejected.

    Bug fix: this test previously invoked the `edit` subcommand (copy-paste from
    test_edit_wrong_output_format), so the `create` path's -o validation was
    never exercised.
    NOTE(review): assumes `create` accepts the same --id/-o options as `edit`,
    as the surrounding tests suggest — confirm against the command definition.
    """
    wrong_format = 'wrong-format'
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['create', '--id', PI_ID, '-o', wrong_format],
                               obj=pi_client)
    assert result.exit_code != 0
    assert f'invalid choice: {wrong_format}' in result.output
def test_create_jsonpath(mocker, tmp_path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                         pi: PackagingIntegration):
    """`create` honours the jsonpath output format, printing only the id."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'create', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration,
                               ['create', '-f', ti_file, '-o', f'{JSONPATH_OUTPUT_FORMAT}=[*].id'],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert result.output.strip() == PI_ID
def test_create_default_output_format(mocker, tmp_path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                                      pi: PackagingIntegration):
    """`create` without -o succeeds and the default rendering contains the id."""
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'create', return_value=pi)
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['create', '-f', ti_file],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi)
    assert result.exit_code == 0
    assert PI_ID in result.stdout
def test_delete_by_file(tmp_path: pathlib.Path, mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                        pi: PackagingIntegration):
    """`delete -f` extracts the id from the manifest and passes it to client.delete."""
    message = "tiection was deleted"
    ti_file = tmp_path / "ti.yaml"
    ti_file.write_text(json.dumps({**pi.to_dict(), **{'kind': 'PackagingIntegration'}}))
    client_mock = mocker.patch.object(PackagingIntegrationClient, 'delete', return_value=message)
    result = cli_runner.invoke(packaging_integration.packaging_integration, ['delete', '-f', ti_file],
                               obj=pi_client)
    client_mock.assert_called_once_with(pi.id)
    assert result.exit_code == 0
    assert message in result.stdout
def test_delete_by_id(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """'delete --id <id>' calls the client with that id and echoes the server message."""
    message = "tiection was deleted"
    delete_mock = mocker.patch.object(PackagingIntegrationClient, 'delete', return_value=message)
    outcome = cli_runner.invoke(packaging_integration.packaging_integration,
                                ['delete', '--id', PI_ID], obj=pi_client)
    delete_mock.assert_called_once_with(PI_ID)
    assert outcome.exit_code == 0
    assert message in outcome.stdout
def test_delete_id_and_file_missed(cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """'delete' with neither --id nor -f must fail with the id/file validation error."""
    outcome = cli_runner.invoke(packaging_integration.packaging_integration, ['delete'], obj=pi_client)
    assert outcome.exit_code != 0
    assert ID_AND_FILE_MISSED_ERROR_MESSAGE in str(outcome.exception)
def test_delete_id_and_file_present(tmp_path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                                    pi: PackagingIntegration):
    """Supplying --id and -f together is rejected with the same id/file validation error."""
    manifest = tmp_path / "ti.yaml"
    manifest.write_text(json.dumps(dict(pi.to_dict(), kind='PackagingIntegration')))
    cli_args = ['delete', '--id', 'some-id', '-f', manifest]
    outcome = cli_runner.invoke(packaging_integration.packaging_integration, cli_args, obj=pi_client)
    assert outcome.exit_code != 0
    assert ID_AND_FILE_MISSED_ERROR_MESSAGE in str(outcome.exception)
def test_delete_wrong_kind(tmp_path: pathlib.Path, cli_runner: CliRunner, pi_client: PackagingIntegrationClient,
                           pi: PackagingIntegration):
    """A manifest whose 'kind' is not PackagingIntegration is rejected before any API call."""
    manifest = tmp_path / "ti.yaml"
    manifest.write_text(json.dumps(dict(pi.to_dict(), kind='Wrong')))
    outcome = cli_runner.invoke(packaging_integration.packaging_integration,
                                ['delete', '-f', manifest], obj=pi_client)
    assert outcome.exit_code != 0
    assert "Unknown kind of object: 'Wrong'" in str(outcome.exception)
def test_delete_ignore_not_found_disabled(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """Without --ignore-not-found, a 404 from the server surfaces as a failure."""
    delete_mock = mocker.patch.object(PackagingIntegrationClient, 'delete',
                                      side_effect=WrongHttpStatusCode(http.HTTPStatus.NOT_FOUND))
    outcome = cli_runner.invoke(packaging_integration.packaging_integration,
                                ['delete', '--id', PI_ID], obj=pi_client)
    delete_mock.assert_called_once_with(PI_ID)
    assert outcome.exit_code != 0
    assert "Got error from server" in str(outcome.exception)
def test_delete_ignore_not_found_enabled(mocker, cli_runner: CliRunner, pi_client: PackagingIntegrationClient):
    """With --ignore-not-found, a 404 is swallowed and reported as an informational message."""
    delete_mock = mocker.patch.object(PackagingIntegrationClient, 'delete',
                                      side_effect=WrongHttpStatusCode(http.HTTPStatus.NOT_FOUND))
    cli_args = ['delete', '--id', PI_ID, '--ignore-not-found']
    outcome = cli_runner.invoke(packaging_integration.packaging_integration, cli_args, obj=pi_client)
    delete_mock.assert_called_once_with(PI_ID)
    assert outcome.exit_code == 0
    assert IGNORE_NOT_FOUND_ERROR_MESSAGE.format(PI_ID) in outcome.stdout
def test_delete_ignore_not_found_enabled_http_code(mocker, cli_runner: CliRunner,
                                                   pi_client: PackagingIntegrationClient):
    """--ignore-not-found only masks 404; any other HTTP error still fails the command."""
    delete_mock = mocker.patch.object(PackagingIntegrationClient, 'delete',
                                      side_effect=WrongHttpStatusCode(http.HTTPStatus.BAD_REQUEST))
    cli_args = ['delete', '--id', PI_ID, '--ignore-not-found']
    outcome = cli_runner.invoke(packaging_integration.packaging_integration, cli_args, obj=pi_client)
    delete_mock.assert_called_once_with(PI_ID)
    assert outcome.exit_code != 0
    assert "Got error from server" in str(outcome.exception)
| 44.382789
| 120
| 0.67567
| 1,724
| 14,957
| 5.572506
| 0.090487
| 0.102009
| 0.084938
| 0.047882
| 0.860102
| 0.854689
| 0.844592
| 0.835224
| 0.831373
| 0.824399
| 0
| 0.00266
| 0.2209
| 14,957
| 336
| 121
| 44.514881
| 0.821763
| 0.03851
| 0
| 0.716814
| 0
| 0
| 0.072761
| 0.006475
| 0
| 0
| 0
| 0
| 0.269912
| 1
| 0.115044
| false
| 0
| 0.048673
| 0.013274
| 0.176991
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.