hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3d08e5561a57813357b2c87dcc7ee515cbddb2c6
| 9,290
|
py
|
Python
|
python_modules/libraries/dagster-dbt/dagster_dbt_tests/rpc/test_solids_ii.py
|
ericct/dagster
|
dd2c9f05751e1bae212a30dbc54381167a14f6c5
|
[
"Apache-2.0"
] | 2
|
2021-06-21T17:50:26.000Z
|
2021-06-21T19:14:23.000Z
|
python_modules/libraries/dagster-dbt/dagster_dbt_tests/rpc/test_solids_ii.py
|
ericct/dagster
|
dd2c9f05751e1bae212a30dbc54381167a14f6c5
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-dbt/dagster_dbt_tests/rpc/test_solids_ii.py
|
ericct/dagster
|
dd2c9f05751e1bae212a30dbc54381167a14f6c5
|
[
"Apache-2.0"
] | 1
|
2021-08-18T17:21:57.000Z
|
2021-08-18T17:21:57.000Z
|
import responses
from dagster import ModeDefinition, execute_solid
from dagster_dbt import (
DbtRpcOutput,
dbt_rpc_resource,
dbt_rpc_run,
dbt_rpc_run_and_wait,
dbt_rpc_run_operation,
dbt_rpc_run_operation_and_wait,
dbt_rpc_snapshot,
dbt_rpc_snapshot_and_wait,
dbt_rpc_snapshot_freshness,
dbt_rpc_snapshot_freshness_and_wait,
dbt_rpc_test,
dbt_rpc_test_and_wait,
)
def test_dbt_rpc_snapshot(rsps):
    """dbt_rpc_snapshot fires a snapshot request and surfaces the server's request token."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # Stub out the dbt RPC endpoint -- no live server is contacted.
    rsps.add(
        method=responses.POST,
        url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
        status=201,
        json={"result": {"request_token": "1234-xo-xo"}},
    )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {"resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}}}
    solid_result = execute_solid(
        dbt_rpc_snapshot,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert solid_result.output_value("request_token") == "1234-xo-xo"
def test_dbt_rpc_run(rsps):
    """dbt_rpc_run kicks off a run and passes the server's request token through."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # Stub out the dbt RPC endpoint -- no live server is contacted.
    rsps.add(
        method=responses.POST,
        url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
        status=201,
        json={"result": {"request_token": "1234-xo-xo"}},
    )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {"resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}}}
    solid_result = execute_solid(
        dbt_rpc_run,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert solid_result.output_value("request_token") == "1234-xo-xo"
def test_dbt_rpc_test(rsps):
    """dbt_rpc_test submits a test request and relays the returned request token."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # Stub out the dbt RPC endpoint -- no live server is contacted.
    rsps.add(
        method=responses.POST,
        url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
        status=201,
        json={"result": {"request_token": "1234-xo-xo"}},
    )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {"resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}}}
    solid_result = execute_solid(
        dbt_rpc_test,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert solid_result.output_value("request_token") == "1234-xo-xo"
def test_dbt_rpc_run_operation(rsps):
    """dbt_rpc_run_operation runs a macro by name and relays the request token."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # Stub out the dbt RPC endpoint -- no live server is contacted.
    rsps.add(
        method=responses.POST,
        url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
        status=201,
        json={"result": {"request_token": "1234-xo-xo"}},
    )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {"dbt_rpc_run_operation": {"config": {"macro": "my_test_macro"}}},
    }
    solid_result = execute_solid(
        dbt_rpc_run_operation,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert solid_result.output_value("request_token") == "1234-xo-xo"
def test_dbt_rpc_snapshot_freshness(rsps):
    """dbt_rpc_snapshot_freshness submits a freshness check and relays the token."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # Stub out the dbt RPC endpoint -- no live server is contacted.
    rsps.add(
        method=responses.POST,
        url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
        status=201,
        json={"result": {"request_token": "1234-xo-xo"}},
    )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {"resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}}}
    solid_result = execute_solid(
        dbt_rpc_snapshot_freshness,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert solid_result.output_value("request_token") == "1234-xo-xo"
def test_dbt_rpc_run_and_wait(rsps, non_terminal_poll_result, terminal_poll_result):
    """dbt_rpc_run_and_wait polls until the RPC job reaches a terminal state."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # The stub serves, in order: the request token, one in-flight poll result,
    # then the terminal poll result. Registration order is significant.
    for payload in (
        {"result": {"request_token": "1234-xo-xo"}},
        non_terminal_poll_result,
        terminal_poll_result,
    ):
        rsps.add(
            method=responses.POST,
            url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
            status=201,
            json=payload,
        )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {"dbt_rpc_run_and_wait": {"config": {"interval": 1}}},
    }
    solid_result = execute_solid(
        dbt_rpc_run_and_wait,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert isinstance(solid_result.output_value("result"), DbtRpcOutput)
def test_dbt_rpc_snapshot_and_wait(rsps, non_terminal_poll_result, terminal_poll_result):
    """dbt_rpc_snapshot_and_wait polls until the snapshot job finishes."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # The stub serves, in order: the request token, one in-flight poll result,
    # then the terminal poll result. Registration order is significant.
    for payload in (
        {"result": {"request_token": "1234-xo-xo"}},
        non_terminal_poll_result,
        terminal_poll_result,
    ):
        rsps.add(
            method=responses.POST,
            url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
            status=201,
            json=payload,
        )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {"dbt_rpc_snapshot_and_wait": {"config": {"interval": 1}}},
    }
    solid_result = execute_solid(
        dbt_rpc_snapshot_and_wait,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert isinstance(solid_result.output_value("result"), DbtRpcOutput)
def test_dbt_rpc_snapshot_freshness_and_wait(rsps, non_terminal_poll_result, terminal_poll_result):
    """dbt_rpc_snapshot_freshness_and_wait polls until the freshness job finishes."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # The stub serves, in order: the request token, one in-flight poll result,
    # then the terminal poll result. Registration order is significant.
    for payload in (
        {"result": {"request_token": "1234-xo-xo"}},
        non_terminal_poll_result,
        terminal_poll_result,
    ):
        rsps.add(
            method=responses.POST,
            url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
            status=201,
            json=payload,
        )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {"dbt_rpc_snapshot_freshness_and_wait": {"config": {"interval": 1}}},
    }
    solid_result = execute_solid(
        dbt_rpc_snapshot_freshness_and_wait,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert isinstance(solid_result.output_value("result"), DbtRpcOutput)
def test_dbt_rpc_run_operation_and_wait(rsps, non_terminal_poll_result, terminal_poll_result):
    """dbt_rpc_run_operation_and_wait runs a macro and polls until it finishes."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # The stub serves, in order: the request token, one in-flight poll result,
    # then the terminal poll result. Registration order is significant.
    for payload in (
        {"result": {"request_token": "1234-xo-xo"}},
        non_terminal_poll_result,
        terminal_poll_result,
    ):
        rsps.add(
            method=responses.POST,
            url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
            status=201,
            json=payload,
        )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {
            "dbt_rpc_run_operation_and_wait": {"config": {"macro": "test_macro", "interval": 1}}
        },
    }
    solid_result = execute_solid(
        dbt_rpc_run_operation_and_wait,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert isinstance(solid_result.output_value("result"), DbtRpcOutput)
def test_dbt_rpc_test_and_wait(rsps, non_terminal_poll_result, terminal_poll_result):
    """dbt_rpc_test_and_wait runs dbt tests and polls until the job finishes."""
    rpc_host, rpc_port = "0.0.0.0", 8580
    # The stub serves, in order: the request token, one in-flight poll result,
    # then the terminal poll result. Registration order is significant.
    for payload in (
        {"result": {"request_token": "1234-xo-xo"}},
        non_terminal_poll_result,
        terminal_poll_result,
    ):
        rsps.add(
            method=responses.POST,
            url=f"http://{rpc_host}:{rpc_port}/jsonrpc",
            status=201,
            json=payload,
        )
    mode = ModeDefinition(name="unittest", resource_defs={"dbt_rpc": dbt_rpc_resource})
    config = {
        "resources": {"dbt_rpc": {"config": {"host": rpc_host, "port": rpc_port}}},
        "solids": {"dbt_rpc_test_and_wait": {"config": {"interval": 1}}},
    }
    solid_result = execute_solid(
        dbt_rpc_test_and_wait,
        mode_def=mode,
        input_values={"start_after": None},
        run_config=config,
    )
    assert solid_result.success
    assert isinstance(solid_result.output_value("result"), DbtRpcOutput)
| 28.409786
| 100
| 0.603445
| 1,128
| 9,290
| 4.687057
| 0.055851
| 0.076036
| 0.011349
| 0.083223
| 0.964441
| 0.946662
| 0.918101
| 0.911292
| 0.911292
| 0.911292
| 0
| 0.028967
| 0.238213
| 9,290
| 326
| 101
| 28.496933
| 0.718101
| 0
| 0
| 0.770677
| 0
| 0
| 0.206889
| 0.014209
| 0
| 0
| 0
| 0
| 0.075188
| 1
| 0.037594
| false
| 0
| 0.011278
| 0
| 0.048872
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d19d1ddf9931ccadfd0b3f3d5709ae61a800858
| 937
|
py
|
Python
|
split_rgb_segment.py
|
Prakadeeswaran05/Deeplabv3-For-Custom-Data-Colab
|
8b40da89018f1691da0debc6ef69f56ddb2ebbfa
|
[
"MIT"
] | 9
|
2020-11-14T13:31:32.000Z
|
2021-12-31T13:39:46.000Z
|
split_rgb_segment.py
|
Prakadeeswaran05/Deeplabv3-For-Custom-Data-Colab
|
8b40da89018f1691da0debc6ef69f56ddb2ebbfa
|
[
"MIT"
] | null | null | null |
split_rgb_segment.py
|
Prakadeeswaran05/Deeplabv3-For-Custom-Data-Colab
|
8b40da89018f1691da0debc6ef69f56ddb2ebbfa
|
[
"MIT"
] | 2
|
2021-12-16T07:57:58.000Z
|
2022-03-17T13:29:47.000Z
|
import glob
import shutil
import os
# Source and destination folders (hard-coded for the original author's machine).
src_dir = "C:\\Users\\kesav\\Downloads\\pics"
dst_dir = "C:\\Users\\kesav\\Downloads\\jpeg"


def renamed_path(path):
    """Return the renamed path for one image file.

    Label images ("..._L.ext") get the "_L" marker replaced by "1"; plain
    images get a "1" inserted before the extension.  Only the basename is
    inspected and rewritten: the original script tested ``'L' in path`` on the
    whole path, so any directory name containing "L" sent every file down the
    label branch (which deletes the source after copying).
    """
    head, tail = os.path.split(path)
    if "_L" in tail:
        return os.path.join(head, tail.replace("_L", "1"))
    root, ext = os.path.splitext(path)
    return root + "1" + ext


def collect_images(source, destination, patterns=("*.jpg", "*.png")):
    """Rename each image matching *patterns* in *source*, copying it to *destination*.

    Replaces the two byte-identical *.jpg / *.png loops of the original with a
    single parameterized loop.  The original asymmetry is preserved: label
    ("_L") images are effectively moved (the renamed source copy is removed
    after the copy), while plain images keep their renamed copy in *source*.
    """
    for pattern in patterns:
        for image in glob.iglob(os.path.join(source, pattern)):
            is_label = "_L" in os.path.basename(image)
            new_path = renamed_path(image)
            os.rename(image, new_path)
            shutil.copy(new_path, destination)
            if is_label:
                os.remove(new_path)


collect_images(src_dir, dst_dir)
| 19.520833
| 58
| 0.490928
| 116
| 937
| 3.87931
| 0.267241
| 0.133333
| 0.133333
| 0.16
| 0.888889
| 0.786667
| 0.786667
| 0.786667
| 0.786667
| 0.786667
| 0
| 0.013333
| 0.359658
| 937
| 47
| 59
| 19.93617
| 0.736667
| 0
| 0
| 0.72
| 0
| 0
| 0.091782
| 0.070438
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.12
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d23a3fcf26d454bbba51767c9b6354641c5ba40
| 3,468
|
py
|
Python
|
tests/redshift/test_generate_s3_to_redshift_copy_statement.py
|
coverwallet/pysoni
|
49d3a8acb101436ad0724749572be2ad9d86f3ae
|
[
"MIT"
] | 5
|
2019-07-08T15:38:06.000Z
|
2022-03-24T20:36:19.000Z
|
tests/redshift/test_generate_s3_to_redshift_copy_statement.py
|
coverwallet/pysoni
|
49d3a8acb101436ad0724749572be2ad9d86f3ae
|
[
"MIT"
] | 2
|
2019-07-07T23:26:32.000Z
|
2020-06-04T07:43:24.000Z
|
tests/redshift/test_generate_s3_to_redshift_copy_statement.py
|
coverwallet/pysoni
|
49d3a8acb101436ad0724749572be2ad9d86f3ae
|
[
"MIT"
] | 1
|
2019-05-31T09:11:22.000Z
|
2019-05-31T09:11:22.000Z
|
import pytest
def test_generate_s3_to_redshift_copy_statement(pysoni_redshift_client):
    """COPY statement with schema, explicit column list and copy options all set."""
    options = [
        "IGNOREHEADER 1",
        "BLANKSASNULL",
        "EMPTYASNULL",
        "IGNOREBLANKLINES",
        "CSV DELIMITER '|'",
        "TIMEFORMAT 'auto'",
        "TRUNCATECOLUMNS",
    ]
    generated = pysoni_redshift_client.generate_s3_to_redshift_copy_statement(
        s3_path='s3://mybucket/mykey',
        table_name='test_table',
        table_schema='report',
        columns=['column1', 'column2'],
        copy_options=options,
    )
    expected = "COPY report.test_table (column1,column2) FROM 's3://mybucket/mykey' with credentials 'aws_access_key_id=aws_access_key_id;aws_secret_access_key=aws_access_secret_key' IGNOREHEADER 1 BLANKSASNULL EMPTYASNULL IGNOREBLANKLINES CSV DELIMITER '|' TIMEFORMAT 'auto' TRUNCATECOLUMNS"
    assert generated == expected
def test_generate_s3_to_redshift_copy_statement_without_columns_argument(pysoni_redshift_client):
    """Omitting ``columns`` drops the parenthesized column list from the COPY."""
    options = [
        "IGNOREHEADER 1",
        "BLANKSASNULL",
        "EMPTYASNULL",
        "IGNOREBLANKLINES",
        "CSV DELIMITER '|'",
        "TIMEFORMAT 'auto'",
        "TRUNCATECOLUMNS",
    ]
    generated = pysoni_redshift_client.generate_s3_to_redshift_copy_statement(
        s3_path='s3://mybucket/mykey',
        table_name='test_table',
        table_schema='report',
        copy_options=options,
    )
    expected = "COPY report.test_table FROM 's3://mybucket/mykey' with credentials 'aws_access_key_id=aws_access_key_id;aws_secret_access_key=aws_access_secret_key' IGNOREHEADER 1 BLANKSASNULL EMPTYASNULL IGNOREBLANKLINES CSV DELIMITER '|' TIMEFORMAT 'auto' TRUNCATECOLUMNS"
    assert generated == expected
def test_generate_s3_to_redshift_copy_statement_without_schema_argument(pysoni_redshift_client):
    """Omitting ``table_schema`` falls back to the default ``public`` schema."""
    options = [
        "IGNOREHEADER 1",
        "BLANKSASNULL",
        "EMPTYASNULL",
        "IGNOREBLANKLINES",
        "CSV DELIMITER '|'",
        "TIMEFORMAT 'auto'",
        "TRUNCATECOLUMNS",
    ]
    generated = pysoni_redshift_client.generate_s3_to_redshift_copy_statement(
        s3_path='s3://mybucket/mykey',
        table_name='test_table',
        columns=['column1', 'column2'],
        copy_options=options,
    )
    expected = "COPY public.test_table (column1,column2) FROM 's3://mybucket/mykey' with credentials 'aws_access_key_id=aws_access_key_id;aws_secret_access_key=aws_access_secret_key' IGNOREHEADER 1 BLANKSASNULL EMPTYASNULL IGNOREBLANKLINES CSV DELIMITER '|' TIMEFORMAT 'auto' TRUNCATECOLUMNS"
    assert generated == expected
def test_generate_s3_to_redshift_copy_statement_without_copy_options_argument(pysoni_redshift_client):
    """Omitting ``copy_options`` leaves only the bare COPY (note the trailing space)."""
    generated = pysoni_redshift_client.generate_s3_to_redshift_copy_statement(
        s3_path='s3://mybucket/mykey',
        table_name='test_table',
        table_schema='report',
        columns=['column1', 'column2'],
    )
    expected = "COPY report.test_table (column1,column2) FROM 's3://mybucket/mykey' with credentials 'aws_access_key_id=aws_access_key_id;aws_secret_access_key=aws_access_secret_key' "
    assert generated == expected
def test_generate_s3_to_redshift_copy_statement_without_default_arguments(pysoni_redshift_client):
    """Only the required arguments: public schema, no columns, no options."""
    generated = pysoni_redshift_client.generate_s3_to_redshift_copy_statement(
        s3_path='s3://mybucket/mykey',
        table_name='test_table',
    )
    expected = "COPY public.test_table FROM 's3://mybucket/mykey' with credentials 'aws_access_key_id=aws_access_key_id;aws_secret_access_key=aws_access_secret_key' "
    assert generated == expected
| 56.852459
| 302
| 0.784314
| 425
| 3,468
| 5.943529
| 0.112941
| 0.10293
| 0.047506
| 0.079177
| 0.979414
| 0.965162
| 0.965162
| 0.961995
| 0.94616
| 0.908947
| 0
| 0.014122
| 0.121972
| 3,468
| 60
| 303
| 57.8
| 0.815435
| 0
| 0
| 0.6
| 1
| 0.142857
| 0.471742
| 0.147059
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.142857
| false
| 0
| 0.028571
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d53dda3dec8eee56512bbe1653ea03a2be0996b
| 11,712
|
py
|
Python
|
pycon_project/apps/symposion/schedule/management/commands/load_sessions.py
|
pythonchelle/pycon
|
a22a7001ce32496f8185ed2612e26ef9c0551ac5
|
[
"BSD-3-Clause"
] | 1
|
2015-06-30T14:11:31.000Z
|
2015-06-30T14:11:31.000Z
|
pycon_project/apps/symposion/schedule/management/commands/load_sessions.py
|
pythonchelle/pycon
|
a22a7001ce32496f8185ed2612e26ef9c0551ac5
|
[
"BSD-3-Clause"
] | null | null | null |
pycon_project/apps/symposion/schedule/management/commands/load_sessions.py
|
pythonchelle/pycon
|
a22a7001ce32496f8185ed2612e26ef9c0551ac5
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime
from django.core.management.base import BaseCommand
from symposion.schedule.models import Track, Session, Slot
# ---------------------------------------------------------------------------
# Hard-coded PyCon 2012 schedule data (March 9-11, 2012).
# Each constant is a list of "sessions"; a session is a list of slot dicts
# carrying "start"/"end" datetimes and, for breaks/lunch, an optional "title".
# ---------------------------------------------------------------------------

# Friday plenary sessions (stored with no track/session by the loader below).
friday_plenaries = [
    [
        {
            "start": datetime(2012, 3, 9, 7, 0),
            "end": datetime(2012, 3, 9, 8, 0),
        },
        {
            "start": datetime(2012, 3, 9, 8, 0),
            "end": datetime(2012, 3, 9, 9, 0),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 9, 0),
            "end": datetime(2012, 3, 9, 9, 10),
        },
        {
            "start": datetime(2012, 3, 9, 9, 10),
            "end": datetime(2012, 3, 9, 9, 40),
        },
        {
            "start": datetime(2012, 3, 9, 9, 40),
            "end": datetime(2012, 3, 9, 10, 5),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 10, 5),
            "end": datetime(2012, 3, 9, 10, 25),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 17, 30),
            "end": datetime(2012, 3, 9, 18, 0),
        },
    ],
]
# Friday talk slots, variant 1 (longer pre-lunch block than variant 2).
friday_slots_type_1 = [
    [
        {
            "start": datetime(2012, 3, 9, 10, 25),
            "end": datetime(2012, 3, 9, 11, 5),
        },
        {
            "start": datetime(2012, 3, 9, 11, 5),
            "end": datetime(2012, 3, 9, 11, 45),
        },
        {
            "start": datetime(2012, 3, 9, 11, 45),
            "end": datetime(2012, 3, 9, 12, 30),
        },
    ],
    [
        {
            "title": "Lunch",
            "start": datetime(2012, 3, 9, 12, 30),
            "end": datetime(2012, 3, 9, 13, 35),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 13, 35),
            "end": datetime(2012, 3, 9, 14, 15),
        },
        {
            "start": datetime(2012, 3, 9, 14, 15),
            "end": datetime(2012, 3, 9, 14, 55),
        },
        {
            "start": datetime(2012, 3, 9, 14, 55),
            "end": datetime(2012, 3, 9, 15, 40),
        },
    ],
    [
        {
            "title": "Afternoon Break with Snacks in Expo Hall",
            "start": datetime(2012, 3, 9, 15, 40),
            "end": datetime(2012, 3, 9, 16, 15),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 16, 15),
            "end": datetime(2012, 3, 9, 16, 55),
        },
        {
            "start": datetime(2012, 3, 9, 16, 55),
            "end": datetime(2012, 3, 9, 17, 30),
        },
    ]
]
# Friday talk slots, variant 2 (earlier lunch, shifted afternoon block).
friday_slots_type_2 = [
    [
        {
            "start": datetime(2012, 3, 9, 10, 25),
            "end": datetime(2012, 3, 9, 11, 5),
        },
        {
            "start": datetime(2012, 3, 9, 11, 5),
            "end": datetime(2012, 3, 9, 11, 45),
        },
        {
            "start": datetime(2012, 3, 9, 11, 45),
            "end": datetime(2012, 3, 9, 12, 15),
        },
    ],
    [
        {
            "title": "Lunch",
            "start": datetime(2012, 3, 9, 12, 15),
            "end": datetime(2012, 3, 9, 13, 20),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 13, 20),
            "end": datetime(2012, 3, 9, 14, 15),
        },
        {
            "start": datetime(2012, 3, 9, 14, 15),
            "end": datetime(2012, 3, 9, 14, 55),
        },
        {
            "start": datetime(2012, 3, 9, 14, 55),
            "end": datetime(2012, 3, 9, 15, 25),
        },
    ],
    [
        {
            "title": "Afternoon Break with Snacks in Expo Hall",
            "start": datetime(2012, 3, 9, 15, 25),
            "end": datetime(2012, 3, 9, 16, 0),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 9, 16, 0),
            "end": datetime(2012, 3, 9, 16, 55),
        },
        {
            "start": datetime(2012, 3, 9, 16, 55),
            "end": datetime(2012, 3, 9, 17, 30),
        },
    ]
]
# Saturday plenary sessions.
saturday_plenaries = [
    [
        {
            "start": datetime(2012, 3, 10, 7, 0),
            "end": datetime(2012, 3, 10, 8, 0),
        },
        {
            "start": datetime(2012, 3, 10, 8, 0),
            "end": datetime(2012, 3, 10, 8, 30),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 8, 30),
            "end": datetime(2012, 3, 10, 9, 0),
        },
        {
            "start": datetime(2012, 3, 10, 9, 0),
            "end": datetime(2012, 3, 10, 9, 5),
        },
        {
            "start": datetime(2012, 3, 10, 9, 5),
            "end": datetime(2012, 3, 10, 9, 20),
        },
        {
            "start": datetime(2012, 3, 10, 9, 20),
            "end": datetime(2012, 3, 10, 9, 35),
        },
        {
            "start": datetime(2012, 3, 10, 9, 35),
            "end": datetime(2012, 3, 10, 10, 5),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 10, 5),
            "end": datetime(2012, 3, 10, 10, 25),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 17, 30),
            "end": datetime(2012, 3, 10, 18, 0),
        },
    ],
]
# Saturday talk slots, variant 1 (same shape as Friday variant 1, day 10).
saturday_slots_type_1 = [
    [
        {
            "start": datetime(2012, 3, 10, 10, 25),
            "end": datetime(2012, 3, 10, 11, 5),
        },
        {
            "start": datetime(2012, 3, 10, 11, 5),
            "end": datetime(2012, 3, 10, 11, 45),
        },
        {
            "start": datetime(2012, 3, 10, 11, 45),
            "end": datetime(2012, 3, 10, 12, 30),
        },
    ],
    [
        {
            "title": "Lunch",
            "start": datetime(2012, 3, 10, 12, 30),
            "end": datetime(2012, 3, 10, 13, 35),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 13, 35),
            "end": datetime(2012, 3, 10, 14, 15),
        },
        {
            "start": datetime(2012, 3, 10, 14, 15),
            "end": datetime(2012, 3, 10, 14, 55),
        },
        {
            "start": datetime(2012, 3, 10, 14, 55),
            "end": datetime(2012, 3, 10, 15, 40),
        },
    ],
    [
        {
            "title": "Afternoon Break with Snacks in Expo Hall",
            "start": datetime(2012, 3, 10, 15, 40),
            "end": datetime(2012, 3, 10, 16, 15),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 16, 15),
            "end": datetime(2012, 3, 10, 16, 55),
        },
        {
            "start": datetime(2012, 3, 10, 16, 55),
            "end": datetime(2012, 3, 10, 17, 30),
        },
    ]
]
# Saturday talk slots, variant 2 (same shape as Friday variant 2, day 10).
saturday_slots_type_2 = [
    [
        {
            "start": datetime(2012, 3, 10, 10, 25),
            "end": datetime(2012, 3, 10, 11, 5),
        },
        {
            "start": datetime(2012, 3, 10, 11, 5),
            "end": datetime(2012, 3, 10, 11, 45),
        },
        {
            "start": datetime(2012, 3, 10, 11, 45),
            "end": datetime(2012, 3, 10, 12, 15),
        },
    ],
    [
        {
            "title": "Lunch",
            "start": datetime(2012, 3, 10, 12, 15),
            "end": datetime(2012, 3, 10, 13, 20),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 13, 20),
            "end": datetime(2012, 3, 10, 14, 15),
        },
        {
            "start": datetime(2012, 3, 10, 14, 15),
            "end": datetime(2012, 3, 10, 14, 55),
        },
        {
            "start": datetime(2012, 3, 10, 14, 55),
            "end": datetime(2012, 3, 10, 15, 25),
        },
    ],
    [
        {
            "title": "Afternoon Break with Snacks in Expo Hall",
            "start": datetime(2012, 3, 10, 15, 25),
            "end": datetime(2012, 3, 10, 16, 0),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 10, 16, 0),
            "end": datetime(2012, 3, 10, 16, 55),
        },
        {
            "start": datetime(2012, 3, 10, 16, 55),
            "end": datetime(2012, 3, 10, 17, 30),
        },
    ]
]
# Sunday plenary sessions (includes titled break/lunch slots).
sunday_plenaries = [
    [
        {
            "start": datetime(2012, 3, 11, 7, 0),
            "end": datetime(2012, 3, 11, 8, 30),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 11, 8, 30),
            "end": datetime(2012, 3, 11, 9, 0),
        },
        {
            "start": datetime(2012, 3, 11, 9, 0),
            "end": datetime(2012, 3, 11, 9, 5),
        },
        {
            "start": datetime(2012, 3, 11, 9, 5),
            "end": datetime(2012, 3, 11, 9, 20),
        },
        {
            "start": datetime(2012, 3, 11, 9, 20),
            "end": datetime(2012, 3, 11, 9, 35),
        },
        {
            "start": datetime(2012, 3, 11, 9, 35),
            "end": datetime(2012, 3, 11, 10, 5),
        },
    ],
    [
        {
            "title": "Break with Snacks in Poster Area",
            "start": datetime(2012, 3, 11, 10, 5),
            "end": datetime(2012, 3, 11, 10, 25),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 11, 10, 25),
            "end": datetime(2012, 3, 11, 11, 55),
        },
    ],
    [
        {
            "title": "Lunch",
            "start": datetime(2012, 3, 11, 12, 25),
            "end": datetime(2012, 3, 11, 13, 15),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 11, 14, 35),
            "end": datetime(2012, 3, 11, 15, 35),
        },
        {
            "start": datetime(2012, 3, 11, 15, 35),
            "end": datetime(2012, 3, 11, 15, 55),
        },
    ],
]
# Sunday per-track talk slots (single variant used by all tracks).
sunday_type_1 = [
    [
        {
            "start": datetime(2012, 3, 11, 11, 55),
            "end": datetime(2012, 3, 11, 12, 25),
        },
    ],
    [
        {
            "start": datetime(2012, 3, 11, 13, 15),
            "end": datetime(2012, 3, 11, 13, 55),
        },
        {
            "start": datetime(2012, 3, 11, 13, 55),
            "end": datetime(2012, 3, 11, 14, 35),
        },
    ],
]
# Track name -> [friday, saturday, sunday] slot layouts; odd-numbered tracks
# use variant 1, even-numbered tracks use variant 2.
tracks = [
    {"Track I": [friday_slots_type_1, saturday_slots_type_1, sunday_type_1]},
    {"Track II": [friday_slots_type_2, saturday_slots_type_2, sunday_type_1]},
    {"Track III": [friday_slots_type_1, saturday_slots_type_1, sunday_type_1]},
    {"Track IV": [friday_slots_type_2, saturday_slots_type_2, sunday_type_1]},
    {"Track V": [friday_slots_type_1, saturday_slots_type_1, sunday_type_1]}
]
class Command(BaseCommand):
    """One-off loader that materializes the hard-coded 2012 schedule.

    Creates one Track per entry in ``tracks``, one Session per per-day slot
    group, and one Slot per time range; plenary slots are created last with
    no track or session attached.

    NOTE(review): written for Python 2 (statement-form ``print``).
    """
    def handle(self, *args, **options):
        # Track-bound sessions and slots first.
        for track_data in tracks:
            for track_name, data in track_data.items():
                track = Track.objects.create(name=track_name)
                print "Created Track: %s" % track_name
                for day in data:
                    for session_data in day:
                        session = Session.objects.create(track=track)
                        print "\tCreated session for %s" % track_name
                        for slot_data in session_data:
                            # "title" is absent on talk slots; .get() yields None there.
                            slot = Slot.objects.create(
                                track=track,
                                session=session,
                                start=slot_data.get("start"),
                                end=slot_data.get("end"),
                                title=slot_data.get("title")
                            )
                            print "\t\tCreated slot: %s" % slot
        print "Plenaries"
        # Plenary slots are shared by every track, so they carry no
        # track/session foreign keys.
        for data in [friday_plenaries, saturday_plenaries, sunday_plenaries]:
            for session_data in data:
                for slot_data in session_data:
                    slot = Slot.objects.create(
                        track=None,
                        session=None,
                        start=slot_data.get("start"),
                        end=slot_data.get("end"),
                        title=slot_data.get("title")
                    )
                    print "\tCreated slot: %s" % slot
| 26.618182
| 79
| 0.390454
| 1,280
| 11,712
| 3.511719
| 0.066406
| 0.373749
| 0.404894
| 0.280311
| 0.85406
| 0.833815
| 0.499889
| 0.463849
| 0.434038
| 0.434038
| 0
| 0.21917
| 0.440574
| 11,712
| 439
| 80
| 26.678815
| 0.46688
| 0
| 0
| 0.236277
| 0
| 0
| 0.083675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00716
| null | null | 0.011933
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d550d1d1a8fca49e4a87615ee85b048df1fb9b4
| 12,535
|
py
|
Python
|
api/views/loan_products/interest_config_views.py
|
prateekmohanty63/microfinance
|
39839c0d378be4ccc40a9dde5dc38a10773a38a1
|
[
"MIT"
] | 1
|
2022-02-25T18:39:44.000Z
|
2022-02-25T18:39:44.000Z
|
api/views/loan_products/interest_config_views.py
|
prateekmohanty63/microfinance
|
39839c0d378be4ccc40a9dde5dc38a10773a38a1
|
[
"MIT"
] | null | null | null |
api/views/loan_products/interest_config_views.py
|
prateekmohanty63/microfinance
|
39839c0d378be4ccc40a9dde5dc38a10773a38a1
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.response import Response
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from loans.models import Product, ProductConfig, InterestConfig
from api.serializers import LoanInterestConfigSerializer as InterestConfigSerializer
from rest_framework import status
from api.views.permissions_check import *
from core.utils import randomstr
@api_view(['POST'])
@permission_classes([IsAuthenticated])
def createInterestConfig(request, product_id, product_config_id):
    """Create an InterestConfig under an active product configuration.

    Validates, in order: the product config exists and is active, the product
    exists and is active, the config belongs to the product, and the caller is
    an admin of the product's organization.  On success returns the serialized
    new interest config; every failure path returns HTTP 400 with a 'detail'
    message.  Expects 'day' and 'structure' in the request body; 'amount' and
    'label' are optional.
    """
    user = request.user
    data = request.data
    # Check to make sure the product_config is valid
    product_config = ProductConfig.objects.filter(product_config_id=product_config_id, status='active').first()
    if not product_config:
        message = {'detail': 'Product configuration does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Check if the product is valid
    product = Product.objects.filter(product_id=product_id, status='active').first()
    if not product:
        message = {'detail': 'Product does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # The config in the URL must actually belong to the product in the URL.
    if product_config.product != product:
        message = {'detail': 'Product id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Verify the user has permission to be looking at and editing product details (must be part of the org and admin)
    organization_id = product.organization.organization_id
    user_organization = check_organization_permissions(user=user, organization_id=organization_id, roles=['admin'])
    if not user_organization['organization']:
        message = {'detail': user_organization['message']}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if product.organization != user_organization['organization']:
        message = {'detail': 'Organization id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    try:
        # Create the product configuration
        interest_config = InterestConfig.objects.create(
            interest_config_id=randomstr(),
            product_config=product_config,
            day=data['day'],
            structure=data['structure'],
            created_user=user,
        )
        # Optional fields are applied (and saved) individually after create.
        if 'amount' in data:
            interest_config.amount=data['amount']
            interest_config.save()
        if 'label' in data:
            interest_config.label=data['label']
            interest_config.save()
        serializer = InterestConfigSerializer(interest_config, many=False)
        return Response(serializer.data)
    except Exception as e:
        # Broad catch: any failure (e.g. missing 'day'/'structure' keys or a DB
        # error) is reported as a generic 400 after printing the exception.
        print(e)
        message = {'detail': 'There was an error creating this interest configuration'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def getInterestConfigs(request, product_id, product_config_id):
    """
    List the active InterestConfigs for a product configuration.

    Results are ordered by `day` and paginated five per page via the
    `?page=` query parameter.  The requesting user must be an admin of the
    organization that owns the product.  Returns
    {'interest_configs': [...], 'page': n, 'pages': m} on success, or a
    400 response with a `detail` message.
    """
    user = request.user
    # Check to make sure the product_config is valid (exists and active).
    product_config = ProductConfig.objects.filter(
        product_config_id=product_config_id, status='active').first()
    if not product_config:
        message = {'detail': 'Product configuration does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Check if the product is valid and actually owns the configuration.
    product = Product.objects.filter(product_id=product_id, status='active').first()
    if not product:
        message = {'detail': 'Product does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if product_config.product != product:
        message = {'detail': 'Product id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Verify the user has permission to be looking at and editing product
    # details (must be part of the org and admin).
    organization_id = product.organization.organization_id
    user_organization = check_organization_permissions(
        user=user, organization_id=organization_id, roles=['admin'])
    if not user_organization['organization']:
        message = {'detail': user_organization['message']}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if product.organization != user_organization['organization']:
        message = {'detail': 'Organization id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    interest_configs = InterestConfig.objects.filter(
        product_config=product_config,
        status='active',
    ).order_by('day')
    page = request.query_params.get('page')
    paginator = Paginator(interest_configs, 5)
    try:
        interest_configs = paginator.page(page)
    except PageNotAnInteger:
        interest_configs = paginator.page(1)
    except EmptyPage:
        interest_configs = paginator.page(paginator.num_pages)
    # Report the page that was actually served.  The original ran
    # int(page) on the raw query value, which raised an unhandled
    # ValueError (HTTP 500) for non-numeric ?page= input and could report
    # a page number different from the one returned after clamping.
    page = interest_configs.number
    serializer = InterestConfigSerializer(interest_configs, many=True)
    return Response({'interest_configs': serializer.data, 'page': page, 'pages': paginator.num_pages})
@api_view(['GET'])
@permission_classes([IsAuthenticated])
def getInterestConfig(request, product_id, product_config_id, interest_config_id):
    """
    Fetch a single active InterestConfig.

    Validates the whole ownership chain (interest config -> product config
    -> product -> organization) and that the requesting user is an admin
    of the owning organization.  Returns the serialized InterestConfig on
    success, or a 400 response with a `detail` message.
    """
    requester = request.user

    def bad_request(detail):
        # Every failure path answers with the same 400 payload shape.
        return Response({'detail': detail}, status=status.HTTP_400_BAD_REQUEST)

    # The interest configuration itself must exist and still be active.
    interest_config = InterestConfig.objects.filter(
        interest_config_id=interest_config_id, status='active').first()
    if interest_config is None:
        return bad_request('Interest configuration does not exist')

    # It must belong to an active product configuration matching the URL.
    product_config = ProductConfig.objects.filter(
        product_config_id=product_config_id, status='active').first()
    if product_config is None:
        return bad_request('Product configuration does not exist')
    if interest_config.product_config != product_config:
        return bad_request('Product config id is not valid')

    # The product in the URL must be active and own the configuration.
    product = Product.objects.filter(product_id=product_id, status='active').first()
    if product is None:
        return bad_request('Product does not exist')
    if product_config.product != product:
        return bad_request('Product id is not valid')

    # The requester must be an admin member of the owning organization.
    permissions = check_organization_permissions(
        user=requester,
        organization_id=product.organization.organization_id,
        roles=['admin'],
    )
    if not permissions['organization']:
        return bad_request(permissions['message'])
    if product.organization != permissions['organization']:
        return bad_request('Organization id is not valid')

    return Response(InterestConfigSerializer(interest_config, many=False).data)
@api_view(['PUT'])
@permission_classes([IsAuthenticated])
def updateInterestConfig(request, product_id, product_config_id, interest_config_id):
    """
    Update an active InterestConfig.

    Any of `label`, `day`, `structure`, and `amount` may be supplied in
    the request body; only the supplied fields are changed.  The
    requesting user must be an admin of the organization that owns the
    product.  Returns the serialized InterestConfig on success, or a 400
    response with a `detail` message.
    """
    payload = request.data
    requester = request.user

    def bad_request(detail):
        # Every failure path answers with the same 400 payload shape.
        return Response({'detail': detail}, status=status.HTTP_400_BAD_REQUEST)

    # The interest configuration itself must exist and still be active.
    interest_config = InterestConfig.objects.filter(
        interest_config_id=interest_config_id, status='active').first()
    if interest_config is None:
        return bad_request('Interest configuration does not exist')

    # It must belong to an active product configuration matching the URL.
    product_config = ProductConfig.objects.filter(
        product_config_id=product_config_id, status='active').first()
    if product_config is None:
        return bad_request('Product configuration does not exist')
    if interest_config.product_config != product_config:
        return bad_request('Product config id is not valid')

    # The product in the URL must be active and own the configuration.
    product = Product.objects.filter(product_id=product_id, status='active').first()
    if product is None:
        return bad_request('Product does not exist')
    if product_config.product != product:
        return bad_request('Product id is not valid')

    # The requester must be an admin member of the owning organization.
    permissions = check_organization_permissions(
        user=requester,
        organization_id=product.organization.organization_id,
        roles=['admin'],
    )
    if not permissions['organization']:
        return bad_request(permissions['message'])
    if product.organization != permissions['organization']:
        return bad_request('Organization id is not valid')

    # Apply whichever editable fields the caller supplied, then persist.
    for field in ('label', 'day', 'structure', 'amount'):
        if field in payload:
            setattr(interest_config, field, payload[field])
    interest_config.save()

    return Response(InterestConfigSerializer(interest_config, many=False).data)
@api_view(['PUT'])
@permission_classes([IsAuthenticated])
def archiveInterestConfig(request, product_id, product_config_id, interest_config_id):
    """
    Soft-delete an InterestConfig by setting its status to 'archived'.

    Validates the ownership chain (interest config -> product config ->
    product -> organization) and that the requesting user is an admin of
    the owning organization.  Returns a confirmation message on success,
    or a 400 response with a `detail` message.
    """
    user = request.user
    # Check to make sure the interest_config is valid (exists and active).
    interest_config = InterestConfig.objects.filter(
        interest_config_id=interest_config_id, status='active').first()
    if not interest_config:
        message = {'detail': 'Interest configuration does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Check to make sure the product_config is valid and owns the record.
    product_config = ProductConfig.objects.filter(
        product_config_id=product_config_id, status='active').first()
    if not product_config:
        message = {'detail': 'Product configuration does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if interest_config.product_config != product_config:
        message = {'detail': 'Product config id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Check if the product is valid and actually owns the configuration.
    product = Product.objects.filter(product_id=product_id, status='active').first()
    if not product:
        message = {'detail': 'Product does not exist'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if product_config.product != product:
        message = {'detail': 'Product id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    # Verify the user has permission to be looking at and editing product
    # details (must be part of the org and admin).
    organization_id = product.organization.organization_id
    user_organization = check_organization_permissions(
        user=user, organization_id=organization_id, roles=['admin'])
    if not user_organization['organization']:
        message = {'detail': user_organization['message']}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    if product.organization != user_organization['organization']:
        message = {'detail': 'Organization id is not valid'}
        return Response(message, status=status.HTTP_400_BAD_REQUEST)
    interest_config.status = 'archived'
    interest_config.save()
    message = {'detail': 'Interest configuration was archived'}
    # Use the DRF status constant for consistency with the rest of the
    # module (the original passed the bare integer 200).
    return Response(message, status=status.HTTP_200_OK)
| 45.748175
| 117
| 0.72485
| 1,512
| 12,535
| 5.812169
| 0.080688
| 0.071006
| 0.078858
| 0.101388
| 0.817478
| 0.817478
| 0.817478
| 0.807237
| 0.807237
| 0.787096
| 0
| 0.009966
| 0.183486
| 12,535
| 273
| 118
| 45.915751
| 0.848657
| 0.08943
| 0
| 0.760976
| 0
| 0
| 0.127348
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.04878
| 0
| 0.253659
| 0.009756
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d59db42d9018ab444e1309d413bd98376a0faae
| 87,677
|
py
|
Python
|
test/unit/test_iam_identity_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_iam_identity_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_iam_identity_v1.py
|
zachsirotto/platform-services-python-sdk
|
32a080b7a93567f9528867a31bd0b47423297bab
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2020.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for IamIdentityV1
"""
from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator
import inspect
import json
import pytest
import re
import requests
import responses
import urllib
from ibm_platform_services.iam_identity_v1 import *
# Module-level service instance shared by every test class below.
# NoAuthAuthenticator skips real IAM authentication so requests can be
# intercepted by the `responses` mocks.
service = IamIdentityV1(
    authenticator=NoAuthAuthenticator()
)

# All mocked endpoints are registered relative to this base URL.
base_url = 'https://iam.cloud.ibm.com'
service.set_service_url(base_url)

##############################################################################
# Start of Service: IdentityOperations
##############################################################################
# region
class TestListApiKeys():
    """
    Test Class for list_api_keys
    """

    # Canned JSON payload returned by every mocked GET /v1/apikeys call in
    # this class (shared so the literal is defined once).
    _MOCK_RESPONSE = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "offset": 6, "limit": 5, "first": "first", "previous": "previous", "next": "next", "apikeys": [{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url):
            return re.compile(request_url.rstrip('/') + '/+')
        return request_url

    @responses.activate
    def test_list_api_keys_all_params(self):
        """
        list_api_keys()
        """
        # Register the mocked endpoint.
        url = self.preprocess_url(base_url + '/v1/apikeys')
        responses.add(
            responses.GET,
            url,
            body=self._MOCK_RESPONSE,
            content_type='application/json',
            status=200,
        )

        # Parameter values for the invocation.
        account_id = 'testString'
        iam_id = 'testString'
        pagesize = 38
        pagetoken = 'testString'
        scope = 'entity'
        type = 'user'
        sort = 'testString'
        order = 'asc'
        include_history = True

        # Invoke the operation with every optional parameter supplied.
        response = service.list_api_keys(
            account_id=account_id,
            iam_id=iam_id,
            pagesize=pagesize,
            pagetoken=pagetoken,
            scope=scope,
            type=type,
            sort=sort,
            order=order,
            include_history=include_history,
            headers={}
        )

        # Exactly one HTTP call must have been made, and it must succeed.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Every parameter must appear in the outgoing query string.
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        expected_pairs = [
            'account_id={}'.format(account_id),
            'iam_id={}'.format(iam_id),
            'pagesize={}'.format(pagesize),
            'pagetoken={}'.format(pagetoken),
            'scope={}'.format(scope),
            'type={}'.format(type),
            'sort={}'.format(sort),
            'order={}'.format(order),
            'include_history={}'.format('true' if include_history else 'false'),
        ]
        for pair in expected_pairs:
            assert pair in query_string

    @responses.activate
    def test_list_api_keys_required_params(self):
        """
        test_list_api_keys_required_params()
        """
        # Register the mocked endpoint.
        url = self.preprocess_url(base_url + '/v1/apikeys')
        responses.add(
            responses.GET,
            url,
            body=self._MOCK_RESPONSE,
            content_type='application/json',
            status=200,
        )

        # Invoke the operation with no optional parameters at all.
        response = service.list_api_keys()

        # Exactly one HTTP call must have been made, and it must succeed.
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestCreateApiKey():
    """
    Test Class for create_api_key
    """

    # Canned JSON payload returned by every mocked POST /v1/apikeys call in
    # this class (defined once instead of repeated in each test method).
    _MOCK_RESPONSE = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_api_key_all_params(self):
        """
        create_api_key()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys')
        responses.add(responses.POST,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        name = 'testString'
        iam_id = 'testString'
        description = 'testString'
        account_id = 'testString'
        apikey = 'testString'
        store_value = True
        entity_lock = 'testString'

        # Invoke method
        response = service.create_api_key(
            name,
            iam_id,
            description=description,
            account_id=account_id,
            apikey=apikey,
            store_value=store_value,
            entity_lock=entity_lock,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['name'] == 'testString'
        assert req_body['iam_id'] == 'testString'
        assert req_body['description'] == 'testString'
        assert req_body['account_id'] == 'testString'
        assert req_body['apikey'] == 'testString'
        assert req_body['store_value'] == True

    @responses.activate
    def test_create_api_key_required_params(self):
        """
        test_create_api_key_required_params()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys')
        responses.add(responses.POST,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        name = 'testString'
        iam_id = 'testString'
        description = 'testString'
        account_id = 'testString'
        apikey = 'testString'
        store_value = True

        # Invoke method (without the optional entity_lock header)
        response = service.create_api_key(
            name,
            iam_id,
            description=description,
            account_id=account_id,
            apikey=apikey,
            store_value=store_value,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['name'] == 'testString'
        assert req_body['iam_id'] == 'testString'
        assert req_body['description'] == 'testString'
        assert req_body['account_id'] == 'testString'
        assert req_body['apikey'] == 'testString'
        assert req_body['store_value'] == True

    @responses.activate
    def test_create_api_key_value_error(self):
        """
        test_create_api_key_value_error()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys')
        responses.add(responses.POST,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=201)

        # Pass in all but one required param and check for a ValueError.
        # Keys are compared with == rather than the original `is not`,
        # which relied on string identity — an anti-pattern that only
        # worked because both loops iterate the same dict's key objects.
        req_param_dict = {
            "name": 'testString',
            "iam_id": 'testString',
        }
        for param in req_param_dict.keys():
            req_copy = {key: (None if key == param else val) for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.create_api_key(**req_copy)
class TestGetApiKeysDetails():
    """
    Test Class for get_api_keys_details
    """

    # Canned JSON payload returned by every mocked GET /v1/apikeys/details
    # call in this class (shared so the literal is defined once).
    _MOCK_RESPONSE = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url):
            return re.compile(request_url.rstrip('/') + '/+')
        return request_url

    @responses.activate
    def test_get_api_keys_details_all_params(self):
        """
        get_api_keys_details()
        """
        # Register the mocked endpoint.
        url = self.preprocess_url(base_url + '/v1/apikeys/details')
        responses.add(
            responses.GET,
            url,
            body=self._MOCK_RESPONSE,
            content_type='application/json',
            status=200,
        )

        # Parameter values for the invocation.
        iam_api_key = 'testString'
        include_history = True

        # Invoke the operation with every optional parameter supplied.
        response = service.get_api_keys_details(
            iam_api_key=iam_api_key,
            include_history=include_history,
            headers={}
        )

        # Exactly one HTTP call must have been made, and it must succeed.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The boolean flag must be serialized into the query string.
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'include_history={}'.format('true' if include_history else 'false') in query_string

    @responses.activate
    def test_get_api_keys_details_required_params(self):
        """
        test_get_api_keys_details_required_params()
        """
        # Register the mocked endpoint.
        url = self.preprocess_url(base_url + '/v1/apikeys/details')
        responses.add(
            responses.GET,
            url,
            body=self._MOCK_RESPONSE,
            content_type='application/json',
            status=200,
        )

        # Invoke the operation with no optional parameters at all.
        response = service.get_api_keys_details()

        # Exactly one HTTP call must have been made, and it must succeed.
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestGetApiKey():
    """
    Test Class for get_api_key
    """

    # Canned JSON payload returned by every mocked GET /v1/apikeys/{id}
    # call in this class (defined once instead of repeated per method).
    _MOCK_RESPONSE = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_api_key_all_params(self):
        """
        get_api_key()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.GET,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'
        include_history = True

        # Invoke method
        response = service.get_api_key(
            id,
            include_history=include_history,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'include_history={}'.format('true' if include_history else 'false') in query_string

    @responses.activate
    def test_get_api_key_required_params(self):
        """
        test_get_api_key_required_params()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.GET,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

        # Invoke method with only the required path parameter
        response = service.get_api_key(
            'testString',
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_api_key_value_error(self):
        """
        test_get_api_key_value_error()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.GET,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

        # Pass in all but one required param and check for a ValueError.
        # Keys are compared with == rather than the original `is not`
        # string-identity check, which only worked by accident.
        req_param_dict = {
            "id": 'testString',
        }
        for param in req_param_dict.keys():
            req_copy = {key: (None if key == param else val) for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.get_api_key(**req_copy)
class TestUpdateApiKey():
    """
    Test Class for update_api_key
    """

    # Canned JSON payload returned by every mocked PUT /v1/apikeys/{id}
    # call in this class (defined once instead of repeated per method).
    _MOCK_RESPONSE = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_update_api_key_all_params(self):
        """
        update_api_key()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.PUT,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'
        if_match = 'testString'
        name = 'testString'
        description = 'testString'

        # Invoke method
        response = service.update_api_key(
            id,
            if_match,
            name=name,
            description=description,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['name'] == 'testString'
        assert req_body['description'] == 'testString'

    @responses.activate
    def test_update_api_key_value_error(self):
        """
        test_update_api_key_value_error()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.PUT,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

        # Pass in all but one required param and check for a ValueError.
        # Keys are compared with == rather than the original `is not`
        # string-identity check, which only worked by accident.
        req_param_dict = {
            "id": 'testString',
            "if_match": 'testString',
        }
        for param in req_param_dict.keys():
            req_copy = {key: (None if key == param else val) for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.update_api_key(**req_copy)
class TestDeleteApiKey():
    """
    Test Class for delete_api_key
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs that end in slashes are turned into a regex so the mock
        # matches regardless of trailing-slash count.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_api_key_all_params(self):
        """
        delete_api_key()
        """
        # Set up mock (DELETE returns 204 No Content, no body)
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.delete_api_key(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_delete_api_key_value_error(self):
        """
        test_delete_api_key_value_error()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Pass in all but one required param and check for a ValueError.
        # Keys are compared with == rather than the original `is not`
        # string-identity check, which only worked by accident.
        req_param_dict = {
            "id": 'testString',
        }
        for param in req_param_dict.keys():
            req_copy = {key: (None if key == param else val) for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.delete_api_key(**req_copy)
class TestLockApiKey:
    """
    Test Class for lock_api_key
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_lock_api_key_all_params(self):
        """
        lock_api_key()
        """
        # Set up mock (POST returns 204 No Content, no body)
        url = self.preprocess_url(base_url + '/v1/apikeys/testString/lock')
        responses.add(responses.POST,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.lock_api_key(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_lock_api_key_value_error(self):
        """
        Verify lock_api_key() raises a ValueError when the required
        id parameter is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString/lock')
        responses.add(responses.POST,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.lock_api_key(**req_copy)
class TestUnlockApiKey:
    """
    Test Class for unlock_api_key
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_unlock_api_key_all_params(self):
        """
        unlock_api_key()
        """
        # Set up mock (DELETE on the lock resource returns 204 No Content)
        url = self.preprocess_url(base_url + '/v1/apikeys/testString/lock')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.unlock_api_key(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_unlock_api_key_value_error(self):
        """
        Verify unlock_api_key() raises a ValueError when the required
        id parameter is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/apikeys/testString/lock')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.unlock_api_key(**req_copy)
class TestListServiceIds():
    """
    Test Class for list_service_ids
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes becomes a regex that tolerates
        # trailing slashes; anything else is usable as-is.
        ends_with_slash = re.fullmatch('.*/+', request_url) is not None
        if ends_with_slash:
            return re.compile(request_url.rstrip('/') + '/+')
        return request_url

    @responses.activate
    def test_list_service_ids_all_params(self):
        """
        list_service_ids()
        """
        # Register the mock GET endpoint with a canned JSON payload.
        url = self.preprocess_url(base_url + '/v1/serviceids/')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "offset": 6, "limit": 5, "first": "first", "previous": "previous", "next": "next", "serviceids": [{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}]}'
        responses.add(responses.GET, url, body=mock_response,
                      content_type='application/json', status=200)

        # Parameter values for the request.
        account_id = 'testString'
        name = 'testString'
        pagesize = 38
        pagetoken = 'testString'
        sort = 'testString'
        order = 'asc'
        include_history = True

        # Invoke the operation with every optional parameter supplied.
        response = service.list_service_ids(
            account_id=account_id,
            name=name,
            pagesize=pagesize,
            pagetoken=pagetoken,
            sort=sort,
            order=order,
            include_history=include_history,
            headers={})

        # Exactly one HTTP call, answered with 200 OK.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Each supplied parameter must appear in the query string.
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert f'account_id={account_id}' in query_string
        assert f'name={name}' in query_string
        assert f'pagesize={pagesize}' in query_string
        assert f'pagetoken={pagetoken}' in query_string
        assert f'sort={sort}' in query_string
        assert f'order={order}' in query_string
        assert 'include_history={}'.format('true' if include_history else 'false') in query_string

    @responses.activate
    def test_list_service_ids_required_params(self):
        """
        test_list_service_ids_required_params()
        """
        # Register the mock GET endpoint with a canned JSON payload.
        url = self.preprocess_url(base_url + '/v1/serviceids/')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "offset": 6, "limit": 5, "first": "first", "previous": "previous", "next": "next", "serviceids": [{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}]}'
        responses.add(responses.GET, url, body=mock_response,
                      content_type='application/json', status=200)

        # Invoke the operation with no optional parameters at all.
        response = service.list_service_ids()

        # Exactly one HTTP call, answered with 200 OK.
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestCreateServiceId:
    """
    Test Class for create_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_service_id_all_params(self):
        """
        create_service_id()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Construct a dict representation of a CreateApiKeyRequest model
        create_api_key_request_model = {}
        create_api_key_request_model['name'] = 'testString'
        create_api_key_request_model['description'] = 'testString'
        create_api_key_request_model['iam_id'] = 'testString'
        create_api_key_request_model['account_id'] = 'testString'
        create_api_key_request_model['apikey'] = 'testString'
        create_api_key_request_model['store_value'] = True

        # Set up parameter values
        account_id = 'testString'
        name = 'testString'
        description = 'testString'
        unique_instance_crns = ['testString']
        apikey = create_api_key_request_model
        entity_lock = 'testString'

        # Invoke method
        response = service.create_service_id(
            account_id,
            name,
            description=description,
            unique_instance_crns=unique_instance_crns,
            apikey=apikey,
            entity_lock=entity_lock,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params sent in the request
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['account_id'] == 'testString'
        assert req_body['name'] == 'testString'
        assert req_body['description'] == 'testString'
        assert req_body['unique_instance_crns'] == ['testString']
        assert req_body['apikey'] == create_api_key_request_model

    @responses.activate
    def test_create_service_id_required_params(self):
        """
        test_create_service_id_required_params()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Construct a dict representation of a CreateApiKeyRequest model
        create_api_key_request_model = {}
        create_api_key_request_model['name'] = 'testString'
        create_api_key_request_model['description'] = 'testString'
        create_api_key_request_model['iam_id'] = 'testString'
        create_api_key_request_model['account_id'] = 'testString'
        create_api_key_request_model['apikey'] = 'testString'
        create_api_key_request_model['store_value'] = True

        # Set up parameter values (no entity_lock header this time)
        account_id = 'testString'
        name = 'testString'
        description = 'testString'
        unique_instance_crns = ['testString']
        apikey = create_api_key_request_model

        # Invoke method
        response = service.create_service_id(
            account_id,
            name,
            description=description,
            unique_instance_crns=unique_instance_crns,
            apikey=apikey,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params sent in the request
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['account_id'] == 'testString'
        assert req_body['name'] == 'testString'
        assert req_body['description'] == 'testString'
        assert req_body['unique_instance_crns'] == ['testString']
        assert req_body['apikey'] == create_api_key_request_model

    @responses.activate
    def test_create_service_id_value_error(self):
        """
        Verify create_service_id() raises a ValueError when a required
        parameter (account_id or name) is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Only the required parameter values are needed here; the unused
        # optional-parameter locals of the original were removed.
        account_id = 'testString'
        name = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "account_id": account_id,
            "name": name,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.create_service_id(**req_copy)
class TestGetServiceId:
    """
    Test Class for get_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_service_id_all_params(self):
        """
        get_service_id()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'
        include_history = True

        # Invoke method
        response = service.get_service_id(
            id,
            include_history=include_history,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params sent in the request
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'include_history={}'.format('true' if include_history else 'false') in query_string

    @responses.activate
    def test_get_service_id_required_params(self):
        """
        test_get_service_id_required_params()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.get_service_id(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_service_id_value_error(self):
        """
        Verify get_service_id() raises a ValueError when the required
        id parameter is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.get_service_id(**req_copy)
class TestUpdateServiceId:
    """
    Test Class for update_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_update_service_id_all_params(self):
        """
        update_service_id()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        id = 'testString'
        if_match = 'testString'
        name = 'testString'
        description = 'testString'
        unique_instance_crns = ['testString']

        # Invoke method
        response = service.update_service_id(
            id,
            if_match,
            name=name,
            description=description,
            unique_instance_crns=unique_instance_crns,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params sent in the request
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['name'] == 'testString'
        assert req_body['description'] == 'testString'
        assert req_body['unique_instance_crns'] == ['testString']

    @responses.activate
    def test_update_service_id_value_error(self):
        """
        Verify update_service_id() raises a ValueError when a required
        parameter (id or if_match) is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        mock_response = '{"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "iam_id": "iam_id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "modified_at": "modified_at", "account_id": "account_id", "name": "name", "description": "description", "unique_instance_crns": ["unique_instance_crns"], "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}], "apikey": {"context": {"transaction_id": "transaction_id", "operation": "operation", "user_agent": "user_agent", "url": "url", "instance_id": "instance_id", "thread_id": "thread_id", "host": "host", "start_time": "start_time", "end_time": "end_time", "elapsed_time": "elapsed_time", "cluster_name": "cluster_name"}, "id": "id", "entity_tag": "entity_tag", "crn": "crn", "locked": true, "created_at": "created_at", "created_by": "created_by", "modified_at": "modified_at", "name": "name", "description": "description", "iam_id": "iam_id", "account_id": "account_id", "apikey": "apikey", "history": [{"timestamp": "timestamp", "iam_id": "iam_id", "iam_id_account": "iam_id_account", "action": "action", "params": ["params"], "message": "message"}]}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Only the required parameter values are needed here; the unused
        # optional-parameter locals of the original were removed.
        id = 'testString'
        if_match = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
            "if_match": if_match,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.update_service_id(**req_copy)
class TestDeleteServiceId:
    """
    Test Class for delete_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_service_id_all_params(self):
        """
        delete_service_id()
        """
        # Set up mock (DELETE returns 204 No Content, no body)
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.delete_service_id(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_delete_service_id_value_error(self):
        """
        Verify delete_service_id() raises a ValueError when the required
        id parameter is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.delete_service_id(**req_copy)
class TestLockServiceId:
    """
    Test Class for lock_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        URLs with trailing slashes become a regex tolerating extra slashes;
        all other URLs are returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_lock_service_id_all_params(self):
        """
        lock_service_id()
        """
        # Set up mock (POST returns 204 No Content, no body)
        url = self.preprocess_url(base_url + '/v1/serviceids/testString/lock')
        responses.add(responses.POST,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.lock_service_id(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_lock_service_id_value_error(self):
        """
        Verify lock_service_id() raises a ValueError when the required
        id parameter is missing.
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString/lock')
        responses.add(responses.POST,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict:
            # Compare keys with == (equality), not `is` (identity): the
            # original `is not` only worked because of string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.lock_service_id(**req_copy)
class TestUnlockServiceId():
    """
    Test Class for unlock_service_id
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        If the URL ends in one or more slashes, register a regex that
        tolerates any number of trailing slashes; otherwise use it verbatim.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_unlock_service_id_all_params(self):
        """
        unlock_service_id()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString/lock')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Invoke method
        response = service.unlock_service_id(
            id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 204

    @responses.activate
    def test_unlock_service_id_value_error(self):
        """
        test_unlock_service_id_value_error()
        """
        # Set up mock
        url = self.preprocess_url(base_url + '/v1/serviceids/testString/lock')
        responses.add(responses.DELETE,
                      url,
                      status=204)

        # Set up parameter values
        id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "id": id,
        }
        for param in req_param_dict.keys():
            # BUGFIX: compare strings with equality (!=), not identity
            # (`is not`) — string identity depends on CPython interning
            # and is not a guaranteed behavior.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                service.unlock_service_id(**req_copy)
# endregion
##############################################################################
# End of Service: IdentityOperations
##############################################################################
##############################################################################
# Start of Model Tests
##############################################################################
# region
class TestApiKey():
    """
    Test Class for ApiKey
    """

    def test_api_key_serialization(self):
        """
        Test serialization/deserialization for ApiKey
        """
        # Nested model dicts, written as literals.
        response_context_model = {  # ResponseContext
            'transaction_id': 'testString',
            'operation': 'testString',
            'user_agent': 'testString',
            'url': 'testString',
            'instance_id': 'testString',
            'thread_id': 'testString',
            'host': 'testString',
            'start_time': 'testString',
            'end_time': 'testString',
            'elapsed_time': 'testString',
            'cluster_name': 'testString',
        }
        enity_history_record_model = {  # EnityHistoryRecord
            'timestamp': 'testString',
            'iam_id': 'testString',
            'iam_id_account': 'testString',
            'action': 'testString',
            'params': ['testString'],
            'message': 'testString',
        }

        # JSON representation of an ApiKey model.
        api_key_model_json = {
            'context': response_context_model,
            'id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'created_by': 'testString',
            'modified_at': 'testString',
            'name': 'testString',
            'description': 'testString',
            'iam_id': 'testString',
            'account_id': 'testString',
            'apikey': 'testString',
            'history': [enity_history_record_model],
        }

        # Round-trip: json -> model via from_dict.
        api_key_model = ApiKey.from_dict(api_key_model_json)
        assert api_key_model != False

        # Second construction path: json -> model -> kwargs -> model.
        api_key_model2 = ApiKey(**ApiKey.from_dict(api_key_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert api_key_model == api_key_model2

        # model -> json must not lose data.
        assert api_key_model.to_dict() == api_key_model_json
class TestApiKeyList():
    """
    Test Class for ApiKeyList
    """

    def test_api_key_list_serialization(self):
        """
        Test serialization/deserialization for ApiKeyList
        """
        # Nested model dicts, written as literals.
        response_context_model = {  # ResponseContext
            'transaction_id': 'testString',
            'operation': 'testString',
            'user_agent': 'testString',
            'url': 'testString',
            'instance_id': 'testString',
            'thread_id': 'testString',
            'host': 'testString',
            'start_time': 'testString',
            'end_time': 'testString',
            'elapsed_time': 'testString',
            'cluster_name': 'testString',
        }
        enity_history_record_model = {  # EnityHistoryRecord
            'timestamp': 'testString',
            'iam_id': 'testString',
            'iam_id_account': 'testString',
            'action': 'testString',
            'params': ['testString'],
            'message': 'testString',
        }
        api_key_model = {  # ApiKey
            'context': response_context_model,
            'id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'created_by': 'testString',
            'modified_at': 'testString',
            'name': 'testString',
            'description': 'testString',
            'iam_id': 'testString',
            'account_id': 'testString',
            'apikey': 'testString',
            'history': [enity_history_record_model],
        }

        # JSON representation of an ApiKeyList model.
        api_key_list_model_json = {
            'context': response_context_model,
            'offset': 26,
            'limit': 26,
            'first': 'testString',
            'previous': 'testString',
            'next': 'testString',
            'apikeys': [api_key_model],
        }

        # Round-trip: json -> model via from_dict.
        api_key_list_model = ApiKeyList.from_dict(api_key_list_model_json)
        assert api_key_list_model != False

        # Second construction path: json -> model -> kwargs -> model.
        api_key_list_model2 = ApiKeyList(**ApiKeyList.from_dict(api_key_list_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert api_key_list_model == api_key_list_model2

        # model -> json must not lose data.
        assert api_key_list_model.to_dict() == api_key_list_model_json
class TestCreateApiKeyRequest():
    """
    Test Class for CreateApiKeyRequest
    """

    def test_create_api_key_request_serialization(self):
        """
        Test serialization/deserialization for CreateApiKeyRequest
        """
        # JSON representation of a CreateApiKeyRequest model.
        create_api_key_request_model_json = {
            'name': 'testString',
            'description': 'testString',
            'iam_id': 'testString',
            'account_id': 'testString',
            'apikey': 'testString',
            'store_value': True,
        }

        # Round-trip: json -> model via from_dict.
        create_api_key_request_model = CreateApiKeyRequest.from_dict(create_api_key_request_model_json)
        assert create_api_key_request_model != False

        # Second construction path: json -> model -> kwargs -> model.
        create_api_key_request_model2 = CreateApiKeyRequest(
            **CreateApiKeyRequest.from_dict(create_api_key_request_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert create_api_key_request_model == create_api_key_request_model2

        # model -> json must not lose data.
        assert create_api_key_request_model.to_dict() == create_api_key_request_model_json
class TestEnityHistoryRecord():
    """
    Test Class for EnityHistoryRecord
    """

    def test_enity_history_record_serialization(self):
        """
        Test serialization/deserialization for EnityHistoryRecord
        """
        # JSON representation of an EnityHistoryRecord model.
        enity_history_record_model_json = {
            'timestamp': 'testString',
            'iam_id': 'testString',
            'iam_id_account': 'testString',
            'action': 'testString',
            'params': ['testString'],
            'message': 'testString',
        }

        # Round-trip: json -> model via from_dict.
        enity_history_record_model = EnityHistoryRecord.from_dict(enity_history_record_model_json)
        assert enity_history_record_model != False

        # Second construction path: json -> model -> kwargs -> model.
        enity_history_record_model2 = EnityHistoryRecord(
            **EnityHistoryRecord.from_dict(enity_history_record_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert enity_history_record_model == enity_history_record_model2

        # model -> json must not lose data.
        assert enity_history_record_model.to_dict() == enity_history_record_model_json
class TestResponseContext():
    """
    Test Class for ResponseContext
    """

    def test_response_context_serialization(self):
        """
        Test serialization/deserialization for ResponseContext
        """
        # JSON representation of a ResponseContext model.
        response_context_model_json = {
            'transaction_id': 'testString',
            'operation': 'testString',
            'user_agent': 'testString',
            'url': 'testString',
            'instance_id': 'testString',
            'thread_id': 'testString',
            'host': 'testString',
            'start_time': 'testString',
            'end_time': 'testString',
            'elapsed_time': 'testString',
            'cluster_name': 'testString',
        }

        # Round-trip: json -> model via from_dict.
        response_context_model = ResponseContext.from_dict(response_context_model_json)
        assert response_context_model != False

        # Second construction path: json -> model -> kwargs -> model.
        response_context_model2 = ResponseContext(
            **ResponseContext.from_dict(response_context_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert response_context_model == response_context_model2

        # model -> json must not lose data.
        assert response_context_model.to_dict() == response_context_model_json
class TestServiceId():
    """
    Test Class for ServiceId
    """

    def test_service_id_serialization(self):
        """
        Test serialization/deserialization for ServiceId
        """
        # Nested model dicts, written as literals.
        response_context_model = {  # ResponseContext
            'transaction_id': 'testString',
            'operation': 'testString',
            'user_agent': 'testString',
            'url': 'testString',
            'instance_id': 'testString',
            'thread_id': 'testString',
            'host': 'testString',
            'start_time': 'testString',
            'end_time': 'testString',
            'elapsed_time': 'testString',
            'cluster_name': 'testString',
        }
        enity_history_record_model = {  # EnityHistoryRecord
            'timestamp': 'testString',
            'iam_id': 'testString',
            'iam_id_account': 'testString',
            'action': 'testString',
            'params': ['testString'],
            'message': 'testString',
        }
        api_key_model = {  # ApiKey
            'context': response_context_model,
            'id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'created_by': 'testString',
            'modified_at': 'testString',
            'name': 'testString',
            'description': 'testString',
            'iam_id': 'testString',
            'account_id': 'testString',
            'apikey': 'testString',
            'history': [enity_history_record_model],
        }

        # JSON representation of a ServiceId model.
        service_id_model_json = {
            'context': response_context_model,
            'id': 'testString',
            'iam_id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'modified_at': 'testString',
            'account_id': 'testString',
            'name': 'testString',
            'description': 'testString',
            'unique_instance_crns': ['testString'],
            'history': [enity_history_record_model],
            'apikey': api_key_model,
        }

        # Round-trip: json -> model via from_dict.
        service_id_model = ServiceId.from_dict(service_id_model_json)
        assert service_id_model != False

        # Second construction path: json -> model -> kwargs -> model.
        service_id_model2 = ServiceId(**ServiceId.from_dict(service_id_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert service_id_model == service_id_model2

        # model -> json must not lose data.
        assert service_id_model.to_dict() == service_id_model_json
class TestServiceIdList():
    """
    Test Class for ServiceIdList
    """

    def test_service_id_list_serialization(self):
        """
        Test serialization/deserialization for ServiceIdList
        """
        # Nested model dicts, written as literals.
        response_context_model = {  # ResponseContext
            'transaction_id': 'testString',
            'operation': 'testString',
            'user_agent': 'testString',
            'url': 'testString',
            'instance_id': 'testString',
            'thread_id': 'testString',
            'host': 'testString',
            'start_time': 'testString',
            'end_time': 'testString',
            'elapsed_time': 'testString',
            'cluster_name': 'testString',
        }
        enity_history_record_model = {  # EnityHistoryRecord
            'timestamp': 'testString',
            'iam_id': 'testString',
            'iam_id_account': 'testString',
            'action': 'testString',
            'params': ['testString'],
            'message': 'testString',
        }
        api_key_model = {  # ApiKey
            'context': response_context_model,
            'id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'created_by': 'testString',
            'modified_at': 'testString',
            'name': 'testString',
            'description': 'testString',
            'iam_id': 'testString',
            'account_id': 'testString',
            'apikey': 'testString',
            'history': [enity_history_record_model],
        }
        service_id_model = {  # ServiceId
            'context': response_context_model,
            'id': 'testString',
            'iam_id': 'testString',
            'entity_tag': 'testString',
            'crn': 'testString',
            'locked': True,
            'created_at': 'testString',
            'modified_at': 'testString',
            'account_id': 'testString',
            'name': 'testString',
            'description': 'testString',
            'unique_instance_crns': ['testString'],
            'history': [enity_history_record_model],
            'apikey': api_key_model,
        }

        # JSON representation of a ServiceIdList model.
        service_id_list_model_json = {
            'context': response_context_model,
            'offset': 26,
            'limit': 26,
            'first': 'testString',
            'previous': 'testString',
            'next': 'testString',
            'serviceids': [service_id_model],
        }

        # Round-trip: json -> model via from_dict.
        service_id_list_model = ServiceIdList.from_dict(service_id_list_model_json)
        assert service_id_list_model != False

        # Second construction path: json -> model -> kwargs -> model.
        service_id_list_model2 = ServiceIdList(
            **ServiceIdList.from_dict(service_id_list_model_json).__dict__)

        # Both construction paths must produce equivalent instances.
        assert service_id_list_model == service_id_list_model2

        # model -> json must not lose data.
        assert service_id_list_model.to_dict() == service_id_list_model_json
# endregion
##############################################################################
# End of Model Tests
##############################################################################
| 48.872352
| 1,918
| 0.625021
| 10,064
| 87,677
| 5.117349
| 0.028219
| 0.021941
| 0.014815
| 0.019029
| 0.943496
| 0.913652
| 0.865983
| 0.84043
| 0.823926
| 0.811848
| 0
| 0.00461
| 0.230517
| 87,677
| 1,793
| 1,919
| 48.89961
| 0.758753
| 0.113451
| 0
| 0.736499
| 0
| 0.020484
| 0.418889
| 0.013777
| 0
| 0
| 0
| 0
| 0.103352
| 1
| 0.052142
| false
| 0
| 0.00838
| 0
| 0.108939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1868856f64aafe72abcc4d42f1a325fa86469a08
| 18,901
|
py
|
Python
|
sdk/python/pulumi_wavefront/service_account.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-02-20T09:48:33.000Z
|
2022-02-20T09:48:33.000Z
|
sdk/python/pulumi_wavefront/service_account.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | 40
|
2020-08-12T08:37:24.000Z
|
2022-03-31T15:51:17.000Z
|
sdk/python/pulumi_wavefront/service_account.py
|
pulumi/pulumi-wavefront
|
1d199d386ee241fa2ef94553e6cae1359ec9ccf6
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ServiceAccountArgs', 'ServiceAccount']
@pulumi.input_type
class ServiceAccountArgs:
    # NOTE(review): tfgen-generated input type. The @pulumi.input_type
    # decorator introspects the __init__ signature and the @pulumi.getter
    # properties below, so the code structure is deliberately left
    # untouched — comments only.
    def __init__(__self__, *,
                 identifier: pulumi.Input[str],
                 active: Optional[pulumi.Input[bool]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 ingestion_policy: Optional[pulumi.Input[str]] = None,
                 permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 user_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a ServiceAccount resource.
        :param pulumi.Input[str] identifier: The (unique) identifier of the service account to create. Must start with sa::
        :param pulumi.Input[bool] active: Whether or not the service account is active
        :param pulumi.Input[str] description: The description of the service account
        :param pulumi.Input[str] ingestion_policy: ID of ingestion policy
        :param pulumi.Input[Sequence[pulumi.Input[str]]] permissions: List of permission to grant to this service account. Valid options are
               `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
               `host_tag_management`, `metrics_management`, `user_management`
        :param pulumi.Input[Sequence[pulumi.Input[str]]] user_groups: List of user groups for this service account
        """
        # `identifier` is the only required argument; every optional argument
        # is stored only when explicitly provided, so unset values stay
        # absent from the underlying property bag.
        pulumi.set(__self__, "identifier", identifier)
        if active is not None:
            pulumi.set(__self__, "active", active)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if ingestion_policy is not None:
            pulumi.set(__self__, "ingestion_policy", ingestion_policy)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)
        if user_groups is not None:
            pulumi.set(__self__, "user_groups", user_groups)

    @property
    @pulumi.getter
    def identifier(self) -> pulumi.Input[str]:
        """
        The (unique) identifier of the service account to create. Must start with sa::
        """
        return pulumi.get(self, "identifier")

    @identifier.setter
    def identifier(self, value: pulumi.Input[str]):
        pulumi.set(self, "identifier", value)

    @property
    @pulumi.getter
    def active(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not the service account is active
        """
        return pulumi.get(self, "active")

    @active.setter
    def active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "active", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the service account
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="ingestionPolicy")
    def ingestion_policy(self) -> Optional[pulumi.Input[str]]:
        """
        ID of ingestion policy
        """
        return pulumi.get(self, "ingestion_policy")

    @ingestion_policy.setter
    def ingestion_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ingestion_policy", value)

    @property
    @pulumi.getter
    def permissions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of permission to grant to this service account. Valid options are
        `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
        `host_tag_management`, `metrics_management`, `user_management`
        """
        return pulumi.get(self, "permissions")

    @permissions.setter
    def permissions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "permissions", value)

    @property
    @pulumi.getter(name="userGroups")
    def user_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of user groups for this service account
        """
        return pulumi.get(self, "user_groups")

    @user_groups.setter
    def user_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "user_groups", value)
@pulumi.input_type
class _ServiceAccountState:
    # NOTE(review): tfgen-generated state type mirroring ServiceAccountArgs
    # with every field optional (used for get()/lookup, where any subset of
    # properties may be known). @pulumi.input_type introspects this class,
    # so the code structure is deliberately left untouched — comments only.
    def __init__(__self__, *,
                 active: Optional[pulumi.Input[bool]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 identifier: Optional[pulumi.Input[str]] = None,
                 ingestion_policy: Optional[pulumi.Input[str]] = None,
                 permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 user_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering ServiceAccount resources.
        :param pulumi.Input[bool] active: Whether or not the service account is active
        :param pulumi.Input[str] description: The description of the service account
        :param pulumi.Input[str] identifier: The (unique) identifier of the service account to create. Must start with sa::
        :param pulumi.Input[str] ingestion_policy: ID of ingestion policy
        :param pulumi.Input[Sequence[pulumi.Input[str]]] permissions: List of permission to grant to this service account. Valid options are
               `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
               `host_tag_management`, `metrics_management`, `user_management`
        :param pulumi.Input[Sequence[pulumi.Input[str]]] user_groups: List of user groups for this service account
        """
        # Each value is stored only when explicitly provided, so unset values
        # stay absent from the underlying property bag.
        if active is not None:
            pulumi.set(__self__, "active", active)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if identifier is not None:
            pulumi.set(__self__, "identifier", identifier)
        if ingestion_policy is not None:
            pulumi.set(__self__, "ingestion_policy", ingestion_policy)
        if permissions is not None:
            pulumi.set(__self__, "permissions", permissions)
        if user_groups is not None:
            pulumi.set(__self__, "user_groups", user_groups)

    @property
    @pulumi.getter
    def active(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not the service account is active
        """
        return pulumi.get(self, "active")

    @active.setter
    def active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "active", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the service account
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def identifier(self) -> Optional[pulumi.Input[str]]:
        """
        The (unique) identifier of the service account to create. Must start with sa::
        """
        return pulumi.get(self, "identifier")

    @identifier.setter
    def identifier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "identifier", value)

    @property
    @pulumi.getter(name="ingestionPolicy")
    def ingestion_policy(self) -> Optional[pulumi.Input[str]]:
        """
        ID of ingestion policy
        """
        return pulumi.get(self, "ingestion_policy")

    @ingestion_policy.setter
    def ingestion_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ingestion_policy", value)

    @property
    @pulumi.getter
    def permissions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of permission to grant to this service account. Valid options are
        `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
        `host_tag_management`, `metrics_management`, `user_management`
        """
        return pulumi.get(self, "permissions")

    @permissions.setter
    def permissions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "permissions", value)

    @property
    @pulumi.getter(name="userGroups")
    def user_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of user groups for this service account
        """
        return pulumi.get(self, "user_groups")

    @user_groups.setter
    def user_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "user_groups", value)
class ServiceAccount(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
active: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None,
ingestion_policy: Optional[pulumi.Input[str]] = None,
permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
user_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Provides a Wavefront Service Account Resource. This allows service accounts to be created, updated, and deleted.
## Example Usage
```python
import pulumi
import pulumi_wavefront as wavefront
basic = wavefront.ServiceAccount("basic",
active=True,
identifier="sa::tftesting")
```
## Import
Service accounts can be imported using `identifier`, e.g.
```sh
$ pulumi import wavefront:index/serviceAccount:ServiceAccount basic sa::tftesting
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] active: Whether or not the service account is active
:param pulumi.Input[str] description: The description of the service account
:param pulumi.Input[str] identifier: The (unique) identifier of the service account to create. Must start with sa::
:param pulumi.Input[str] ingestion_policy: ID of ingestion policy
:param pulumi.Input[Sequence[pulumi.Input[str]]] permissions: List of permission to grant to this service account. Valid options are
`agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
`host_tag_management`, `metrics_management`, `user_management`
:param pulumi.Input[Sequence[pulumi.Input[str]]] user_groups: List of user groups for this service account
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServiceAccountArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Wavefront Service Account Resource. This allows service accounts to be created, updated, and deleted.
## Example Usage
```python
import pulumi
import pulumi_wavefront as wavefront
basic = wavefront.ServiceAccount("basic",
active=True,
identifier="sa::tftesting")
```
## Import
Service accounts can be imported using `identifier`, e.g.
```sh
$ pulumi import wavefront:index/serviceAccount:ServiceAccount basic sa::tftesting
```
:param str resource_name: The name of the resource.
:param ServiceAccountArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to ``_internal_init``.

    Callers may pass either a ``ServiceAccountArgs`` bundle or the
    equivalent plain keyword arguments; both shapes are normalised
    onto the same internal initializer.
    """
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(
        ServiceAccountArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Plain keyword-argument form: forward everything untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object form: expand its fields into keyword arguments.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   active: Optional[pulumi.Input[bool]] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   identifier: Optional[pulumi.Input[str]] = None,
                   ingestion_policy: Optional[pulumi.Input[str]] = None,
                   permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   user_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   __props__=None):
    """Shared initializer behind both ``__init__`` overloads.

    Validates the resource options, fills in the SDK version, and
    either builds a fresh property bag (new resource) or accepts a
    pre-built one together with ``opts.id`` (resource lookup).

    :raises TypeError: if *opts* is not a ``ResourceOptions``, if
        ``__props__`` is passed without ``opts.id``, or if the
        required ``identifier`` property is missing.
    """
    opts = pulumi.ResourceOptions() if opts is None else opts
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        # Stamp the provider SDK version onto the options.
        opts.version = _utilities.get_version()
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        # `identifier` is the only required input; an existing URN also satisfies it.
        if identifier is None and not opts.urn:
            raise TypeError("Missing required property 'identifier'")
        __props__ = ServiceAccountArgs.__new__(ServiceAccountArgs)
        props_dict = __props__.__dict__
        props_dict["active"] = active
        props_dict["description"] = description
        props_dict["identifier"] = identifier
        props_dict["ingestion_policy"] = ingestion_policy
        props_dict["permissions"] = permissions
        props_dict["user_groups"] = user_groups
    super(ServiceAccount, __self__).__init__(
        'wavefront:index/serviceAccount:ServiceAccount',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        active: Optional[pulumi.Input[bool]] = None,
        description: Optional[pulumi.Input[str]] = None,
        identifier: Optional[pulumi.Input[str]] = None,
        ingestion_policy: Optional[pulumi.Input[str]] = None,
        permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        user_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'ServiceAccount':
    """
    Get an existing ServiceAccount resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[bool] active: Whether or not the service account is active
    :param pulumi.Input[str] description: The description of the service account
    :param pulumi.Input[str] identifier: The (unique) identifier of the service account to create. Must start with sa::
    :param pulumi.Input[str] ingestion_policy: ID of ingestion policy
    :param pulumi.Input[Sequence[pulumi.Input[str]]] permissions: List of permission to grant to this service account. Valid options are
           `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
           `host_tag_management`, `metrics_management`, `user_management`
    :param pulumi.Input[Sequence[pulumi.Input[str]]] user_groups: List of user groups for this service account
    """
    # Force the provider ID into the options so the engine performs a lookup.
    merged_opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    state = _ServiceAccountState.__new__(_ServiceAccountState)
    for prop_name, prop_value in (
            ("active", active),
            ("description", description),
            ("identifier", identifier),
            ("ingestion_policy", ingestion_policy),
            ("permissions", permissions),
            ("user_groups", user_groups)):
        state.__dict__[prop_name] = prop_value
    return ServiceAccount(resource_name, opts=merged_opts, __props__=state)
@property
@pulumi.getter
def active(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether or not the service account is active.
    """
    return pulumi.get(self, "active")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
    """
    The description of the service account.
    """
    return pulumi.get(self, "description")
@property
@pulumi.getter
def identifier(self) -> pulumi.Output[str]:
    """
    The (unique) identifier of the service account. Must start with ``sa::``.
    """
    return pulumi.get(self, "identifier")
@property
@pulumi.getter(name="ingestionPolicy")
def ingestion_policy(self) -> pulumi.Output[Optional[str]]:
    """
    ID of the ingestion policy.
    """
    return pulumi.get(self, "ingestion_policy")
@property
@pulumi.getter
def permissions(self) -> pulumi.Output[Sequence[str]]:
    """
    List of permissions granted to this service account. Valid options are
    `agent_management`, `alerts_management`, `dashboard_management`, `embedded_charts`, `events_management`, `external_links_management`,
    `host_tag_management`, `metrics_management`, `user_management`
    """
    return pulumi.get(self, "permissions")
@property
@pulumi.getter(name="userGroups")
def user_groups(self) -> pulumi.Output[Sequence[str]]:
    """
    List of user groups for this service account.
    """
    return pulumi.get(self, "user_groups")
| 43.350917
| 148
| 0.650495
| 2,075
| 18,901
| 5.718072
| 0.085301
| 0.100126
| 0.079056
| 0.054783
| 0.853519
| 0.835735
| 0.818458
| 0.803287
| 0.790223
| 0.78011
| 0
| 0.00007
| 0.244908
| 18,901
| 435
| 149
| 43.450575
| 0.831278
| 0.346595
| 0
| 0.767241
| 1
| 0
| 0.084216
| 0.004006
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159483
| false
| 0.00431
| 0.021552
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1994904dccc7d18f494c9cd20311d3a121a5abd
| 29
|
py
|
Python
|
katas/beta/easy_kata.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/beta/easy_kata.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/beta/easy_kata.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
def print_x(x):
    """Return *x* unchanged (identity function).

    Despite the name, nothing is printed; the argument is simply
    handed back to the caller.
    """
    result = x
    return result
| 9.666667
| 15
| 0.62069
| 6
| 29
| 2.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.275862
| 29
| 2
| 16
| 14.5
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
|
0
| 7
|
a1bf4449134103523df95854cd263ed8835a684f
| 16,394
|
py
|
Python
|
docker-app/qfieldcloud/core/tests/test_queryset.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
docker-app/qfieldcloud/core/tests/test_queryset.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
docker-app/qfieldcloud/core/tests/test_queryset.py
|
stcz/qfieldcloud
|
b128f8689268d73de052e67e0c09c6c0d5abdc05
|
[
"MIT"
] | null | null | null |
import logging
from qfieldcloud.authentication.models import AuthToken
from qfieldcloud.core import querysets_utils
from qfieldcloud.core.models import (
Organization,
OrganizationMember,
Project,
ProjectCollaborator,
ProjectQueryset,
Team,
TeamMember,
User,
)
from rest_framework.test import APITestCase
# Silence all log output for the test run: every level up to and
# including CRITICAL is suppressed.
logging.disable(logging.CRITICAL)
class QfcTestCase(APITestCase):
    """Exercises the custom querysets that annotate projects and users
    with their effective role and the origin of that role.

    Fixture overview (built in ``setUp``):
      * user1 owns p1/p2 and organization1, and collaborates on p7
      * user2 owns p3/p4 and is an ADMIN of organization1
      * user3 owns p7/p8, is a MEMBER of organization1 and of team1
      * organization1 owns p5, p6 and p9
      * team1 (inside organization1) collaborates on p9 as EDITOR
    """

    def setUp(self):
        # user1 owns p1 and p2
        # user1 owns o1
        # user1 collaborates on p7
        self.user1 = User.objects.create_user(username="user1", password="abc123")
        self.token1 = AuthToken.objects.get_or_create(user=self.user1)[0]

        # user3 owns p7 and p8
        # user2 admins o1
        self.user2 = User.objects.create_user(username="user2", password="abc123")
        self.token2 = AuthToken.objects.get_or_create(user=self.user2)[0]

        # user3 owns p7 and p8
        # user3 is member of o1
        self.user3 = User.objects.create_user(username="user3", password="abc123")
        self.token3 = AuthToken.objects.get_or_create(user=self.user3)[0]

        # organization1 owns p5, p6 and p9
        self.organization1 = Organization.objects.create(
            username="organization1",
            password="abc123",
            user_type=2,
            organization_owner=self.user1,
        )

        # user2 is an ADMIN of organization1
        self.membership1 = OrganizationMember.objects.create(
            organization=self.organization1,
            member=self.user2,
            role=OrganizationMember.Roles.ADMIN,
        )

        # user3 is a plain MEMBER of organization1
        self.membership2 = OrganizationMember.objects.create(
            organization=self.organization1,
            member=self.user3,
            role=OrganizationMember.Roles.MEMBER,
        )

        # team1 lives inside organization1; user3 is its only member
        self.team1 = Team.objects.create(
            username="team1",
            password="abc123",
            user_type=User.TYPE_TEAM,
            team_organization=self.organization1,
        )
        self.teammembership1 = TeamMember.objects.create(
            team=self.team1,
            member=self.user3,
        )

        # Odd-numbered projects are private, even-numbered are public.
        self.project1 = Project.objects.create(
            name="project1", is_public=False, owner=self.user1
        )
        self.project2 = Project.objects.create(
            name="project2", is_public=True, owner=self.user1
        )
        self.project3 = Project.objects.create(
            name="project3", is_public=False, owner=self.user2
        )
        self.project4 = Project.objects.create(
            name="project4", is_public=True, owner=self.user2
        )
        self.project5 = Project.objects.create(
            name="project5", is_public=False, owner=self.organization1
        )
        self.project6 = Project.objects.create(
            name="project6", is_public=True, owner=self.organization1
        )
        self.project7 = Project.objects.create(
            name="project7", is_public=False, owner=self.user3
        )
        self.project8 = Project.objects.create(
            name="project8", is_public=True, owner=self.user3
        )
        self.project9 = Project.objects.create(
            name="project9", is_public=False, owner=self.organization1
        )

        # user1 is a REPORTER collaborator on user3's private project7
        self.collaborator1 = ProjectCollaborator.objects.create(
            project=self.project7,
            collaborator=self.user1,
            role=ProjectCollaborator.Roles.REPORTER,
        )
        # team1 is an EDITOR collaborator on organization1's private project9
        self.collaborator2 = ProjectCollaborator.objects.create(
            project=self.project9,
            collaborator=self.team1,
            role=ProjectCollaborator.Roles.EDITOR,
        )

    def test_get_users(self):
        """``get_users`` filtering by query string, user kind, organization and project."""
        # should get all the available users
        queryset = querysets_utils.get_users("")
        self.assertEqual(len(queryset), 5)
        self.assertTrue(self.user1 in queryset)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)
        self.assertTrue(self.organization1.user_ptr in queryset)
        self.assertTrue(self.team1.user_ptr in queryset)

        # should get only the users matching the search query
        queryset = querysets_utils.get_users("user3")
        self.assertEqual(len(queryset), 1)
        self.assertTrue(self.user3 in queryset)

        # should get only the users that are not an organization
        queryset = querysets_utils.get_users("", exclude_organizations=True)
        self.assertEqual(len(queryset), 4)
        self.assertTrue(self.user1 in queryset)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)
        self.assertTrue(self.team1.user_ptr in queryset)

        # should get only the users that are not a team
        queryset = querysets_utils.get_users("", exclude_teams=True)
        self.assertEqual(len(queryset), 4)
        self.assertTrue(self.user1 in queryset)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)
        self.assertTrue(self.organization1.user_ptr in queryset)

        # should get all the users, that are not members or owners of an organization
        queryset = querysets_utils.get_users("", organization=self.organization1)
        self.assertEqual(len(queryset), 1)

        # should get all the users, that are not members or owner of a project
        queryset = querysets_utils.get_users("", project=self.project1)
        self.assertEqual(len(queryset), 3)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)
        self.assertTrue(self.organization1.user_ptr in queryset)

        # should get all the users, that are not members or owner of a project
        queryset = querysets_utils.get_users("", project=self.project5)
        self.assertEqual(len(queryset), 4)
        self.assertTrue(self.user1 in queryset)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)
        self.assertTrue(self.team1.user_ptr in queryset)

        # should get all the users, that are not members or owner of a project and are not an organization
        queryset = querysets_utils.get_users(
            "", project=self.project1, exclude_organizations=True
        )
        self.assertEqual(len(queryset), 2)
        self.assertTrue(self.user2 in queryset)
        self.assertTrue(self.user3 in queryset)

    def test_projects_roles_and_role_origins(self):
        """
        Checks user_role and user_role_origin are correctly defined
        """

        def p(proj, user):
            # Shorthand: the project as seen by `user`, carrying role annotations.
            return Project.objects.for_user(user).get(pk=proj.pk)

        # fmt: off
        self.assertEqual(p(self.project1, self.user1).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project1, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project2, self.user1).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project2, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        with self.assertRaises(Project.DoesNotExist):
            p(self.project3, self.user1)
        self.assertEqual(p(self.project4, self.user1).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project4, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        self.assertEqual(p(self.project5, self.user1).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project5, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)
        self.assertEqual(p(self.project6, self.user1).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project6, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)
        self.assertEqual(p(self.project7, self.user1).user_role, ProjectCollaborator.Roles.REPORTER)
        self.assertEqual(p(self.project7, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.COLLABORATOR.value)
        self.assertEqual(p(self.project8, self.user1).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project8, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        self.assertEqual(p(self.project9, self.user1).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project9, self.user1).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)

        with self.assertRaises(Project.DoesNotExist):
            p(self.project1, self.user2)
        self.assertEqual(p(self.project2, self.user2).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project2, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        self.assertEqual(p(self.project3, self.user2).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project3, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project4, self.user2).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project4, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project5, self.user2).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project5, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)
        self.assertEqual(p(self.project6, self.user2).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project6, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)
        with self.assertRaises(Project.DoesNotExist):
            p(self.project7, self.user2)
        self.assertEqual(p(self.project8, self.user2).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project8, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        self.assertEqual(p(self.project9, self.user2).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project9, self.user2).user_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)

        with self.assertRaises(Project.DoesNotExist):
            p(self.project1, self.user3)
        self.assertEqual(p(self.project2, self.user3).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project2, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        with self.assertRaises(Project.DoesNotExist):
            p(self.project3, self.user3)
        self.assertEqual(p(self.project4, self.user3).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project4, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        with self.assertRaises(Project.DoesNotExist):
            p(self.project5, self.user3)
        self.assertEqual(p(self.project6, self.user3).user_role, ProjectCollaborator.Roles.READER)
        self.assertEqual(p(self.project6, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.PUBLIC.value)
        self.assertEqual(p(self.project7, self.user3).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project7, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project8, self.user3).user_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project8, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project9, self.user3).user_role, ProjectCollaborator.Roles.EDITOR)
        self.assertEqual(p(self.project9, self.user3).user_role_origin, ProjectQueryset.RoleOrigins.TEAMMEMBER.value)
        # fmt: on

    def test_user_roles_and_role_origins(self):
        """
        Checks project_role and project_role_origin are correctly defined
        """

        def p(proj, user):
            # Shorthand: the user as seen from `proj`, carrying role annotations.
            return User.objects.for_project(proj).get(pk=user.pk)

        # fmt: off
        self.assertEqual(p(self.project1, self.user1).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project1, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project2, self.user1).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project2, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        with self.assertRaises(User.DoesNotExist):
            p(self.project3, self.user1)
        with self.assertRaises(User.DoesNotExist):
            p(self.project4, self.user1)
        self.assertEqual(p(self.project5, self.user1).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project5, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)
        self.assertEqual(p(self.project6, self.user1).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project6, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)
        self.assertEqual(p(self.project7, self.user1).project_role, ProjectCollaborator.Roles.REPORTER)
        self.assertEqual(p(self.project7, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.COLLABORATOR.value)
        with self.assertRaises(User.DoesNotExist):
            p(self.project8, self.user1)
        self.assertEqual(p(self.project9, self.user1).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project9, self.user1).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONOWNER.value)

        with self.assertRaises(User.DoesNotExist):
            p(self.project1, self.user2)
        with self.assertRaises(User.DoesNotExist):
            p(self.project2, self.user2)
        self.assertEqual(p(self.project3, self.user2).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project3, self.user2).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project4, self.user2).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project4, self.user2).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project5, self.user2).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project5, self.user2).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)
        self.assertEqual(p(self.project6, self.user2).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project6, self.user2).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)
        with self.assertRaises(User.DoesNotExist):
            p(self.project7, self.user2)
        with self.assertRaises(User.DoesNotExist):
            p(self.project8, self.user2)
        self.assertEqual(p(self.project9, self.user2).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project9, self.user2).project_role_origin, ProjectQueryset.RoleOrigins.ORGANIZATIONADMIN.value)

        with self.assertRaises(User.DoesNotExist):
            p(self.project1, self.user3)
        with self.assertRaises(User.DoesNotExist):
            p(self.project2, self.user3)
        with self.assertRaises(User.DoesNotExist):
            p(self.project3, self.user3)
        with self.assertRaises(User.DoesNotExist):
            p(self.project4, self.user3)
        with self.assertRaises(User.DoesNotExist):
            p(self.project5, self.user3)
        with self.assertRaises(User.DoesNotExist):
            p(self.project6, self.user3)
        self.assertEqual(p(self.project7, self.user3).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project7, self.user3).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project8, self.user3).project_role, ProjectCollaborator.Roles.ADMIN)
        self.assertEqual(p(self.project8, self.user3).project_role_origin, ProjectQueryset.RoleOrigins.PROJECTOWNER.value)
        self.assertEqual(p(self.project9, self.user3).project_role, ProjectCollaborator.Roles.EDITOR)
        self.assertEqual(p(self.project9, self.user3).project_role_origin, ProjectQueryset.RoleOrigins.TEAMMEMBER.value)
        # fmt: on
| 53.055016
| 127
| 0.711358
| 1,885
| 16,394
| 6.093899
| 0.072149
| 0.038739
| 0.097502
| 0.121877
| 0.842692
| 0.805693
| 0.784278
| 0.752416
| 0.728389
| 0.613476
| 0
| 0.023446
| 0.185678
| 16,394
| 308
| 128
| 53.227273
| 0.837004
| 0.049469
| 0
| 0.27686
| 0
| 0
| 0.009022
| 0
| 0
| 0
| 0
| 0
| 0.495868
| 1
| 0.024793
| false
| 0.020661
| 0.020661
| 0.008264
| 0.057851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1e3fb76cfeda7f0d50d7419bff92a7044440a47
| 2,900
|
py
|
Python
|
automation_orchestrator/orchestrator/migrations/0004_v022_2.py
|
basico-ps/AutomationOrchestrator
|
c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4
|
[
"BSD-3-Clause"
] | 26
|
2019-11-11T14:15:55.000Z
|
2022-02-07T07:32:33.000Z
|
automation_orchestrator/orchestrator/migrations/0004_v022_2.py
|
basico-ps/AutomationOrchestrator
|
c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4
|
[
"BSD-3-Clause"
] | 50
|
2020-01-07T13:38:51.000Z
|
2021-11-29T11:31:14.000Z
|
automation_orchestrator/orchestrator/migrations/0004_v022_2.py
|
basico-ps/AutomationOrchestrator
|
c2e9e2496acb53f00e51a03e6d2cada17fd7c5e4
|
[
"BSD-3-Clause"
] | 9
|
2020-02-20T12:06:07.000Z
|
2022-01-10T12:41:34.000Z
|
# Generated by Django 3.0.3 on 2020-02-27 08:03
from django.db import migrations, models
import sortedm2m.fields
class Migration(migrations.Migration):
    """Relax each trigger's single ``bot`` foreign key to be nullable and
    add a sorted many-to-many ``bots`` field alongside it.

    The operations are generated from two tables instead of being spelled
    out ten times; the resulting operation list is identical to the
    original hand-written one (same operations, same order).
    """

    dependencies = [
        ('orchestrator', '0003_v022_1'),
    ]

    # Order of the AlterField operations (single-bot FK made nullable).
    _ALTER_MODELS = (
        'filetrigger',
        'scheduletrigger',
        'emailimaptrigger',
        'emailoutlooktrigger',
        'apitrigger',
    )

    # Order of the AddField operations, with each model's reverse
    # accessor name for the new many-to-many field.
    _ADD_MODELS = (
        ('apitrigger', 'api_trigger_bot'),
        ('emailimaptrigger', 'email_imap_trigger_bot'),
        ('emailoutlooktrigger', 'email_outlook_trigger_bot'),
        ('filetrigger', 'file_trigger_bot'),
        ('scheduletrigger', 'schedule_trigger_bot'),
    )

    operations = [
        migrations.AlterField(
            model_name=trigger_model,
            name='bot',
            field=models.ForeignKey(help_text='Select the bot for this trigger.', null=True, on_delete=models.deletion.PROTECT, to='orchestrator.Bot'),
        )
        for trigger_model in _ALTER_MODELS
    ] + [
        migrations.AddField(
            model_name=trigger_model,
            name='bots',
            field=sortedm2m.fields.SortedManyToManyField(help_text='Select the bots for this trigger.', related_name=reverse_name, to='orchestrator.Bot'),
        )
        for trigger_model, reverse_name in _ADD_MODELS
    ]
| 44.615385
| 169
| 0.640345
| 308
| 2,900
| 5.886364
| 0.194805
| 0.049641
| 0.07722
| 0.093767
| 0.800331
| 0.785439
| 0.785439
| 0.776062
| 0.776062
| 0.656922
| 0
| 0.013188
| 0.241724
| 2,900
| 64
| 170
| 45.3125
| 0.811278
| 0.015517
| 0
| 0.775862
| 1
| 0
| 0.274448
| 0.016474
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
62d3dabe2d0ce869bae0b7b6634e37d8ca35caf2
| 442,767
|
py
|
Python
|
src/ks33requests/schemas/s3_api.py
|
tanbro/ks33requests
|
b86fcded23beeb3b32ec0b7b0c6855394b26033d
|
[
"Apache-2.0"
] | 1
|
2021-08-30T03:31:45.000Z
|
2021-08-30T03:31:45.000Z
|
src/ks33requests/schemas/s3_api.py
|
tanbro/ks33requests
|
b86fcded23beeb3b32ec0b7b0c6855394b26033d
|
[
"Apache-2.0"
] | 1
|
2019-06-27T02:42:26.000Z
|
2019-06-27T02:42:26.000Z
|
src/ks33requests/schemas/s3_api.py
|
tanbro/ks33requests
|
b86fcded23beeb3b32ec0b7b0c6855394b26033d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Mon Jun 10 11:49:52 2019 by generateDS.py version 2.32.0.
# Python 3.6.7 (default, Oct 22 2018, 11:32:17) [GCC 8.2.0]
#
# Command line options:
# ('-f', '')
# ('-o', 's3_api.py')
# ('-s', 's3_sub.py')
# ('--super', 's3_api')
#
# Command line arguments:
# schemas/AmazonS3.xsd
#
# Command line:
# generateDS.py -f -o "s3_api.py" -s "s3_sub.py" --super="s3_api" schemas/AmazonS3.xsd
#
# Current working directory (os.getcwd()):
# ks33requests
#
import base64
import datetime as datetime_
import decimal as decimal_
import os
import re as re_
import sys
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# When true, the generated parsers validate simple-type values against
# the facets declared in the XML schema.
Validate_simpletypes_ = True
# Portable "string" base type: Python 2's ``basestring`` covers both
# str and unicode; on Python 3 plain ``str`` suffices.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse XML from *infile* (a path string, path-like object, or
    file object) and return the resulting ElementTree document.

    When no *parser* is supplied, an lxml ElementTree-compatible
    parser is preferred (it ignores comments); on plain ``xml.etree``
    the standard ``XMLParser`` is used instead.
    """
    chosen_parser = parser
    if chosen_parser is None:
        try:
            chosen_parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # Running on xml.etree, which lacks ETCompatXMLParser.
            chosen_parser = etree_.XMLParser()
    source = infile
    try:
        if isinstance(source, os.PathLike):
            # Normalise a path-like object to a plain string path.
            source = os.path.join(source)
    except AttributeError:
        # os.PathLike is unavailable on very old interpreters; pass
        # the source through unchanged.
        pass
    return etree_.parse(source, parser=chosen_parser, **kwargs)
def parsexmlstring_(instring, parser=None, **kwargs):
    """Parse XML from the string *instring* and return the root element.

    Parser selection mirrors ``parsexml_``: prefer lxml's
    ElementTree-compatible parser (ignores comments), falling back to
    the standard ``XMLParser`` on plain ``xml.etree``.
    """
    active_parser = parser
    if active_parser is None:
        try:
            active_parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # xml.etree has no ETCompatXMLParser; use the default one.
            active_parser = etree_.XMLParser()
    return etree_.fromstring(instring, parser=active_parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for an example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
# Additionally, the generatedsnamespaces module can contain a python
# dictionary named GenerateDSNamespaceTypePrefixes that associates element
# types with the namespace prefixes that are to be added to the
# "xsi:type" attribute value. See the exportAttributes method of
# any generated element type and the generation of "xsi:type" for an
# example of the use of this table.
# An example table:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceTypePrefixes = {
# "ElementtypeC": "aaa:",
# "ElementtypeD": "bbb:",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
try:
from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_
except ImportError:
GenerateDSNamespaceTypePrefixes_ = {}
#
# The super-class for enum types
#
try:
from enum import Enum
except ImportError:
Enum = object
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError as exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_parse_string(self, input_data, node=None, input_name=''):
return input_data
def gds_validate_string(self, input_data, node=None, input_name=''):
if not input_data:
return ''
else:
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_parse_integer(self, input_data, node=None, input_name=''):
try:
ival = int(input_data)
except (TypeError, ValueError) as exp:
raise_parse_error(node, 'requires integer: %s' % exp)
return ival
def gds_validate_integer(self, input_data, node=None, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_integer_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
int(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return values
def gds_format_float(self, input_data, input_name=''):
return ('%.15f' % input_data).rstrip('0')
def gds_parse_float(self, input_data, node=None, input_name=''):
try:
fval_ = float(input_data)
except (TypeError, ValueError) as exp:
raise_parse_error(node, 'requires float or double: %s' % exp)
return fval_
def gds_validate_float(self, input_data, node=None, input_name=''):
try:
value = float(input_data)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return value
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_float_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return values
def gds_format_decimal(self, input_data, input_name=''):
return ('%0.10f' % input_data).rstrip('0')
def gds_parse_decimal(self, input_data, node=None, input_name=''):
try:
decimal_.Decimal(input_data)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires decimal value')
return input_data
def gds_validate_decimal(self, input_data, node=None, input_name=''):
try:
value = decimal_.Decimal(input_data)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires decimal value')
return value
def gds_format_decimal_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_decimal_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
decimal_.Decimal(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of decimal values')
return values
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_parse_double(self, input_data, node=None, input_name=''):
try:
fval_ = float(input_data)
except (TypeError, ValueError) as exp:
raise_parse_error(node, 'requires float or double: %s' % exp)
return fval_
    def gds_validate_double(self, input_data, node=None, input_name=''):
        """Return *input_data* unchanged; no extra double validation here."""
        return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_double_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return values
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_parse_boolean(self, input_data, node=None, input_name=''):
if input_data in ('true', '1'):
bval = True
elif input_data in ('false', '0'):
bval = False
else:
raise_parse_error(node, 'requires boolean')
return bval
    def gds_validate_boolean(self, input_data, node=None, input_name=''):
        """Return *input_data* unchanged; no extra boolean validation here."""
        return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % ' '.join(input_data)
def gds_validate_boolean_list(
self, input_data, node=None, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0',):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return values
    def gds_validate_datetime(self, input_data, node=None, input_name=''):
        """Return *input_data* unchanged; no extra datetime validation here."""
        return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
    @classmethod
    def gds_parse_datetime(cls, input_data):
        """Parse an xsd:dateTime string into a datetime.

        Handles a trailing 'Z' or a '+HH:MM'/'-HH:MM' offset (turned into a
        _FixedOffsetTZ tzinfo) and an optional fractional-seconds part.
        """
        tz = None
        if input_data[-1] == 'Z':
            tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
            input_data = input_data[:-1]
        else:
            results = GeneratedsSuper.tzoff_pattern.search(input_data)
            if results is not None:
                tzoff_parts = results.group(2).split(':')
                tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                if results.group(1) == '-':
                    tzoff *= -1
                tz = GeneratedsSuper._FixedOffsetTZ(
                    tzoff, results.group(0))
                # Drop the 6-character '+HH:MM' suffix before strptime.
                input_data = input_data[:-6]
        time_parts = input_data.split('.')
        if len(time_parts) > 1:
            # Normalize the fraction to exactly 6 digits so '%f' parses it.
            micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
            input_data = '%s.%s' % (
                time_parts[0], "{}".format(micro_seconds).rjust(6, "0"),)
            dt = datetime_.datetime.strptime(
                input_data, '%Y-%m-%dT%H:%M:%S.%f')
        else:
            dt = datetime_.datetime.strptime(
                input_data, '%Y-%m-%dT%H:%M:%S')
        dt = dt.replace(tzinfo=tz)
        return dt
    def gds_validate_date(self, input_data, node=None, input_name=''):
        """Return *input_data* unchanged; no extra date validation here."""
        return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(
hours, minutes)
except AttributeError:
pass
return _svalue
    @classmethod
    def gds_parse_date(cls, input_data):
        """Parse an xsd:date string ('YYYY-MM-DD' with optional tz) to a date.

        NOTE(review): the timezone suffix is parsed and attached to an
        intermediate datetime, but the final .date() call drops it.
        """
        tz = None
        if input_data[-1] == 'Z':
            tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
            input_data = input_data[:-1]
        else:
            results = GeneratedsSuper.tzoff_pattern.search(input_data)
            if results is not None:
                tzoff_parts = results.group(2).split(':')
                tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                if results.group(1) == '-':
                    tzoff *= -1
                tz = GeneratedsSuper._FixedOffsetTZ(
                    tzoff, results.group(0))
                # Drop the 6-character '+HH:MM' suffix before strptime.
                input_data = input_data[:-6]
        dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
        dt = dt.replace(tzinfo=tz)
        return dt.date()
    def gds_validate_time(self, input_data, node=None, input_name=''):
        """Return *input_data* unchanged; no extra time validation here."""
        return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
def gds_validate_simple_patterns(self, patterns, target):
# pat is a list of lists of strings/patterns.
# The target value must match at least one of the patterns
# in order for the test to succeed.
found1 = True
for patterns1 in patterns:
found2 = False
for patterns2 in patterns1:
mo = re_.search(patterns2, target)
if mo is not None and len(mo.group(0)) == len(target):
found2 = True
break
if not found2:
found1 = False
break
return found1
    @classmethod
    def gds_parse_time(cls, input_data):
        """Parse an xsd:time string ('HH:MM:SS[.ffffff]' with optional tz) to a time.

        NOTE(review): the parsed timezone is attached to an intermediate
        datetime, but the final .time() call drops it (datetime.time()
        excludes tzinfo).
        """
        tz = None
        if input_data[-1] == 'Z':
            tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
            input_data = input_data[:-1]
        else:
            results = GeneratedsSuper.tzoff_pattern.search(input_data)
            if results is not None:
                tzoff_parts = results.group(2).split(':')
                tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                if results.group(1) == '-':
                    tzoff *= -1
                tz = GeneratedsSuper._FixedOffsetTZ(
                    tzoff, results.group(0))
                # Drop the 6-character '+HH:MM' suffix before strptime.
                input_data = input_data[:-6]
        if len(input_data.split('.')) > 1:
            dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
        else:
            dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
        dt = dt.replace(tzinfo=tz)
        return dt.time()
    def gds_str_lower(self, instring):
        """Return *instring* lower-cased."""
        return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
content = etree_.tostring(node, encoding="unicode")
return content
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.items()))
@staticmethod
def gds_encode(instring):
if sys.version_info.major == 2:
if ExternalEncoding:
encoding = ExternalEncoding
else:
encoding = 'utf-8'
return instring.encode(encoding)
else:
return instring
    @staticmethod
    def convert_unicode(instring):
        """Return *instring* XML-escaped and encoded for output.

        str values go through quote_xml(); Python 2 unicode values are
        escaped then UTF-8 encoded (the version check short-circuits, so
        the name `unicode` is never evaluated on Python 3); anything else
        is stringified and passed through gds_encode().
        """
        if isinstance(instring, str):
            result = quote_xml(instring)
        elif sys.version_info.major == 2 and isinstance(instring, unicode):
            result = quote_xml(instring).encode('utf8')
        else:
            result = GeneratedsSuper.gds_encode(str(instring))
        return result
def __eq__(self, other):
if type(self) != type(other):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)
    # Django ETL transform hooks.
    def gds_djo_etl_transform(self):
        """No-op hook; override in a subclass to transform this object."""
        pass
    def gds_djo_etl_transform_db_obj(self, dbobj):
        """No-op hook; override in a subclass to transform the DB object *dbobj*."""
        pass
    # SQLAlchemy ETL transform hooks.
    def gds_sqa_etl_transform(self):
        """No-op hook; override in a subclass.  Returns (0, None) by default."""
        return 0, None
    def gds_sqa_etl_transform_db_obj(self, dbobj):
        """No-op hook; override in a subclass to transform the DB object *dbobj*."""
        pass
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    Looks up an attribute named '<ClassName>Sub' on *module*; returns
    None when the module defines no such subclass.
    '''
    subclass_name = class_.__name__ + 'Sub'
    return getattr(module, subclass_name, None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding used by gds_encode() on Python 2; '' selects the 'utf-8' fallback.
ExternalEncoding = ''
# Splits an lxml tag into its optional '{namespace}' part and local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace (newlines, carriage returns, spaces).
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Captures the namespace URI and local name from a '{uri}name' tag.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches CDATA sections (non-greedy, across newlines) so quote_xml() can skip them.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write 4*level spaces of indentation to *outfile* when pretty-printing."""
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape XML markup characters in *inStr*, leaving CDATA sections intact."""
    if not inStr:
        return ''
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    pos = 0
    for mo in CDATA_pattern_.finditer(text):
        # Escape the text before the CDATA section, copy the section verbatim.
        pieces.append(quote_xml_aux(text[pos:mo.start()]))
        pieces.append(text[mo.start():mo.end()])
        pos = mo.end()
    pieces.append(quote_xml_aux(text[pos:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the XML special characters '&', '<', '>' in *inStr*.

    '&' must be replaced first so the ampersands introduced by the other
    replacements are not escaped again.  The previous body replaced each
    character with itself (the entity references had been corrupted),
    making the function a no-op that emitted invalid XML.
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
    """Return *inStr* rendered as a quoted Python string literal.

    Prefers single quotes; uses triple quotes for multi-line values and
    switches to double quotes (escaping embedded '"') when the value
    contains a single quote.
    """
    text = inStr
    if "'" not in text:
        if '\n' not in text:
            return "'%s'" % text
        return "'''%s'''" % text
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Concatenate *node*'s text with the tail text of each direct child."""
    text = node.text if node.text is not None else ''
    for child in node:
        if child.tail is not None:
            text += child.tail
    return text
def find_attr_value_(attr_name, node):
    """Look up an attribute on *node*; a 'prefix:name' is resolved via node.nsmap."""
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local_name = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attrs.get('{%s}%s' % (uri, local_name,))
    return None
class GDSParseError(Exception):
    """Error raised for invalid XML content (see raise_parse_error)."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError, annotating *msg* with the node's tag and source line."""
    detail = msg
    if node is not None:
        detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline,)
    raise GDSParseError(detail)
class MixedContainer:
    """One piece of mixed XML content (interleaved text and elements).

    `category` says what kind of piece this is (text, a simple-typed
    element, or a complex element); `content_type` selects the format
    used when serializing a CategorySimple value.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is unused; this simply
        # returns the stored content_type.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        """Serialize this piece to *outfile* according to its category."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:  # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name_=name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write a simple-typed value as <name>value</name>, formatted by content_type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        """Attach this piece to *element*: as text/tail, a subelement, or a subtree."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    # Text after a child element belongs in that child's tail.
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:  # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the simple-typed value as text, formatted by content_type."""
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this piece as a Python 'model_.MixedContainer(...)' literal."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:  # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata describing one member of a generated binding class.

    Attributes:
      name        -- member name.
      data_type   -- XML schema type name, or a list forming a type chain.
      container   -- truthy when the member holds a list of values.
      optional    -- truthy when the member may be absent.
      child_attrs -- attributes of child elements (may be None).
      choice      -- xs:choice group indicator (may be None).
    """
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        """Return the raw data_type (possibly a list of chained type names)."""
        return self.data_type
    def get_data_type(self):
        """Return the effective type: the last of a type chain, or 'xs:string'."""
        if isinstance(self.data_type, list):
            if len(self.data_type) > 0:
                return self.data_type[-1]
            else:
                return 'xs:string'
        else:
            return self.data_type
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs
    def get_child_attrs(self):
        return self.child_attrs
    def set_choice(self, choice):
        self.choice = choice
    def get_choice(self):
        return self.choice
    def set_optional(self, optional):
        self.optional = optional
    def get_optional(self):
        return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class MetadataDirective(Enum):
    """Schema enumeration: whether object metadata is copied or replaced."""
    COPY = 'COPY'
    REPLACE = 'REPLACE'
class MfaDeleteStatus(Enum):
    """Schema enumeration: MFA-delete status of a bucket."""
    ENABLED = 'Enabled'
    DISABLED = 'Disabled'
class Payer(Enum):
    """Schema enumeration: who pays for requests (request-payment config)."""
    BUCKET_OWNER = 'BucketOwner'
    REQUESTER = 'Requester'
class Permission(Enum):
    """Schema enumeration: ACL grant permission levels."""
    READ = 'READ'
    WRITE = 'WRITE'
    READ_ACP = 'READ_ACP'
    WRITE_ACP = 'WRITE_ACP'
    FULL_CONTROL = 'FULL_CONTROL'
class StorageClass(Enum):
    """Schema enumeration: storage class of a stored object."""
    STANDARD = 'STANDARD'
    REDUCED_REDUNDANCY = 'REDUCED_REDUNDANCY'
    GLACIER = 'GLACIER'
    UNKNOWN = 'UNKNOWN'
class VersioningStatus(Enum):
    """Schema enumeration: versioning state of a bucket."""
    ENABLED = 'Enabled'
    SUSPENDED = 'Suspended'
class CreateBucket(GeneratedsSuper):
    """Generated binding for the CreateBucket request element
    (namespace http://s3.amazonaws.com/doc/2006-03-01/).

    Children: Bucket, AccessControlList, AWSAccessKeyId, Timestamp,
    Signature.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, AccessControlList=None, AWSAccessKeyId=None, Timestamp=None, Signature=None,
                 **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AccessControlList = AccessControlList
        self.AWSAccessKeyId = AWSAccessKeyId
        # A string Timestamp is parsed here with a fixed format.
        # NOTE(review): '%Y-%m-%dT%H:%M:%S' accepts neither fractional
        # seconds nor a timezone suffix — confirm callers only pass that form.
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        """Create an instance, honoring subclass redirection hooks."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CreateBucket)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CreateBucket.subclass:
            return CreateBucket.subclass(*args_, **kwargs_)
        else:
            return CreateBucket(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def hasContent_(self):
        """True when any child element is set (controls <x/> vs <x>...</x>)."""
        if (
                self.Bucket is not None or
                self.AccessControlList is not None or
                self.AWSAccessKeyId is not None or
                self.Timestamp is not None or
                self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucket',
               pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CreateBucket')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CreateBucket')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CreateBucket', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CreateBucket'):
        # This element has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucket',
                       fromsubclass_=False, pretty_print=True):
        """Write the child elements that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.AccessControlList is not None:
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element has no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching member."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
# end class CreateBucket
class MetadataEntry(GeneratedsSuper):
    """Generated binding for a MetadataEntry element: a Name/Value pair."""
    subclass = None
    superclass = None
    def __init__(self, Name=None, Value=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Name = Name
        self.Value = Value
    def factory(*args_, **kwargs_):
        """Create an instance, honoring subclass redirection hooks."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, MetadataEntry)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if MetadataEntry.subclass:
            return MetadataEntry.subclass(*args_, **kwargs_)
        else:
            return MetadataEntry(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Name(self):
        return self.Name
    def set_Name(self, Name):
        self.Name = Name
    def get_Value(self):
        return self.Value
    def set_Value(self, Value):
        self.Value = Value
    def hasContent_(self):
        """True when any child element is set (controls <x/> vs <x>...</x>)."""
        if (
                self.Name is not None or
                self.Value is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='MetadataEntry',
               pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('MetadataEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='MetadataEntry')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='MetadataEntry', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='MetadataEntry'):
        # This element has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='MetadataEntry',
                       fromsubclass_=False, pretty_print=True):
        """Write the child elements that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Name is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sName>%s</%sName>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')),
                namespaceprefix_, eol_))
        if self.Value is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sValue>%s</%sValue>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Value), input_name='Value')),
                namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element has no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching member."""
        if nodeName_ == 'Name':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Name')
            value_ = self.gds_validate_string(value_, node, 'Name')
            self.Name = value_
        elif nodeName_ == 'Value':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Value')
            value_ = self.gds_validate_string(value_, node, 'Value')
            self.Value = value_
# end class MetadataEntry
class CreateBucketResponse(GeneratedsSuper):
    """Generated binding for the CreateBucketResponse element.

    Single child: CreateBucketReturn (a CreateBucketResult).
    """
    subclass = None
    superclass = None
    def __init__(self, CreateBucketReturn=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.CreateBucketReturn = CreateBucketReturn
    def factory(*args_, **kwargs_):
        """Create an instance, honoring subclass redirection hooks."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CreateBucketResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CreateBucketResponse.subclass:
            return CreateBucketResponse.subclass(*args_, **kwargs_)
        else:
            return CreateBucketResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CreateBucketReturn(self):
        return self.CreateBucketReturn
    def set_CreateBucketReturn(self, CreateBucketReturn):
        self.CreateBucketReturn = CreateBucketReturn
    def hasContent_(self):
        """True when any child element is set (controls <x/> vs <x>...</x>)."""
        if (
                self.CreateBucketReturn is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucketResponse',
               pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CreateBucketResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CreateBucketResponse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CreateBucketResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CreateBucketResponse'):
        # This element has no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='CreateBucketResponse', fromsubclass_=False, pretty_print=True):
        """Write the child elements that are set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CreateBucketReturn is not None:
            self.CreateBucketReturn.export(outfile, level, namespaceprefix_, namespacedef_='',
                                           name_='CreateBucketReturn', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element has no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element into the matching member."""
        if nodeName_ == 'CreateBucketReturn':
            obj_ = CreateBucketResult.factory(parent_object_=self)
            obj_.build(child_)
            self.CreateBucketReturn = obj_
            obj_.original_tagname_ = 'CreateBucketReturn'
# end class CreateBucketResponse
class Status(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Code=None, Description=None, **kwargs_):
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.Code = Code
self.Description = Description
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, Status)
if subclass is not None:
return subclass(*args_, **kwargs_)
if Status.subclass:
return Status.subclass(*args_, **kwargs_)
else:
return Status(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Code(self):
return self.Code
def set_Code(self, Code):
self.Code = Code
def get_Description(self):
return self.Description
def set_Description(self, Description):
self.Description = Description
def hasContent_(self):
if (
self.Code is not None or
self.Description is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Status', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('Status')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Status')
if self.hasContent_():
outfile.write('>%s' % (eol_,))
self.exportChildren(outfile, level + 1, '', namespacedef_, name_='Status', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_,))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Status'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Status',
fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Code is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sCode>%s</%sCode>%s' % (
namespaceprefix_, self.gds_format_integer(self.Code, input_name='Code'), namespaceprefix_, eol_))
if self.Description is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sDescription>%s</%sDescription>%s' % (namespaceprefix_, self.gds_encode(
self.gds_format_string(quote_xml(self.Description), input_name='Description')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child's tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into Code (as int) or Description (as str)."""
        # NOTE: an empty <Code/> element is skipped (child_.text is falsy).
        if nodeName_ == 'Code' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'Code')
            ival_ = self.gds_validate_integer(ival_, node, 'Code')
            self.Code = ival_
        elif nodeName_ == 'Description':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Description')
            value_ = self.gds_validate_string(value_, node, 'Description')
            self.Description = value_
# end class Status
class Result(GeneratedsSuper):
    """Generated binding for the S3 ``Result`` complex type.

    Carries a single ``Status`` child element.  ``extensiontype_`` holds an
    ``xsi:type`` value when an XML document declares this element to be an
    extension (derived type) of Result.
    """
    subclass = None
    superclass = None
    def __init__(self, Status=None, extensiontype_=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Status = Status
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Result)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Result.subclass:
            return Result.subclass(*args_, **kwargs_)
        else:
            return Result(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Status(self):
        """Return the Status child object (or None)."""
        return self.Status
    def set_Status(self, Status):
        self.Status = Status
    def get_extensiontype_(self):
        return self.extensiontype_
    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_
    def hasContent_(self):
        """Report whether any child element is set."""
        if (
            self.Status is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Result', pretty_print=True):
        """Write this Result element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Result')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Result')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='Result', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Result'):
        """Emit an xsi:type attribute when this object represents a derived type."""
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            # A bare type name gets the registered namespace prefix prepended;
            # an already-qualified name is written unchanged.
            if ":" not in self.extensiontype_:
                imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '')
                outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_))
            else:
                outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Result',
                       fromsubclass_=False, pretty_print=True):
        """Delegate export of the Status child to its own export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Status is not None:
            self.Status.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Status',
                               pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Capture an xsi:type attribute, if present, into extensiontype_."""
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the Status child from its XML element."""
        if nodeName_ == 'Status':
            obj_ = Status.factory(parent_object_=self)
            obj_.build(child_)
            self.Status = obj_
            obj_.original_tagname_ = 'Status'
# end class Result
class CreateBucketResult(GeneratedsSuper):
    """Generated binding for the S3 ``CreateBucketResult`` complex type.

    Holds the name of the bucket that was created (``BucketName``).
    """
    subclass = None
    superclass = None
    def __init__(self, BucketName=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.BucketName = BucketName
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CreateBucketResult)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CreateBucketResult.subclass:
            return CreateBucketResult.subclass(*args_, **kwargs_)
        else:
            return CreateBucketResult(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_BucketName(self):
        """Return the BucketName member (a string, or None)."""
        return self.BucketName
    def set_BucketName(self, BucketName):
        self.BucketName = BucketName
    def hasContent_(self):
        """Report whether any child element is set."""
        if (
            self.BucketName is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucketResult',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CreateBucketResult')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CreateBucketResult')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CreateBucketResult',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CreateBucketResult'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucketResult',
                       fromsubclass_=False, pretty_print=True):
        """Write the BucketName child element, XML-escaped, if set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.BucketName is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucketName>%s</%sBucketName>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.BucketName), input_name='BucketName')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the BucketName child element into a string."""
        if nodeName_ == 'BucketName':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'BucketName')
            value_ = self.gds_validate_string(value_, node, 'BucketName')
            self.BucketName = value_
# end class CreateBucketResult
class DeleteBucket(GeneratedsSuper):
    """Generated binding for the S3 ``DeleteBucket`` request type.

    Members: ``Bucket`` (name of the bucket to delete), plus the SOAP
    authentication fields ``AWSAccessKeyId``, ``Timestamp``, ``Signature``
    and ``Credential``.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may be given as a datetime or as an ISO-8601 string
        # ('%Y-%m-%dT%H:%M:%S' — no fractional seconds or timezone suffix).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DeleteBucket)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DeleteBucket.subclass:
            return DeleteBucket.subclass(*args_, **kwargs_)
        else:
            return DeleteBucket(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        # NOTE: unlike __init__, the setter does not convert strings; callers
        # should pass a datetime here.
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        """Report whether any child element is set."""
        if (
            self.Bucket is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteBucket',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DeleteBucket')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DeleteBucket')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='DeleteBucket', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DeleteBucket'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteBucket',
                       fromsubclass_=False, pretty_print=True):
        """Write each set child element; strings are XML-escaped, Timestamp is formatted as a datetime."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching string/datetime member."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class DeleteBucket
class DeleteBucketResponse(GeneratedsSuper):
    """Generated binding for the S3 ``DeleteBucketResponse`` wrapper element.

    Wraps a single inner ``DeleteBucketResponse`` member, which per the
    schema is built as a ``Status`` object (see buildChildren).
    """
    subclass = None
    superclass = None
    def __init__(self, DeleteBucketResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Parameter is suffixed _member to avoid clashing with the class name.
        self.DeleteBucketResponse = DeleteBucketResponse_member
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DeleteBucketResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DeleteBucketResponse.subclass:
            return DeleteBucketResponse.subclass(*args_, **kwargs_)
        else:
            return DeleteBucketResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_DeleteBucketResponse(self):
        return self.DeleteBucketResponse
    def set_DeleteBucketResponse(self, DeleteBucketResponse):
        self.DeleteBucketResponse = DeleteBucketResponse
    def hasContent_(self):
        """Report whether the inner member is set."""
        if (
            self.DeleteBucketResponse is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteBucketResponse',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DeleteBucketResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DeleteBucketResponse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='DeleteBucketResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DeleteBucketResponse'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='DeleteBucketResponse', fromsubclass_=False, pretty_print=True):
        """Delegate export of the inner member to its own export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DeleteBucketResponse is not None:
            self.DeleteBucketResponse.export(outfile, level, namespaceprefix_, namespacedef_='',
                                             name_='DeleteBucketResponse', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the inner member as a Status object (its schema type)."""
        if nodeName_ == 'DeleteBucketResponse':
            obj_ = Status.factory(parent_object_=self)
            obj_.build(child_)
            self.DeleteBucketResponse = obj_
            obj_.original_tagname_ = 'DeleteBucketResponse'
# end class DeleteBucketResponse
class BucketLoggingStatus(GeneratedsSuper):
    """Generated binding for the S3 ``BucketLoggingStatus`` complex type.

    Wraps a single optional ``LoggingEnabled`` child, built as a
    ``LoggingSettings`` object.
    """
    subclass = None
    superclass = None

    def __init__(self, LoggingEnabled=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.LoggingEnabled = LoggingEnabled

    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, BucketLoggingStatus)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        cls_ = BucketLoggingStatus.subclass or BucketLoggingStatus
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_LoggingEnabled(self):
        """Return the LoggingEnabled child object (or None)."""
        return self.LoggingEnabled

    def set_LoggingEnabled(self, LoggingEnabled):
        self.LoggingEnabled = LoggingEnabled

    def hasContent_(self):
        """Report whether the LoggingEnabled child is set."""
        return self.LoggingEnabled is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='BucketLoggingStatus',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        override_ns_def_ = GenerateDSNamespaceDefs_.get('BucketLoggingStatus')
        if override_ns_def_ is not None:
            namespacedef_ = override_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BucketLoggingStatus')
        if not self.hasContent_():
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='BucketLoggingStatus',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BucketLoggingStatus'):
        """No XML attributes are defined for this type; nothing to write."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='BucketLoggingStatus', fromsubclass_=False, pretty_print=True):
        """Delegate export of the LoggingEnabled child to its own export()."""
        if self.LoggingEnabled is not None:
            self.LoggingEnabled.export(outfile, level, namespaceprefix_, namespacedef_='', name_='LoggingEnabled',
                                       pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            # Strip any namespace prefix from the tag before dispatching.
            tag_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build the LoggingEnabled child as a LoggingSettings object."""
        if nodeName_ == 'LoggingEnabled':
            settings_ = LoggingSettings.factory(parent_object_=self)
            settings_.build(child_)
            self.LoggingEnabled = settings_
            settings_.original_tagname_ = 'LoggingEnabled'
# end class BucketLoggingStatus
class LoggingSettings(GeneratedsSuper):
    """Generated binding for the S3 ``LoggingSettings`` complex type.

    Members: ``TargetBucket`` and ``TargetPrefix`` (strings) plus
    ``TargetGrants``, built as an ``AccessControlList`` object.
    """
    subclass = None
    superclass = None
    def __init__(self, TargetBucket=None, TargetPrefix=None, TargetGrants=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.TargetBucket = TargetBucket
        self.TargetPrefix = TargetPrefix
        self.TargetGrants = TargetGrants
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LoggingSettings)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LoggingSettings.subclass:
            return LoggingSettings.subclass(*args_, **kwargs_)
        else:
            return LoggingSettings(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_TargetBucket(self):
        return self.TargetBucket
    def set_TargetBucket(self, TargetBucket):
        self.TargetBucket = TargetBucket
    def get_TargetPrefix(self):
        return self.TargetPrefix
    def set_TargetPrefix(self, TargetPrefix):
        self.TargetPrefix = TargetPrefix
    def get_TargetGrants(self):
        return self.TargetGrants
    def set_TargetGrants(self, TargetGrants):
        self.TargetGrants = TargetGrants
    def hasContent_(self):
        """Report whether any child element is set."""
        if (
            self.TargetBucket is not None or
            self.TargetPrefix is not None or
            self.TargetGrants is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='LoggingSettings',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LoggingSettings')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LoggingSettings')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='LoggingSettings',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LoggingSettings'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='LoggingSettings',
                       fromsubclass_=False, pretty_print=True):
        """Write string children XML-escaped; delegate TargetGrants to its export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.TargetBucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTargetBucket>%s</%sTargetBucket>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.TargetBucket), input_name='TargetBucket')), namespaceprefix_,
                eol_))
        if self.TargetPrefix is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTargetPrefix>%s</%sTargetPrefix>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.TargetPrefix), input_name='TargetPrefix')), namespaceprefix_,
                eol_))
        if self.TargetGrants is not None:
            self.TargetGrants.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TargetGrants',
                                     pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse string children; build TargetGrants as an AccessControlList."""
        if nodeName_ == 'TargetBucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TargetBucket')
            value_ = self.gds_validate_string(value_, node, 'TargetBucket')
            self.TargetBucket = value_
        elif nodeName_ == 'TargetPrefix':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'TargetPrefix')
            value_ = self.gds_validate_string(value_, node, 'TargetPrefix')
            self.TargetPrefix = value_
        elif nodeName_ == 'TargetGrants':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.TargetGrants = obj_
            obj_.original_tagname_ = 'TargetGrants'
# end class LoggingSettings
class GetBucketLoggingStatus(GeneratedsSuper):
    """Generated binding for the S3 ``GetBucketLoggingStatus`` request type.

    Members: ``Bucket`` plus the SOAP authentication fields
    ``AWSAccessKeyId``, ``Timestamp``, ``Signature`` and ``Credential``.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may be given as a datetime or as an ISO-8601 string
        # ('%Y-%m-%dT%H:%M:%S' — no fractional seconds or timezone suffix).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any externally registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GetBucketLoggingStatus)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GetBucketLoggingStatus.subclass:
            return GetBucketLoggingStatus.subclass(*args_, **kwargs_)
        else:
            return GetBucketLoggingStatus(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        # NOTE: unlike __init__, the setter does not convert strings; callers
        # should pass a datetime here.
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        """Report whether any child element is set."""
        if (
            self.Bucket is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetBucketLoggingStatus',
               pretty_print=True):
        """Write this element as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetBucketLoggingStatus')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetBucketLoggingStatus')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetBucketLoggingStatus',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GetBucketLoggingStatus'):
        """No XML attributes are defined for this type; nothing to write."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetBucketLoggingStatus', fromsubclass_=False, pretty_print=True):
        """Write each set child element; strings are XML-escaped, Timestamp is formatted as a datetime."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching string/datetime member."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class GetBucketLoggingStatus
class GetBucketLoggingStatusResponse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, GetBucketLoggingStatusResponse_member=None, **kwargs_):
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.GetBucketLoggingStatusResponse = GetBucketLoggingStatusResponse_member
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, GetBucketLoggingStatusResponse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if GetBucketLoggingStatusResponse.subclass:
return GetBucketLoggingStatusResponse.subclass(*args_, **kwargs_)
else:
return GetBucketLoggingStatusResponse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_GetBucketLoggingStatusResponse(self):
return self.GetBucketLoggingStatusResponse
def set_GetBucketLoggingStatusResponse(self, GetBucketLoggingStatusResponse):
self.GetBucketLoggingStatusResponse = GetBucketLoggingStatusResponse
def hasContent_(self):
if (
self.GetBucketLoggingStatusResponse is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
name_='GetBucketLoggingStatusResponse', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetBucketLoggingStatusResponse')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_,
name_='GetBucketLoggingStatusResponse')
if self.hasContent_():
outfile.write('>%s' % (eol_,))
self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetBucketLoggingStatusResponse',
pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_,))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
name_='GetBucketLoggingStatusResponse'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
name_='GetBucketLoggingStatusResponse', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.GetBucketLoggingStatusResponse is not None:
self.GetBucketLoggingStatusResponse.export(outfile, level, namespaceprefix_, namespacedef_='',
name_='GetBucketLoggingStatusResponse',
pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'GetBucketLoggingStatusResponse':
obj_ = BucketLoggingStatus.factory(parent_object_=self)
obj_.build(child_)
self.GetBucketLoggingStatusResponse = obj_
obj_.original_tagname_ = 'GetBucketLoggingStatusResponse'
# end class GetBucketLoggingStatusResponse
class SetBucketLoggingStatus(GeneratedsSuper):
    """Generated binding for the S3 SOAP SetBucketLoggingStatus request element."""
    subclass = None
    superclass = None

    def __init__(self, Bucket=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None,
                 BucketLoggingStatus=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a ready datetime object.
        if isinstance(Timestamp, BaseStrType_):
            self.Timestamp = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            self.Timestamp = Timestamp
        self.Signature = Signature
        self.Credential = Credential
        self.BucketLoggingStatus = BucketLoggingStatus

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, SetBucketLoggingStatus)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = SetBucketLoggingStatus.subclass or SetBucketLoggingStatus
        return cls_(*args_, **kwargs_)

    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def get_BucketLoggingStatus(self):
        return self.BucketLoggingStatus
    def set_BucketLoggingStatus(self, BucketLoggingStatus):
        self.BucketLoggingStatus = BucketLoggingStatus

    def hasContent_(self):
        """Return True when at least one member is set and must be serialized."""
        return any(member is not None for member in (
            self.Bucket, self.AWSAccessKeyId, self.Timestamp,
            self.Signature, self.Credential, self.BucketLoggingStatus))

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='SetBucketLoggingStatus',
               pretty_print=True):
        """Write this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetBucketLoggingStatus')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_, name_='SetBucketLoggingStatus')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetBucketLoggingStatus',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SetBucketLoggingStatus'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetBucketLoggingStatus', fromsubclass_=False, pretty_print=True):
        """Serialize every non-None member in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('Bucket', 'AWSAccessKeyId', 'Timestamp', 'Signature', 'Credential'):
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            if tag_ == 'Timestamp':
                text_ = self.gds_format_datetime(value_, input_name=tag_)
            else:
                text_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag_, text_, namespaceprefix_, tag_, eol_))
        if self.BucketLoggingStatus is not None:
            self.BucketLoggingStatus.export(outfile, level, namespaceprefix_, namespacedef_='',
                                            name_='BucketLoggingStatus', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ in ('Bucket', 'AWSAccessKeyId', 'Signature', 'Credential'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, self.gds_validate_string(value_, node, nodeName_))
        elif nodeName_ == 'Timestamp':
            self.Timestamp = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'BucketLoggingStatus':
            member_ = BucketLoggingStatus.factory(parent_object_=self)
            member_.build(child_)
            member_.original_tagname_ = 'BucketLoggingStatus'
            self.BucketLoggingStatus = member_
# end class SetBucketLoggingStatus
class SetBucketLoggingStatusResponse(GeneratedsSuper):
    """Generated binding for the (memberless) SetBucketLoggingStatus response element."""
    subclass = None
    superclass = None

    def __init__(self, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, SetBucketLoggingStatusResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = SetBucketLoggingStatusResponse.subclass or SetBucketLoggingStatusResponse
        return cls_(*args_, **kwargs_)

    def hasContent_(self):
        """The element has no members, so it never has content.

        (The generated original tested an empty tuple, which is always falsy.)
        """
        return False

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='SetBucketLoggingStatusResponse', pretty_print=True):
        """Write this element as XML to outfile (always self-closing in practice)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetBucketLoggingStatusResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_,
                              name_='SetBucketLoggingStatusResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetBucketLoggingStatusResponse',
                            pretty_print=pretty_print)
        # NOTE: the generated original writes the close tag without re-indenting here.
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='SetBucketLoggingStatusResponse'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetBucketLoggingStatusResponse', fromsubclass_=False, pretty_print=True):
        """No child elements are defined for this element."""
        pass

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are expected; any encountered are ignored."""
        pass
# end class SetBucketLoggingStatusResponse
class GetObjectAccessControlPolicy(GeneratedsSuper):
    """Generated binding for the S3 SOAP GetObjectAccessControlPolicy request element."""
    subclass = None
    superclass = None

    def __init__(self, Bucket=None, Key=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None,
                 **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a ready datetime object.
        if isinstance(Timestamp, BaseStrType_):
            self.Timestamp = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            self.Timestamp = Timestamp
        self.Signature = Signature
        self.Credential = Credential

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, GetObjectAccessControlPolicy)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = GetObjectAccessControlPolicy.subclass or GetObjectAccessControlPolicy
        return cls_(*args_, **kwargs_)

    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential

    def hasContent_(self):
        """Return True when at least one member is set and must be serialized."""
        return any(member is not None for member in (
            self.Bucket, self.Key, self.AWSAccessKeyId,
            self.Timestamp, self.Signature, self.Credential))

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='GetObjectAccessControlPolicy', pretty_print=True):
        """Write this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectAccessControlPolicy')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_, name_='GetObjectAccessControlPolicy')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectAccessControlPolicy',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='GetObjectAccessControlPolicy'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetObjectAccessControlPolicy', fromsubclass_=False, pretty_print=True):
        """Serialize every non-None member in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('Bucket', 'Key', 'AWSAccessKeyId', 'Timestamp', 'Signature', 'Credential'):
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            if tag_ == 'Timestamp':
                text_ = self.gds_format_datetime(value_, input_name=tag_)
            else:
                text_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag_, text_, namespaceprefix_, tag_, eol_))

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ in ('Bucket', 'Key', 'AWSAccessKeyId', 'Signature', 'Credential'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, self.gds_validate_string(value_, node, nodeName_))
        elif nodeName_ == 'Timestamp':
            self.Timestamp = self.gds_parse_datetime(child_.text)
# end class GetObjectAccessControlPolicy
class GetObjectAccessControlPolicyResponse(GeneratedsSuper):
    """Generated binding for the GetObjectAccessControlPolicy response element."""
    subclass = None
    superclass = None

    def __init__(self, GetObjectAccessControlPolicyResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Member deliberately shares the class name; the ctor arg is suffixed to avoid a clash.
        self.GetObjectAccessControlPolicyResponse = GetObjectAccessControlPolicyResponse_member

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, GetObjectAccessControlPolicyResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = GetObjectAccessControlPolicyResponse.subclass or GetObjectAccessControlPolicyResponse
        return cls_(*args_, **kwargs_)

    def get_GetObjectAccessControlPolicyResponse(self):
        return self.GetObjectAccessControlPolicyResponse
    def set_GetObjectAccessControlPolicyResponse(self, GetObjectAccessControlPolicyResponse):
        self.GetObjectAccessControlPolicyResponse = GetObjectAccessControlPolicyResponse

    def hasContent_(self):
        """Return True when there is a child element to serialize."""
        return self.GetObjectAccessControlPolicyResponse is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='GetObjectAccessControlPolicyResponse', pretty_print=True):
        """Write this element (and its child, if any) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectAccessControlPolicyResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_,
                              name_='GetObjectAccessControlPolicyResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectAccessControlPolicyResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='GetObjectAccessControlPolicyResponse'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetObjectAccessControlPolicyResponse', fromsubclass_=False, pretty_print=True):
        """Delegate serialization of the single child element, when present."""
        member_ = self.GetObjectAccessControlPolicyResponse
        if member_ is not None:
            member_.export(outfile, level, namespaceprefix_, namespacedef_='',
                           name_='GetObjectAccessControlPolicyResponse', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the expected child element into an AccessControlPolicy member."""
        if nodeName_ != 'GetObjectAccessControlPolicyResponse':
            return
        member_ = AccessControlPolicy.factory(parent_object_=self)
        member_.build(child_)
        member_.original_tagname_ = 'GetObjectAccessControlPolicyResponse'
        self.GetObjectAccessControlPolicyResponse = member_
# end class GetObjectAccessControlPolicyResponse
class GetBucketAccessControlPolicy(GeneratedsSuper):
    """Generated binding for the S3 SOAP GetBucketAccessControlPolicy request element."""
    subclass = None
    superclass = None

    def __init__(self, Bucket=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a ready datetime object.
        if isinstance(Timestamp, BaseStrType_):
            self.Timestamp = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            self.Timestamp = Timestamp
        self.Signature = Signature
        self.Credential = Credential

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, GetBucketAccessControlPolicy)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = GetBucketAccessControlPolicy.subclass or GetBucketAccessControlPolicy
        return cls_(*args_, **kwargs_)

    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential

    def hasContent_(self):
        """Return True when at least one member is set and must be serialized."""
        return any(member is not None for member in (
            self.Bucket, self.AWSAccessKeyId, self.Timestamp,
            self.Signature, self.Credential))

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='GetBucketAccessControlPolicy', pretty_print=True):
        """Write this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetBucketAccessControlPolicy')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_, name_='GetBucketAccessControlPolicy')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetBucketAccessControlPolicy',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='GetBucketAccessControlPolicy'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetBucketAccessControlPolicy', fromsubclass_=False, pretty_print=True):
        """Serialize every non-None member in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('Bucket', 'AWSAccessKeyId', 'Timestamp', 'Signature', 'Credential'):
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            if tag_ == 'Timestamp':
                text_ = self.gds_format_datetime(value_, input_name=tag_)
            else:
                text_ = self.gds_encode(self.gds_format_string(quote_xml(value_), input_name=tag_))
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag_, text_, namespaceprefix_, tag_, eol_))

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ in ('Bucket', 'AWSAccessKeyId', 'Signature', 'Credential'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, self.gds_validate_string(value_, node, nodeName_))
        elif nodeName_ == 'Timestamp':
            self.Timestamp = self.gds_parse_datetime(child_.text)
# end class GetBucketAccessControlPolicy
class GetBucketAccessControlPolicyResponse(GeneratedsSuper):
    """Generated binding for the GetBucketAccessControlPolicy response element."""
    subclass = None
    superclass = None

    def __init__(self, GetBucketAccessControlPolicyResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Member deliberately shares the class name; the ctor arg is suffixed to avoid a clash.
        self.GetBucketAccessControlPolicyResponse = GetBucketAccessControlPolicyResponse_member

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, GetBucketAccessControlPolicyResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = GetBucketAccessControlPolicyResponse.subclass or GetBucketAccessControlPolicyResponse
        return cls_(*args_, **kwargs_)

    def get_GetBucketAccessControlPolicyResponse(self):
        return self.GetBucketAccessControlPolicyResponse
    def set_GetBucketAccessControlPolicyResponse(self, GetBucketAccessControlPolicyResponse):
        self.GetBucketAccessControlPolicyResponse = GetBucketAccessControlPolicyResponse

    def hasContent_(self):
        """Return True when there is a child element to serialize."""
        return self.GetBucketAccessControlPolicyResponse is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='GetBucketAccessControlPolicyResponse', pretty_print=True):
        """Write this element (and its child, if any) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetBucketAccessControlPolicyResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_,
                              name_='GetBucketAccessControlPolicyResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetBucketAccessControlPolicyResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='GetBucketAccessControlPolicyResponse'):
        """No XML attributes are defined for this element."""
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetBucketAccessControlPolicyResponse', fromsubclass_=False, pretty_print=True):
        """Delegate serialization of the single child element, when present."""
        member_ = self.GetBucketAccessControlPolicyResponse
        if member_ is not None:
            member_.export(outfile, level, namespaceprefix_, namespacedef_='',
                           name_='GetBucketAccessControlPolicyResponse', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No attributes are defined for this element; nothing to parse."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the expected child element into an AccessControlPolicy member."""
        if nodeName_ != 'GetBucketAccessControlPolicyResponse':
            return
        member_ = AccessControlPolicy.factory(parent_object_=self)
        member_.build(child_)
        member_.original_tagname_ = 'GetBucketAccessControlPolicyResponse'
        self.GetBucketAccessControlPolicyResponse = member_
# end class GetBucketAccessControlPolicyResponse
class Grantee(GeneratedsSuper):
    """Abstract base for ACL grantees; concrete kinds are selected via xsi:type."""
    subclass = None
    superclass = None

    def __init__(self, extensiontype_=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.extensiontype_ = extensiontype_

    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(CurrentSubclassModule_, Grantee)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = Grantee.subclass or Grantee
        return cls_(*args_, **kwargs_)

    def get_extensiontype_(self):
        return self.extensiontype_
    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_

    def hasContent_(self):
        """Grantee itself defines no members, so it never has content.

        (The generated original tested an empty tuple, which is always falsy.)
        """
        return False

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Grantee',
               pretty_print=True):
        """Write this element as XML to outfile (always self-closing in practice)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Grantee')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_,
                                   ' ' + namespacedef_ if namespacedef_ else ''))
        self.exportAttributes(outfile, level, set(), namespaceprefix_, name_='Grantee')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='Grantee', pretty_print=pretty_print)
        # NOTE: the generated original writes the close tag without re-indenting here.
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Grantee'):
        """Emit an xsi:type attribute when this instance is an extension type."""
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            if ":" not in self.extensiontype_:
                # Qualify an unprefixed type name with its imported namespace prefix.
                imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '')
                outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_))
            else:
                outfile.write(' xsi:type="%s"' % self.extensiontype_)

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Grantee',
                       fromsubclass_=False, pretty_print=True):
        """No child elements are defined on the base type."""
        pass

    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        self.buildAttributes(node, node.attrib, set())
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """Pick up an explicit xsi:type attribute, when the XML carries one."""
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No child elements are expected; any encountered are ignored."""
        pass
# end class Grantee
class User(Grantee):
subclass = None
superclass = Grantee
    def __init__(self, extensiontype_=None, **kwargs_):
        """Initialize; User adds no members beyond what Grantee.__init__ sets up."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        super(User, self).__init__(extensiontype_, **kwargs_)
        self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, User)
if subclass is not None:
return subclass(*args_, **kwargs_)
if User.subclass:
return User.subclass(*args_, **kwargs_)
else:
return User(*args_, **kwargs_)
factory = staticmethod(factory)
def get_extensiontype_(self):
return self.extensiontype_
def set_extensiontype_(self, extensiontype_):
self.extensiontype_ = extensiontype_
def hasContent_(self):
if (
super(User, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='User', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('User')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='User')
if self.hasContent_():
outfile.write('>%s' % (eol_,))
self.exportChildren(outfile, level + 1, '', namespacedef_, name_='User', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_,))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='User'):
super(User, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='User')
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.add('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
if ":" not in self.extensiontype_:
imported_ns_type_prefix_ = GenerateDSNamespaceTypePrefixes_.get(self.extensiontype_, '')
outfile.write(' xsi:type="%s%s"' % (imported_ns_type_prefix_, self.extensiontype_))
else:
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='User',
fromsubclass_=False, pretty_print=True):
super(User, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True,
pretty_print=pretty_print)
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.add('xsi:type')
self.extensiontype_ = value
super(User, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
super(User, self).buildChildren(child_, node, nodeName_, True)
pass
# end class User
class AmazonCustomerByEmail(User):
    """Generated binding for the S3 ``AmazonCustomerByEmail`` grantee:
    a User identified by an EmailAddress element.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = User
    def __init__(self, EmailAddress=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        super(AmazonCustomerByEmail, self).__init__(**kwargs_)
        self.EmailAddress = EmailAddress
    def factory(*args_, **kwargs_):
        """Instantiate AmazonCustomerByEmail or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AmazonCustomerByEmail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AmazonCustomerByEmail.subclass:
            return AmazonCustomerByEmail.subclass(*args_, **kwargs_)
        else:
            return AmazonCustomerByEmail(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_EmailAddress(self):
        return self.EmailAddress
    def set_EmailAddress(self, EmailAddress):
        self.EmailAddress = EmailAddress
    def hasContent_(self):
        if (
            self.EmailAddress is not None or
            super(AmazonCustomerByEmail, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='AmazonCustomerByEmail',
               pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AmazonCustomerByEmail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AmazonCustomerByEmail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='AmazonCustomerByEmail',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AmazonCustomerByEmail'):
        super(AmazonCustomerByEmail, self).exportAttributes(outfile, level, already_processed, namespaceprefix_,
                                                            name_='AmazonCustomerByEmail')
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='AmazonCustomerByEmail', fromsubclass_=False, pretty_print=True):
        super(AmazonCustomerByEmail, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True,
                                                          pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.EmailAddress is not None:
            showIndent(outfile, level, pretty_print)
            # Escape and encode the text content before writing.
            outfile.write('<%sEmailAddress>%s</%sEmailAddress>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.EmailAddress), input_name='EmailAddress')), namespaceprefix_,
                eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(AmazonCustomerByEmail, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'EmailAddress':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'EmailAddress')
            value_ = self.gds_validate_string(value_, node, 'EmailAddress')
            self.EmailAddress = value_
        super(AmazonCustomerByEmail, self).buildChildren(child_, node, nodeName_, True)
# end class AmazonCustomerByEmail
class CanonicalUser(User):
    """Generated binding for the S3 ``CanonicalUser`` grantee: a User
    identified by a canonical ID, with an optional DisplayName.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = User
    def __init__(self, ID=None, DisplayName=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        super(CanonicalUser, self).__init__(**kwargs_)
        self.ID = ID
        self.DisplayName = DisplayName
    def factory(*args_, **kwargs_):
        """Instantiate CanonicalUser or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CanonicalUser)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CanonicalUser.subclass:
            return CanonicalUser.subclass(*args_, **kwargs_)
        else:
            return CanonicalUser(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ID(self):
        return self.ID
    def set_ID(self, ID):
        self.ID = ID
    def get_DisplayName(self):
        return self.DisplayName
    def set_DisplayName(self, DisplayName):
        self.DisplayName = DisplayName
    def hasContent_(self):
        if (
            self.ID is not None or
            self.DisplayName is not None or
            super(CanonicalUser, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CanonicalUser',
               pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalUser')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CanonicalUser')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CanonicalUser', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CanonicalUser'):
        super(CanonicalUser, self).exportAttributes(outfile, level, already_processed, namespaceprefix_,
                                                    name_='CanonicalUser')
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CanonicalUser',
                       fromsubclass_=False, pretty_print=True):
        super(CanonicalUser, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True,
                                                  pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ID is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sID>%s</%sID>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.ID), input_name='ID')),
                namespaceprefix_, eol_))
        if self.DisplayName is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDisplayName>%s</%sDisplayName>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.DisplayName), input_name='DisplayName')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(CanonicalUser, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'ID':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'ID')
            value_ = self.gds_validate_string(value_, node, 'ID')
            self.ID = value_
        elif nodeName_ == 'DisplayName':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'DisplayName')
            value_ = self.gds_validate_string(value_, node, 'DisplayName')
            self.DisplayName = value_
        super(CanonicalUser, self).buildChildren(child_, node, nodeName_, True)
# end class CanonicalUser
class Group(Grantee):
    """Generated binding for the S3 ``Group`` grantee: a Grantee identified
    by a group URI (e.g. the AllUsers / AuthenticatedUsers groups).
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = Grantee
    def __init__(self, URI=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        super(Group, self).__init__(**kwargs_)
        self.URI = URI
    def factory(*args_, **kwargs_):
        """Instantiate Group or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Group)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Group.subclass:
            return Group.subclass(*args_, **kwargs_)
        else:
            return Group(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_URI(self):
        return self.URI
    def set_URI(self, URI):
        self.URI = URI
    def hasContent_(self):
        if (
            self.URI is not None or
            super(Group, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Group', pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Group')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Group')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='Group', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Group'):
        super(Group, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Group')
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Group',
                       fromsubclass_=False, pretty_print=True):
        super(Group, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True,
                                          pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.URI is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sURI>%s</%sURI>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.URI), input_name='URI')),
                namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(Group, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'URI':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'URI')
            value_ = self.gds_validate_string(value_, node, 'URI')
            self.URI = value_
        super(Group, self).buildChildren(child_, node, nodeName_, True)
# end class Group
class Grant(GeneratedsSuper):
    """Generated binding for the S3 ``Grant`` complex type: one Grantee
    paired with the Permission granted to it.

    Permission is restricted by the schema to the enumeration READ, WRITE,
    READ_ACP, WRITE_ACP, FULL_CONTROL; violations only produce a warning.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = None
    def __init__(self, Grantee=None, Permission=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Grantee = Grantee
        self.Permission = Permission
        # Warn (not raise) immediately if the initial Permission is invalid.
        self.validate_Permission(self.Permission)
    def factory(*args_, **kwargs_):
        """Instantiate Grant or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Grant)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Grant.subclass:
            return Grant.subclass(*args_, **kwargs_)
        else:
            return Grant(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Grantee(self):
        return self.Grantee
    def set_Grantee(self, Grantee):
        self.Grantee = Grantee
    def set_Grantee_with_type(self, value):
        """Set Grantee and stamp it so export() emits the <Grantee> tag with
        the correct xsi:type for the concrete grantee class."""
        self.Grantee = value
        value.original_tagname_ = 'Grantee'
        value.extensiontype_ = value.__class__.__name__
    def get_Permission(self):
        return self.Permission
    def set_Permission(self, Permission):
        self.Permission = Permission
    def validate_Permission(self, value):
        """Warn if *value* violates the xsd enumeration for Permission."""
        # Validate type Permission, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['READ', 'WRITE', 'READ_ACP', 'WRITE_ACP', 'FULL_CONTROL']
            # Direct membership test replaces the generated linear scan
            # with a flag variable; the warning text is unchanged.
            if value not in enumerations:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on Permission' % {
                    "value": value.encode("utf-8")})
    def hasContent_(self):
        if (
            self.Grantee is not None or
            self.Permission is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Grant', pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Grant')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Grant')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='Grant', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Grant'):
        # Grant declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='Grant',
                       fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Grantee is not None:
            # Delegates to the concrete grantee's own export().
            self.Grantee.export(outfile, level, namespaceprefix_, namespacedef_='', pretty_print=pretty_print)
        if self.Permission is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPermission>%s</%sPermission>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Permission), input_name='Permission')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Grantee':
            # Grantee is polymorphic: resolve the concrete binding class
            # from the xsi:type attribute and build that instead.
            type_name_ = child_.attrib.get(
                '{http://www.w3.org/2001/XMLSchema-instance}type')
            if type_name_ is None:
                type_name_ = child_.attrib.get('type')
            if type_name_ is not None:
                type_names_ = type_name_.split(':')
                if len(type_names_) == 1:
                    type_name_ = type_names_[0]
                else:
                    # Strip the namespace prefix from e.g. "tns:CanonicalUser".
                    type_name_ = type_names_[1]
                class_ = globals()[type_name_]
                obj_ = class_.factory()
                obj_.build(child_)
            else:
                raise NotImplementedError(
                    'Class not implemented for <Grantee> element')
            self.Grantee = obj_
            obj_.original_tagname_ = 'Grantee'
        elif nodeName_ == 'Permission':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Permission')
            value_ = self.gds_validate_string(value_, node, 'Permission')
            self.Permission = value_
            # validate type Permission
            self.validate_Permission(self.Permission)
# end class Grant
class AccessControlList(GeneratedsSuper):
    """Generated binding for the S3 ``AccessControlList`` type: a sequence
    of Grant elements.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = None
    def __init__(self, Grant=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Default to a fresh list per instance (never share a mutable default).
        if Grant is None:
            self.Grant = []
        else:
            self.Grant = Grant
    def factory(*args_, **kwargs_):
        """Instantiate AccessControlList or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AccessControlList)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AccessControlList.subclass:
            return AccessControlList.subclass(*args_, **kwargs_)
        else:
            return AccessControlList(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Grant(self):
        return self.Grant
    def set_Grant(self, Grant):
        self.Grant = Grant
    def add_Grant(self, value):
        self.Grant.append(value)
    def insert_Grant_at(self, index, value):
        self.Grant.insert(index, value)
    def replace_Grant_at(self, index, value):
        self.Grant[index] = value
    def hasContent_(self):
        # True when at least one Grant is present.
        if (
            self.Grant
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='AccessControlList',
               pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AccessControlList')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AccessControlList')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='AccessControlList',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AccessControlList'):
        # AccessControlList declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='AccessControlList',
                       fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Grant_ in self.Grant:
            Grant_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Grant', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Grant':
            obj_ = Grant.factory(parent_object_=self)
            obj_.build(child_)
            self.Grant.append(obj_)
            obj_.original_tagname_ = 'Grant'
# end class AccessControlList
class CreateBucketConfiguration(GeneratedsSuper):
    """Generated binding for the S3 ``CreateBucketConfiguration`` type,
    wrapping an optional LocationConstraint element.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = None
    def __init__(self, LocationConstraint=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.LocationConstraint = LocationConstraint
    def factory(*args_, **kwargs_):
        """Instantiate CreateBucketConfiguration or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CreateBucketConfiguration)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CreateBucketConfiguration.subclass:
            return CreateBucketConfiguration.subclass(*args_, **kwargs_)
        else:
            return CreateBucketConfiguration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_LocationConstraint(self):
        return self.LocationConstraint
    def set_LocationConstraint(self, LocationConstraint):
        self.LocationConstraint = LocationConstraint
    def hasContent_(self):
        if (
            self.LocationConstraint is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CreateBucketConfiguration',
               pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CreateBucketConfiguration')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CreateBucketConfiguration')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CreateBucketConfiguration',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='CreateBucketConfiguration'):
        # CreateBucketConfiguration declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='CreateBucketConfiguration', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.LocationConstraint is not None:
            # LocationConstraint is a complex element; delegate to its export().
            self.LocationConstraint.export(outfile, level, namespaceprefix_, namespacedef_='',
                                           name_='LocationConstraint', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'LocationConstraint':
            obj_ = LocationConstraint.factory(parent_object_=self)
            obj_.build(child_)
            self.LocationConstraint = obj_
            obj_.original_tagname_ = 'LocationConstraint'
# end class CreateBucketConfiguration
class LocationConstraint(GeneratedsSuper):
    """Generated binding for the S3 ``LocationConstraint`` type: a simple
    element whose text content (``valueOf_``) names the bucket region.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate LocationConstraint or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, LocationConstraint)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if LocationConstraint.subclass:
            return LocationConstraint.subclass(*args_, **kwargs_)
        else:
            return LocationConstraint(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self):
        return self.valueOf_
    def set_valueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric zero still counts as content; only None/empty text does not.
        if (
            (1 if type(self.valueOf_) in [int, float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='LocationConstraint',
               pretty_print=True):
        """Serialize this element as XML; text content is written inline
        (no newline before it), unlike the container-type exports."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('LocationConstraint')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='LocationConstraint')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='LocationConstraint',
                                pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='LocationConstraint'):
        # LocationConstraint declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='LocationConstraint',
                       fromsubclass_=False, pretty_print=True):
        # Simple type: no element children.
        pass
    def build(self, node):
        """Populate this instance (including text content) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class LocationConstraint
class AccessControlPolicy(GeneratedsSuper):
    """Generated binding for the S3 ``AccessControlPolicy`` type:
    a bucket/object Owner plus its AccessControlList.
    """
    # Assign a class here to make factory() build that class instead.
    subclass = None
    superclass = None
    def __init__(self, Owner=None, AccessControlList=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Owner = Owner
        self.AccessControlList = AccessControlList
    def factory(*args_, **kwargs_):
        """Instantiate AccessControlPolicy or a registered substitute."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AccessControlPolicy)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AccessControlPolicy.subclass:
            return AccessControlPolicy.subclass(*args_, **kwargs_)
        else:
            return AccessControlPolicy(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Owner(self):
        return self.Owner
    def set_Owner(self, Owner):
        self.Owner = Owner
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def hasContent_(self):
        if (
            self.Owner is not None or
            self.AccessControlList is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='AccessControlPolicy',
               pretty_print=True):
        """Serialize this element (and attributes/children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AccessControlPolicy')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AccessControlPolicy')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='AccessControlPolicy',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AccessControlPolicy'):
        # AccessControlPolicy declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='AccessControlPolicy',
                       fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Owner is not None:
            self.Owner.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Owner',
                              pretty_print=pretty_print)
        if self.AccessControlList is not None:
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Owner':
            # The schema types Owner as a CanonicalUser.
            obj_ = CanonicalUser.factory(parent_object_=self)
            obj_.build(child_)
            self.Owner = obj_
            obj_.original_tagname_ = 'Owner'
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
# end class AccessControlPolicy
class SetObjectAccessControlPolicy(GeneratedsSuper):
    """Request element for the S3 SOAP SetObjectAccessControlPolicy call.

    Carries the target object (Bucket/Key), the ACL to apply
    (AccessControlList), and the request-authentication fields
    (AWSAccessKeyId, Timestamp, Signature, Credential).

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, AccessControlList=None, AWSAccessKeyId=None, Timestamp=None,
                 Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        self.AccessControlList = AccessControlList
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a datetime object;
        # strings are parsed eagerly (no fractional seconds / timezone).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SetObjectAccessControlPolicy)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SetObjectAccessControlPolicy.subclass:
            return SetObjectAccessControlPolicy.subclass(*args_, **kwargs_)
        else:
            return SetObjectAccessControlPolicy(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- plain accessors for the child-element values ---
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        # True when at least one child element is set; export() uses this
        # to decide between a full element and a self-closing tag.
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.AccessControlList is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='SetObjectAccessControlPolicy', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetObjectAccessControlPolicy')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SetObjectAccessControlPolicy')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetObjectAccessControlPolicy',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='SetObjectAccessControlPolicy'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetObjectAccessControlPolicy', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, XML-escaped, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        if self.AccessControlList is not None:
            # Complex child: delegate serialization to its own export().
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class SetObjectAccessControlPolicy
class SetObjectAccessControlPolicyResponse(GeneratedsSuper):
    """Empty response element for the S3 SOAP SetObjectAccessControlPolicy
    call.

    The schema defines no child elements and no attributes, so an instance
    only carries its XML tag; export() always emits a self-closing element.

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SetObjectAccessControlPolicyResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SetObjectAccessControlPolicyResponse.subclass:
            return SetObjectAccessControlPolicyResponse.subclass(*args_, **kwargs_)
        else:
            return SetObjectAccessControlPolicyResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fix: the generated body was `if ( ): return True / else: return
        # False`, which tests an empty tuple (always falsy) and leaves the
        # `return True` branch unreachable.  This element never has
        # content, so state that directly.
        return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='SetObjectAccessControlPolicyResponse', pretty_print=True):
        """Serialize this element as XML to *outfile* (always self-closing,
        since hasContent_() is False)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetObjectAccessControlPolicyResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_,
                              name_='SetObjectAccessControlPolicyResponse')
        if self.hasContent_():
            # Dead branch for this schema (no content), kept for structural
            # parity with the other generated classes.
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetObjectAccessControlPolicyResponse',
                                pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='SetObjectAccessControlPolicyResponse'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetObjectAccessControlPolicyResponse', fromsubclass_=False, pretty_print=True):
        # Schema defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Schema defines no child elements; unexpected children are ignored.
        pass
# end class SetObjectAccessControlPolicyResponse
class SetBucketAccessControlPolicy(GeneratedsSuper):
    """Request element for the S3 SOAP SetBucketAccessControlPolicy call.

    Carries the target Bucket, the ACL to apply (AccessControlList), and
    the request-authentication fields (AWSAccessKeyId, Timestamp,
    Signature, Credential).

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, AccessControlList=None, AWSAccessKeyId=None, Timestamp=None, Signature=None,
                 Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.AccessControlList = AccessControlList
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a datetime object;
        # strings are parsed eagerly (no fractional seconds / timezone).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SetBucketAccessControlPolicy)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SetBucketAccessControlPolicy.subclass:
            return SetBucketAccessControlPolicy.subclass(*args_, **kwargs_)
        else:
            return SetBucketAccessControlPolicy(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- plain accessors for the child-element values ---
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        # True when at least one child element is set; export() uses this
        # to decide between a full element and a self-closing tag.
        if (
            self.Bucket is not None or
            self.AccessControlList is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='SetBucketAccessControlPolicy', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetBucketAccessControlPolicy')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SetBucketAccessControlPolicy')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetBucketAccessControlPolicy',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='SetBucketAccessControlPolicy'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetBucketAccessControlPolicy', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, XML-escaped, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.AccessControlList is not None:
            # Complex child: delegate serialization to its own export().
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class SetBucketAccessControlPolicy
class SetBucketAccessControlPolicyResponse(GeneratedsSuper):
    """Empty response element for the S3 SOAP SetBucketAccessControlPolicy
    call.

    The schema defines no child elements and no attributes, so an instance
    only carries its XML tag; export() always emits a self-closing element.

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SetBucketAccessControlPolicyResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SetBucketAccessControlPolicyResponse.subclass:
            return SetBucketAccessControlPolicyResponse.subclass(*args_, **kwargs_)
        else:
            return SetBucketAccessControlPolicyResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fix: the generated body was `if ( ): return True / else: return
        # False`, which tests an empty tuple (always falsy) and leaves the
        # `return True` branch unreachable.  This element never has
        # content, so state that directly.
        return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='SetBucketAccessControlPolicyResponse', pretty_print=True):
        """Serialize this element as XML to *outfile* (always self-closing,
        since hasContent_() is False)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SetBucketAccessControlPolicyResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_,
                              name_='SetBucketAccessControlPolicyResponse')
        if self.hasContent_():
            # Dead branch for this schema (no content), kept for structural
            # parity with the other generated classes.
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='SetBucketAccessControlPolicyResponse',
                                pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='SetBucketAccessControlPolicyResponse'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='SetBucketAccessControlPolicyResponse', fromsubclass_=False, pretty_print=True):
        # Schema defines no child elements.
        pass
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Schema defines no child elements; unexpected children are ignored.
        pass
# end class SetBucketAccessControlPolicyResponse
class GetObject(GeneratedsSuper):
    """Request element for the S3 SOAP GetObject call.

    Identifies the target object (Bucket/Key), what to return
    (GetMetadata / GetData / InlineData flags, serialized as xsd booleans),
    and the request-authentication fields (AWSAccessKeyId, Timestamp,
    Signature, Credential).

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, GetMetadata=None, GetData=None, InlineData=None, AWSAccessKeyId=None,
                 Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        self.GetMetadata = GetMetadata
        self.GetData = GetData
        self.InlineData = InlineData
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string or a datetime object;
        # strings are parsed eagerly (no fractional seconds / timezone).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GetObject)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GetObject.subclass:
            return GetObject.subclass(*args_, **kwargs_)
        else:
            return GetObject(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- plain accessors for the child-element values ---
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_GetMetadata(self):
        return self.GetMetadata
    def set_GetMetadata(self, GetMetadata):
        self.GetMetadata = GetMetadata
    def get_GetData(self):
        return self.GetData
    def set_GetData(self, GetData):
        self.GetData = GetData
    def get_InlineData(self):
        return self.InlineData
    def set_InlineData(self, InlineData):
        self.InlineData = InlineData
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        # True when at least one child element is set; export() uses this
        # to decide between a full element and a self-closing tag.
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.GetMetadata is not None or
            self.GetData is not None or
            self.InlineData is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObject',
               pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObject')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetObject')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObject', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GetObject'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObject',
                       fromsubclass_=False, pretty_print=True):
        """Write each non-None child element, XML-escaped, in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        if self.GetMetadata is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGetMetadata>%s</%sGetMetadata>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.GetMetadata, input_name='GetMetadata'), namespaceprefix_,
                eol_))
        if self.GetData is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGetData>%s</%sGetData>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.GetData, input_name='GetData'), namespaceprefix_, eol_))
        if self.InlineData is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sInlineData>%s</%sInlineData>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.InlineData, input_name='InlineData'), namespaceprefix_,
                eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of *node* into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'GetMetadata':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'GetMetadata')
            ival_ = self.gds_validate_boolean(ival_, node, 'GetMetadata')
            self.GetMetadata = ival_
        elif nodeName_ == 'GetData':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'GetData')
            ival_ = self.gds_validate_boolean(ival_, node, 'GetData')
            self.GetData = ival_
        elif nodeName_ == 'InlineData':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'InlineData')
            ival_ = self.gds_validate_boolean(ival_, node, 'InlineData')
            self.InlineData = ival_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class GetObject
class GetObjectResponse(GeneratedsSuper):
    """Response wrapper for the S3 SOAP GetObject call.

    Contains a single child element, itself tagged 'GetObjectResponse',
    which is built as a GetObjectResult instance (see buildChildren).

    Generated data-binding class (generateDS.py style) -- regenerate from
    the schema rather than editing by hand.
    """
    # Hooks that allow an external module to substitute a subclass at
    # factory() time.
    subclass = None
    superclass = None
    def __init__(self, GetObjectResponse_member=None, **kwargs_):
        # The parameter is suffixed `_member` because the child element
        # shares this class's name; the stored attribute keeps the
        # schema name.
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.GetObjectResponse = GetObjectResponse_member
    def factory(*args_, **kwargs_):
        # Prefer a dynamically registered subclass (CurrentSubclassModule_
        # lookup, then the `subclass` hook) over this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GetObjectResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GetObjectResponse.subclass:
            return GetObjectResponse.subclass(*args_, **kwargs_)
        else:
            return GetObjectResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- plain accessors for the child-element value ---
    def get_GetObjectResponse(self):
        return self.GetObjectResponse
    def set_GetObjectResponse(self, GetObjectResponse):
        self.GetObjectResponse = GetObjectResponse
    def hasContent_(self):
        # True when the single child element is set; export() uses this
        # to decide between a full element and a self-closing tag.
        if (
            self.GetObjectResponse is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectResponse',
               pretty_print=True):
        """Serialize this element (and its child) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-export under the tag name the element was originally parsed with.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetObjectResponse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GetObjectResponse'):
        # Schema defines no attributes for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectResponse',
                       fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.GetObjectResponse is not None:
            # Complex child: delegate serialization to its own export().
            self.GetObjectResponse.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GetObjectResponse',
                                          pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an lxml/ElementTree *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the tag before dispatching.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Schema defines no attributes for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'GetObjectResponse':
            # Despite the tag name, the schema types this child as
            # GetObjectResult.
            obj_ = GetObjectResult.factory(parent_object_=self)
            obj_.build(child_)
            self.GetObjectResponse = obj_
            obj_.original_tagname_ = 'GetObjectResponse'
# end class GetObjectResponse
class GetObjectResult(Result):
subclass = None
superclass = Result
def __init__(self, Status=None, Metadata=None, Data=None, LastModified=None, ETag=None, **kwargs_):
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
super(GetObjectResult, self).__init__(Status, **kwargs_)
if Metadata is None:
self.Metadata = []
else:
self.Metadata = Metadata
self.Data = Data
if isinstance(LastModified, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(LastModified, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = LastModified
self.LastModified = initvalue_
self.ETag = ETag
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, GetObjectResult)
if subclass is not None:
return subclass(*args_, **kwargs_)
if GetObjectResult.subclass:
return GetObjectResult.subclass(*args_, **kwargs_)
else:
return GetObjectResult(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Metadata(self):
return self.Metadata
def set_Metadata(self, Metadata):
self.Metadata = Metadata
def add_Metadata(self, value):
self.Metadata.append(value)
def insert_Metadata_at(self, index, value):
self.Metadata.insert(index, value)
def replace_Metadata_at(self, index, value):
self.Metadata[index] = value
def get_Data(self):
return self.Data
def set_Data(self, Data):
self.Data = Data
def get_LastModified(self):
return self.LastModified
def set_LastModified(self, LastModified):
self.LastModified = LastModified
def get_ETag(self):
return self.ETag
def set_ETag(self, ETag):
self.ETag = ETag
def hasContent_(self):
if (
self.Metadata or
self.Data is not None or
self.LastModified is not None or
self.ETag is not None or
super(GetObjectResult, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectResult',
pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectResult')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetObjectResult')
if self.hasContent_():
outfile.write('>%s' % (eol_,))
self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectResult',
pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_,))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GetObjectResult'):
super(GetObjectResult, self).exportAttributes(outfile, level, already_processed, namespaceprefix_,
name_='GetObjectResult')
def exportChildren(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectResult',
fromsubclass_=False, pretty_print=True):
super(GetObjectResult, self).exportChildren(outfile, level, namespaceprefix_, namespacedef_, name_, True,
pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Metadata_ in self.Metadata:
Metadata_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
pretty_print=pretty_print)
if self.Data is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sData>%s</%sData>%s' % (
namespaceprefix_, self.gds_format_base64(self.Data, input_name='Data'), namespaceprefix_, eol_))
if self.LastModified is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
namespaceprefix_, self.gds_format_datetime(self.LastModified, input_name='LastModified'), namespaceprefix_,
eol_))
if self.ETag is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sETag>%s</%sETag>%s' % (
namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.ETag), input_name='ETag')),
namespaceprefix_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; GetObjectResult adds none, so delegate to the superclass."""
        super(GetObjectResult, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Metadata':
obj_ = MetadataEntry.factory(parent_object_=self)
obj_.build(child_)
self.Metadata.append(obj_)
obj_.original_tagname_ = 'Metadata'
elif nodeName_ == 'Data':
sval_ = child_.text
if sval_ is not None:
try:
bval_ = base64.b64decode(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
bval_ = self.gds_validate_base64(bval_, node, 'Data')
else:
bval_ = None
self.Data = bval_
elif nodeName_ == 'LastModified':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.LastModified = dval_
elif nodeName_ == 'ETag':
value_ = child_.text
value_ = self.gds_parse_string(value_, node, 'ETag')
value_ = self.gds_validate_string(value_, node, 'ETag')
self.ETag = value_
super(GetObjectResult, self).buildChildren(child_, node, nodeName_, True)
# end class GetObjectResult
class GetObjectExtended(GeneratedsSuper):
    """Request type for the S3 SOAP ``GetObjectExtended`` operation.

    Carries the bucket/key to fetch, flags selecting what to return
    (metadata, data, inline data), an optional byte range, conditional
    retrieval fields (IfModifiedSince, IfUnmodifiedSince, IfMatch,
    IfNoneMatch, ReturnCompleteObjectOnConditionFailure) and request
    authentication fields (AWSAccessKeyId, Timestamp, Signature,
    Credential).  Generated XML-binding class: members mirror the XSD
    element children one-to-one.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, GetMetadata=None, GetData=None, InlineData=None, ByteRangeStart=None,
                 ByteRangeEnd=None, IfModifiedSince=None, IfUnmodifiedSince=None, IfMatch=None, IfNoneMatch=None,
                 ReturnCompleteObjectOnConditionFailure=None, AWSAccessKeyId=None, Timestamp=None, Signature=None,
                 Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        self.GetMetadata = GetMetadata
        self.GetData = GetData
        self.InlineData = InlineData
        self.ByteRangeStart = ByteRangeStart
        self.ByteRangeEnd = ByteRangeEnd
        # Datetime-valued members accept either an already-parsed datetime
        # or an ISO-8601 string 'YYYY-MM-DDTHH:MM:SS' (no timezone suffix).
        if isinstance(IfModifiedSince, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(IfModifiedSince, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = IfModifiedSince
        self.IfModifiedSince = initvalue_
        if isinstance(IfUnmodifiedSince, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(IfUnmodifiedSince, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = IfUnmodifiedSince
        self.IfUnmodifiedSince = initvalue_
        # Repeated elements default to a fresh list (never a shared default).
        if IfMatch is None:
            self.IfMatch = []
        else:
            self.IfMatch = IfMatch
        if IfNoneMatch is None:
            self.IfNoneMatch = []
        else:
            self.IfNoneMatch = IfNoneMatch
        self.ReturnCompleteObjectOnConditionFailure = ReturnCompleteObjectOnConditionFailure
        self.AWSAccessKeyId = AWSAccessKeyId
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Instantiate GetObjectExtended or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GetObjectExtended)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GetObjectExtended.subclass:
            return GetObjectExtended.subclass(*args_, **kwargs_)
        else:
            return GetObjectExtended(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors for the member fields. ---
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_GetMetadata(self):
        return self.GetMetadata
    def set_GetMetadata(self, GetMetadata):
        self.GetMetadata = GetMetadata
    def get_GetData(self):
        return self.GetData
    def set_GetData(self, GetData):
        self.GetData = GetData
    def get_InlineData(self):
        return self.InlineData
    def set_InlineData(self, InlineData):
        self.InlineData = InlineData
    def get_ByteRangeStart(self):
        return self.ByteRangeStart
    def set_ByteRangeStart(self, ByteRangeStart):
        self.ByteRangeStart = ByteRangeStart
    def get_ByteRangeEnd(self):
        return self.ByteRangeEnd
    def set_ByteRangeEnd(self, ByteRangeEnd):
        self.ByteRangeEnd = ByteRangeEnd
    def get_IfModifiedSince(self):
        return self.IfModifiedSince
    def set_IfModifiedSince(self, IfModifiedSince):
        self.IfModifiedSince = IfModifiedSince
    def get_IfUnmodifiedSince(self):
        return self.IfUnmodifiedSince
    def set_IfUnmodifiedSince(self, IfUnmodifiedSince):
        self.IfUnmodifiedSince = IfUnmodifiedSince
    def get_IfMatch(self):
        return self.IfMatch
    def set_IfMatch(self, IfMatch):
        self.IfMatch = IfMatch
    def add_IfMatch(self, value):
        self.IfMatch.append(value)
    def insert_IfMatch_at(self, index, value):
        self.IfMatch.insert(index, value)
    def replace_IfMatch_at(self, index, value):
        self.IfMatch[index] = value
    def get_IfNoneMatch(self):
        return self.IfNoneMatch
    def set_IfNoneMatch(self, IfNoneMatch):
        self.IfNoneMatch = IfNoneMatch
    def add_IfNoneMatch(self, value):
        self.IfNoneMatch.append(value)
    def insert_IfNoneMatch_at(self, index, value):
        self.IfNoneMatch.insert(index, value)
    def replace_IfNoneMatch_at(self, index, value):
        self.IfNoneMatch[index] = value
    def get_ReturnCompleteObjectOnConditionFailure(self):
        return self.ReturnCompleteObjectOnConditionFailure
    def set_ReturnCompleteObjectOnConditionFailure(self, ReturnCompleteObjectOnConditionFailure):
        self.ReturnCompleteObjectOnConditionFailure = ReturnCompleteObjectOnConditionFailure
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.GetMetadata is not None or
            self.GetData is not None or
            self.InlineData is not None or
            self.ByteRangeStart is not None or
            self.ByteRangeEnd is not None or
            self.IfModifiedSince is not None or
            self.IfUnmodifiedSince is not None or
            self.IfMatch or
            self.IfNoneMatch or
            self.ReturnCompleteObjectOnConditionFailure is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectExtended',
               pretty_print=True):
        """Write this element and its children as XML to ``outfile``.

        Emits a self-closing tag when there is no content.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectExtended')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetObjectExtended')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectExtended',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GetObjectExtended'):
        """No XML attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectExtended',
                       fromsubclass_=False, pretty_print=True):
        """Serialize every non-None member (and each list entry) as a child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        if self.GetMetadata is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGetMetadata>%s</%sGetMetadata>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.GetMetadata, input_name='GetMetadata'), namespaceprefix_,
                eol_))
        if self.GetData is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sGetData>%s</%sGetData>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.GetData, input_name='GetData'), namespaceprefix_, eol_))
        if self.InlineData is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sInlineData>%s</%sInlineData>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.InlineData, input_name='InlineData'), namespaceprefix_,
                eol_))
        if self.ByteRangeStart is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sByteRangeStart>%s</%sByteRangeStart>%s' % (
                namespaceprefix_, self.gds_format_integer(self.ByteRangeStart, input_name='ByteRangeStart'),
                namespaceprefix_, eol_))
        if self.ByteRangeEnd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sByteRangeEnd>%s</%sByteRangeEnd>%s' % (
                namespaceprefix_, self.gds_format_integer(self.ByteRangeEnd, input_name='ByteRangeEnd'), namespaceprefix_,
                eol_))
        if self.IfModifiedSince is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIfModifiedSince>%s</%sIfModifiedSince>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.IfModifiedSince, input_name='IfModifiedSince'),
                namespaceprefix_, eol_))
        if self.IfUnmodifiedSince is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIfUnmodifiedSince>%s</%sIfUnmodifiedSince>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.IfUnmodifiedSince, input_name='IfUnmodifiedSince'),
                namespaceprefix_, eol_))
        for IfMatch_ in self.IfMatch:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIfMatch>%s</%sIfMatch>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(IfMatch_), input_name='IfMatch')),
                namespaceprefix_, eol_))
        for IfNoneMatch_ in self.IfNoneMatch:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIfNoneMatch>%s</%sIfNoneMatch>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(IfNoneMatch_), input_name='IfNoneMatch')), namespaceprefix_, eol_))
        if self.ReturnCompleteObjectOnConditionFailure is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write(
                '<%sReturnCompleteObjectOnConditionFailure>%s</%sReturnCompleteObjectOnConditionFailure>%s' % (
                    namespaceprefix_, self.gds_format_boolean(self.ReturnCompleteObjectOnConditionFailure,
                                                              input_name='ReturnCompleteObjectOnConditionFailure'),
                    namespaceprefix_, eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Route one parsed child element into the matching member field."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'GetMetadata':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'GetMetadata')
            ival_ = self.gds_validate_boolean(ival_, node, 'GetMetadata')
            self.GetMetadata = ival_
        elif nodeName_ == 'GetData':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'GetData')
            ival_ = self.gds_validate_boolean(ival_, node, 'GetData')
            self.GetData = ival_
        elif nodeName_ == 'InlineData':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'InlineData')
            ival_ = self.gds_validate_boolean(ival_, node, 'InlineData')
            self.InlineData = ival_
        # Integer children are skipped when the element is empty.
        elif nodeName_ == 'ByteRangeStart' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ByteRangeStart')
            ival_ = self.gds_validate_integer(ival_, node, 'ByteRangeStart')
            self.ByteRangeStart = ival_
        elif nodeName_ == 'ByteRangeEnd' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ByteRangeEnd')
            ival_ = self.gds_validate_integer(ival_, node, 'ByteRangeEnd')
            self.ByteRangeEnd = ival_
        elif nodeName_ == 'IfModifiedSince':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.IfModifiedSince = dval_
        elif nodeName_ == 'IfUnmodifiedSince':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.IfUnmodifiedSince = dval_
        elif nodeName_ == 'IfMatch':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'IfMatch')
            value_ = self.gds_validate_string(value_, node, 'IfMatch')
            self.IfMatch.append(value_)
        elif nodeName_ == 'IfNoneMatch':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'IfNoneMatch')
            value_ = self.gds_validate_string(value_, node, 'IfNoneMatch')
            self.IfNoneMatch.append(value_)
        elif nodeName_ == 'ReturnCompleteObjectOnConditionFailure':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'ReturnCompleteObjectOnConditionFailure')
            ival_ = self.gds_validate_boolean(ival_, node, 'ReturnCompleteObjectOnConditionFailure')
            self.ReturnCompleteObjectOnConditionFailure = ival_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
class GetObjectExtendedResponse(GeneratedsSuper):
    """SOAP response wrapper for GetObjectExtended.

    Holds a single GetObjectResponse child (a GetObjectResult instance).
    Generated XML-binding class.
    """
    subclass = None
    superclass = None
    def __init__(self, GetObjectResponse=None, **kwargs_):
        """Store the optional GetObjectResponse child plus binding bookkeeping."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.GetObjectResponse = GetObjectResponse
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, GetObjectExtendedResponse)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = GetObjectExtendedResponse.subclass or GetObjectExtendedResponse
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_GetObjectResponse(self):
        """Return the wrapped GetObjectResponse result."""
        return self.GetObjectResponse
    def set_GetObjectResponse(self, GetObjectResponse):
        """Replace the wrapped GetObjectResponse result."""
        self.GetObjectResponse = GetObjectResponse
    def hasContent_(self):
        """Return True when there is a child element to serialize."""
        return self.GetObjectResponse is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='GetObjectExtendedResponse',
               pretty_print=True):
        """Write this element (and any child) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GetObjectExtendedResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GetObjectExtendedResponse')
        if not self.hasContent_():
            # Empty element: close in place.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='GetObjectExtendedResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='GetObjectExtendedResponse'):
        """No XML attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='GetObjectExtendedResponse', fromsubclass_=False, pretty_print=True):
        """Serialize the GetObjectResponse child, if present."""
        eol_ = '\n' if pretty_print else ''
        if self.GetObjectResponse is not None:
            self.GetObjectResponse.export(outfile, level, namespaceprefix_, namespacedef_='', name_='GetObjectResponse',
                                          pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse a child element into the GetObjectResponse member."""
        if nodeName_ == 'GetObjectResponse':
            result_ = GetObjectResult.factory(parent_object_=self)
            result_.build(child_)
            self.GetObjectResponse = result_
            result_.original_tagname_ = 'GetObjectResponse'
# end class GetObjectExtendedResponse
class PutObject(GeneratedsSuper):
    """Request type for the S3 SOAP ``PutObject`` operation.

    Carries the destination bucket/key, user metadata entries, the
    content length, an optional access-control list and storage class,
    and the request authentication fields (AWSAccessKeyId, Timestamp,
    Signature, Credential).  Generated XML-binding class.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, Metadata=None, ContentLength=None, AccessControlList=None,
                 StorageClass=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        # Repeated element defaults to a fresh list (never a shared default).
        if Metadata is None:
            self.Metadata = []
        else:
            self.Metadata = Metadata
        self.ContentLength = ContentLength
        self.AccessControlList = AccessControlList
        self.StorageClass = StorageClass
        # Enumeration check warns (does not raise) on invalid values.
        self.validate_StorageClass(self.StorageClass)
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp accepts an already-parsed datetime or an ISO-8601
        # string 'YYYY-MM-DDTHH:MM:SS' (no timezone suffix).
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Instantiate PutObject or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PutObject)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PutObject.subclass:
            return PutObject.subclass(*args_, **kwargs_)
        else:
            return PutObject(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors for the member fields. ---
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_Metadata(self):
        return self.Metadata
    def set_Metadata(self, Metadata):
        self.Metadata = Metadata
    def add_Metadata(self, value):
        self.Metadata.append(value)
    def insert_Metadata_at(self, index, value):
        self.Metadata.insert(index, value)
    def replace_Metadata_at(self, index, value):
        self.Metadata[index] = value
    def get_ContentLength(self):
        return self.ContentLength
    def set_ContentLength(self, ContentLength):
        self.ContentLength = ContentLength
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def get_StorageClass(self):
        return self.StorageClass
    def set_StorageClass(self, StorageClass):
        self.StorageClass = StorageClass
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def validate_StorageClass(self, value):
        # Validate type StorageClass, a restriction on xsd:string.
        # Issues a warning (never raises) when the value is outside the
        # XSD enumeration.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['STANDARD', 'REDUCED_REDUNDANCY', 'GLACIER', 'UNKNOWN']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                # NOTE(review): on Python 3 the .encode("utf-8") makes the
                # interpolated value render as "b'...'" in the warning text;
                # presumably kept for Python 2 compatibility -- confirm
                # before changing.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on StorageClass' % {
                    "value": value.encode("utf-8")})
    def hasContent_(self):
        """Return True when at least one child element would be serialized."""
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.Metadata or
            self.ContentLength is not None or
            self.AccessControlList is not None or
            self.StorageClass is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObject',
               pretty_print=True):
        """Write this element and its children as XML to ``outfile``.

        Emits a self-closing tag when there is no content.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PutObject')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this object was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PutObject')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PutObject', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PutObject'):
        """No XML attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObject',
                       fromsubclass_=False, pretty_print=True):
        """Serialize every non-None member (and each Metadata entry) as a child element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        # Complex children export themselves.
        for Metadata_ in self.Metadata:
            Metadata_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
                             pretty_print=pretty_print)
        if self.ContentLength is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sContentLength>%s</%sContentLength>%s' % (
                namespaceprefix_, self.gds_format_integer(self.ContentLength, input_name='ContentLength'), namespaceprefix_,
                eol_))
        if self.AccessControlList is not None:
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
        if self.StorageClass is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStorageClass>%s</%sStorageClass>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.StorageClass), input_name='StorageClass')), namespaceprefix_,
                eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Route one parsed child element into the matching member field."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'Metadata':
            obj_ = MetadataEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Metadata.append(obj_)
            obj_.original_tagname_ = 'Metadata'
        # Integer child is skipped when the element is empty.
        elif nodeName_ == 'ContentLength' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ContentLength')
            ival_ = self.gds_validate_integer(ival_, node, 'ContentLength')
            self.ContentLength = ival_
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
        elif nodeName_ == 'StorageClass':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'StorageClass')
            value_ = self.gds_validate_string(value_, node, 'StorageClass')
            self.StorageClass = value_
            # validate type StorageClass
            self.validate_StorageClass(self.StorageClass)
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
class PutObjectResponse(GeneratedsSuper):
    """SOAP response wrapper for PutObject.

    Holds a single PutObjectResponse child (a PutObjectResult instance).
    The constructor parameter is named ``PutObjectResponse_member`` to
    avoid clashing with the class name.  Generated XML-binding class.
    """
    subclass = None
    superclass = None
    def __init__(self, PutObjectResponse_member=None, **kwargs_):
        """Store the optional PutObjectResponse child plus binding bookkeeping."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.PutObjectResponse = PutObjectResponse_member
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, PutObjectResponse)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = PutObjectResponse.subclass or PutObjectResponse
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_PutObjectResponse(self):
        """Return the wrapped PutObjectResponse result."""
        return self.PutObjectResponse
    def set_PutObjectResponse(self, PutObjectResponse):
        """Replace the wrapped PutObjectResponse result."""
        self.PutObjectResponse = PutObjectResponse
    def hasContent_(self):
        """Return True when there is a child element to serialize."""
        return self.PutObjectResponse is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectResponse',
               pretty_print=True):
        """Write this element (and any child) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PutObjectResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PutObjectResponse')
        if not self.hasContent_():
            # Empty element: close in place.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PutObjectResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PutObjectResponse'):
        """No XML attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectResponse',
                       fromsubclass_=False, pretty_print=True):
        """Serialize the PutObjectResponse child, if present."""
        eol_ = '\n' if pretty_print else ''
        if self.PutObjectResponse is not None:
            self.PutObjectResponse.export(outfile, level, namespaceprefix_, namespacedef_='', name_='PutObjectResponse',
                                          pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse a child element into the PutObjectResponse member."""
        if nodeName_ == 'PutObjectResponse':
            result_ = PutObjectResult.factory(parent_object_=self)
            result_.build(child_)
            self.PutObjectResponse = result_
            result_.original_tagname_ = 'PutObjectResponse'
# end class PutObjectResponse
class PutObjectResult(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, ETag=None, LastModified=None, **kwargs_):
self.original_tagname_ = None
self.parent_object_ = kwargs_.get('parent_object_')
self.ETag = ETag
if isinstance(LastModified, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(LastModified, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = LastModified
self.LastModified = initvalue_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, PutObjectResult)
if subclass is not None:
return subclass(*args_, **kwargs_)
if PutObjectResult.subclass:
return PutObjectResult.subclass(*args_, **kwargs_)
else:
return PutObjectResult(*args_, **kwargs_)
factory = staticmethod(factory)
    def get_ETag(self):
        """Return the stored object's ETag."""
        return self.ETag
    def set_ETag(self, ETag):
        """Set the stored object's ETag."""
        self.ETag = ETag
    def get_LastModified(self):
        """Return the stored object's last-modified timestamp."""
        return self.LastModified
    def set_LastModified(self, LastModified):
        """Set the stored object's last-modified timestamp."""
        self.LastModified = LastModified
def hasContent_(self):
if (
self.ETag is not None or
self.LastModified is not None
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectResult',
pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('PutObjectResult')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PutObjectResult')
if self.hasContent_():
outfile.write('>%s' % (eol_,))
self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PutObjectResult',
pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_,))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PutObjectResult'):
pass
def exportChildren(self, outfile, level, namespaceprefix_='',
namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectResult',
fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.ETag is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sETag>%s</%sETag>%s' % (
namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.ETag), input_name='ETag')),
namespaceprefix_, eol_))
if self.LastModified is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
namespaceprefix_, self.gds_format_datetime(self.LastModified, input_name='LastModified'), namespaceprefix_,
eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ETag':
value_ = child_.text
value_ = self.gds_parse_string(value_, node, 'ETag')
value_ = self.gds_validate_string(value_, node, 'ETag')
self.ETag = value_
elif nodeName_ == 'LastModified':
sval_ = child_.text
dval_ = self.gds_parse_datetime(sval_)
self.LastModified = dval_
# end class PutObjectResult
class PutObjectInline(GeneratedsSuper):
    """Binding for the S3 ``PutObjectInline`` request element: uploads an
    object whose data is carried inline (base64) in the request, together
    with metadata, ACL, storage class and request-authentication fields.

    Auto-generated (generateDS-style) binding class; manual edits will be
    lost if the bindings are regenerated from the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, Metadata=None, Data=None, ContentLength=None, AccessControlList=None,
                 StorageClass=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        # Metadata is a repeated element; default to a fresh list per
        # instance (never a shared mutable default).
        if Metadata is None:
            self.Metadata = []
        else:
            self.Metadata = Metadata
        self.Data = Data
        self.ContentLength = ContentLength
        self.AccessControlList = AccessControlList
        self.StorageClass = StorageClass
        # Warn (not raise) if StorageClass is outside the xsd enumeration.
        self.validate_StorageClass(self.StorageClass)
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string (no fractional
        # seconds or timezone suffix) or as an already-built datetime.
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Instantiate, preferring an externally-registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PutObjectInline)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PutObjectInline.subclass:
            return PutObjectInline.subclass(*args_, **kwargs_)
        else:
            return PutObjectInline(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_Metadata(self):
        return self.Metadata
    def set_Metadata(self, Metadata):
        self.Metadata = Metadata
    def add_Metadata(self, value):
        self.Metadata.append(value)
    def insert_Metadata_at(self, index, value):
        self.Metadata.insert(index, value)
    def replace_Metadata_at(self, index, value):
        self.Metadata[index] = value
    def get_Data(self):
        return self.Data
    def set_Data(self, Data):
        self.Data = Data
    def get_ContentLength(self):
        return self.ContentLength
    def set_ContentLength(self, ContentLength):
        self.ContentLength = ContentLength
    def get_AccessControlList(self):
        return self.AccessControlList
    def set_AccessControlList(self, AccessControlList):
        self.AccessControlList = AccessControlList
    def get_StorageClass(self):
        return self.StorageClass
    def set_StorageClass(self, StorageClass):
        self.StorageClass = StorageClass
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def validate_StorageClass(self, value):
        # Validate type StorageClass, a restriction on xsd:string.
        # Emits a warning (does not raise) when the value is not one of
        # the schema's enumerated storage classes.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['STANDARD', 'REDUCED_REDUNDANCY', 'GLACIER', 'UNKNOWN']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                # NOTE(review): value.encode("utf-8") makes the warning text
                # show a bytes repr (b'...') under Python 3 -- generated-code
                # quirk; confirm before changing, regeneration would revert it.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on StorageClass' % {
                    "value": value.encode("utf-8")})
    def hasContent_(self):
        """Return True if at least one child element would be exported."""
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.Metadata or
            self.Data is not None or
            self.ContentLength is not None or
            self.AccessControlList is not None or
            self.StorageClass is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectInline',
               pretty_print=True):
        """Serialize this element (and its children) to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PutObjectInline')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The tag name seen at parse time wins over the default name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PutObjectInline')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PutObjectInline',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PutObjectInline'):
        # This element type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectInline',
                       fromsubclass_=False, pretty_print=True):
        """Write the child elements to ``outfile`` in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        # Repeated complex child: delegate to each entry's own export.
        for Metadata_ in self.Metadata:
            Metadata_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
                             pretty_print=pretty_print)
        if self.Data is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sData>%s</%sData>%s' % (
                namespaceprefix_, self.gds_format_base64(self.Data, input_name='Data'), namespaceprefix_, eol_))
        if self.ContentLength is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sContentLength>%s</%sContentLength>%s' % (
                namespaceprefix_, self.gds_format_integer(self.ContentLength, input_name='ContentLength'), namespaceprefix_,
                eol_))
        if self.AccessControlList is not None:
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='', name_='AccessControlList',
                                          pretty_print=pretty_print)
        if self.StorageClass is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStorageClass>%s</%sStorageClass>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.StorageClass), input_name='StorageClass')), namespaceprefix_,
                eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one recognised child element into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'Metadata':
            obj_ = MetadataEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Metadata.append(obj_)
            obj_.original_tagname_ = 'Metadata'
        elif nodeName_ == 'Data':
            # Data is base64-encoded in the document; decode it here and
            # raise a parse error on malformed input.
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'Data')
            else:
                bval_ = None
            self.Data = bval_
        elif nodeName_ == 'ContentLength' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'ContentLength')
            ival_ = self.gds_validate_integer(ival_, node, 'ContentLength')
            self.ContentLength = ival_
        elif nodeName_ == 'AccessControlList':
            obj_ = AccessControlList.factory(parent_object_=self)
            obj_.build(child_)
            self.AccessControlList = obj_
            obj_.original_tagname_ = 'AccessControlList'
        elif nodeName_ == 'StorageClass':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'StorageClass')
            value_ = self.gds_validate_string(value_, node, 'StorageClass')
            self.StorageClass = value_
            # validate type StorageClass
            self.validate_StorageClass(self.StorageClass)
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class PutObjectInline
class PutObjectInlineResponse(GeneratedsSuper):
    """Binding for the S3 ``PutObjectInlineResponse`` wrapper element; holds
    a single nested ``PutObjectResult``-typed child of the same name.

    Auto-generated (generateDS-style) binding class; manual edits will be
    lost if the bindings are regenerated from the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, PutObjectInlineResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Parameter is suffixed _member because the child element shares
        # this class's own name.
        self.PutObjectInlineResponse = PutObjectInlineResponse_member
    def factory(*args_, **kwargs_):
        """Instantiate, preferring an externally-registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PutObjectInlineResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PutObjectInlineResponse.subclass:
            return PutObjectInlineResponse.subclass(*args_, **kwargs_)
        else:
            return PutObjectInlineResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_PutObjectInlineResponse(self):
        return self.PutObjectInlineResponse
    def set_PutObjectInlineResponse(self, PutObjectInlineResponse):
        self.PutObjectInlineResponse = PutObjectInlineResponse
    def hasContent_(self):
        """Return True if the nested result element is present."""
        if (
            self.PutObjectInlineResponse is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PutObjectInlineResponse',
               pretty_print=True):
        """Serialize this element (and its children) to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PutObjectInlineResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The tag name seen at parse time wins over the default name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PutObjectInlineResponse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PutObjectInlineResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PutObjectInlineResponse'):
        # This element type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='PutObjectInlineResponse', fromsubclass_=False, pretty_print=True):
        """Delegate export of the nested result element, if present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.PutObjectInlineResponse is not None:
            self.PutObjectInlineResponse.export(outfile, level, namespaceprefix_, namespacedef_='',
                                                name_='PutObjectInlineResponse', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the nested PutObjectResult-typed child element."""
        if nodeName_ == 'PutObjectInlineResponse':
            obj_ = PutObjectResult.factory(parent_object_=self)
            obj_.build(child_)
            self.PutObjectInlineResponse = obj_
            obj_.original_tagname_ = 'PutObjectInlineResponse'
# end class PutObjectInlineResponse
class DeleteObject(GeneratedsSuper):
    """Binding for the S3 ``DeleteObject`` request element: identifies the
    bucket/key to delete plus the request-authentication fields.

    Auto-generated (generateDS-style) binding class; manual edits will be
    lost if the bindings are regenerated from the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Key=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None,
                 **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Key = Key
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string (no fractional
        # seconds or timezone suffix) or as an already-built datetime.
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Instantiate, preferring an externally-registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DeleteObject)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DeleteObject.subclass:
            return DeleteObject.subclass(*args_, **kwargs_)
        else:
            return DeleteObject(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        """Return True if at least one child element would be exported."""
        if (
            self.Bucket is not None or
            self.Key is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteObject',
               pretty_print=True):
        """Serialize this element (and its children) to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DeleteObject')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The tag name seen at parse time wins over the default name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DeleteObject')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='DeleteObject', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DeleteObject'):
        # This element type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteObject',
                       fromsubclass_=False, pretty_print=True):
        """Write the child elements to ``outfile`` in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Key), input_name='Key')),
                namespaceprefix_, eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one recognised child element into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Key':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Key')
            value_ = self.gds_validate_string(value_, node, 'Key')
            self.Key = value_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class DeleteObject
class DeleteObjectResponse(GeneratedsSuper):
    """Binding for the S3 ``DeleteObjectResponse`` wrapper element; holds a
    single nested ``Status``-typed child of the same name.

    Auto-generated (generateDS-style) binding class; manual edits will be
    lost if the bindings are regenerated from the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, DeleteObjectResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Parameter is suffixed _member because the child element shares
        # this class's own name.
        self.DeleteObjectResponse = DeleteObjectResponse_member
    def factory(*args_, **kwargs_):
        """Instantiate, preferring an externally-registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DeleteObjectResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DeleteObjectResponse.subclass:
            return DeleteObjectResponse.subclass(*args_, **kwargs_)
        else:
            return DeleteObjectResponse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_DeleteObjectResponse(self):
        return self.DeleteObjectResponse
    def set_DeleteObjectResponse(self, DeleteObjectResponse):
        self.DeleteObjectResponse = DeleteObjectResponse
    def hasContent_(self):
        """Return True if the nested status element is present."""
        if (
            self.DeleteObjectResponse is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='DeleteObjectResponse',
               pretty_print=True):
        """Serialize this element (and its children) to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DeleteObjectResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The tag name seen at parse time wins over the default name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DeleteObjectResponse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='DeleteObjectResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DeleteObjectResponse'):
        # This element type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='DeleteObjectResponse', fromsubclass_=False, pretty_print=True):
        """Delegate export of the nested status element, if present."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DeleteObjectResponse is not None:
            self.DeleteObjectResponse.export(outfile, level, namespaceprefix_, namespacedef_='',
                                             name_='DeleteObjectResponse', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the nested Status-typed child element."""
        if nodeName_ == 'DeleteObjectResponse':
            obj_ = Status.factory(parent_object_=self)
            obj_.build(child_)
            self.DeleteObjectResponse = obj_
            obj_.original_tagname_ = 'DeleteObjectResponse'
# end class DeleteObjectResponse
class ListBucket(GeneratedsSuper):
    """Binding for the S3 ``ListBucket`` request element: bucket listing
    parameters (Prefix/Marker/MaxKeys/Delimiter) plus the
    request-authentication fields.

    Auto-generated (generateDS-style) binding class; manual edits will be
    lost if the bindings are regenerated from the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, Prefix=None, Marker=None, MaxKeys=None, Delimiter=None, AWSAccessKeyId=None,
                 Timestamp=None, Signature=None, Credential=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Bucket = Bucket
        self.Prefix = Prefix
        self.Marker = Marker
        self.MaxKeys = MaxKeys
        self.Delimiter = Delimiter
        self.AWSAccessKeyId = AWSAccessKeyId
        # Timestamp may arrive as an ISO-8601 string (no fractional
        # seconds or timezone suffix) or as an already-built datetime.
        if isinstance(Timestamp, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = Timestamp
        self.Timestamp = initvalue_
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Instantiate, preferring an externally-registered subclass."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListBucket)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListBucket.subclass:
            return ListBucket.subclass(*args_, **kwargs_)
        else:
            return ListBucket(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        return self.Bucket
    def set_Bucket(self, Bucket):
        self.Bucket = Bucket
    def get_Prefix(self):
        return self.Prefix
    def set_Prefix(self, Prefix):
        self.Prefix = Prefix
    def get_Marker(self):
        return self.Marker
    def set_Marker(self, Marker):
        self.Marker = Marker
    def get_MaxKeys(self):
        return self.MaxKeys
    def set_MaxKeys(self, MaxKeys):
        self.MaxKeys = MaxKeys
    def get_Delimiter(self):
        return self.Delimiter
    def set_Delimiter(self, Delimiter):
        self.Delimiter = Delimiter
    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self):
        return self.Timestamp
    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp
    def get_Signature(self):
        return self.Signature
    def set_Signature(self, Signature):
        self.Signature = Signature
    def get_Credential(self):
        return self.Credential
    def set_Credential(self, Credential):
        self.Credential = Credential
    def hasContent_(self):
        """Return True if at least one child element would be exported."""
        if (
            self.Bucket is not None or
            self.Prefix is not None or
            self.Marker is not None or
            self.MaxKeys is not None or
            self.Delimiter is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListBucket',
               pretty_print=True):
        """Serialize this element (and its children) to ``outfile`` as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListBucket')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The tag name seen at parse time wins over the default name.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListBucket')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListBucket', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListBucket'):
        # This element type defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListBucket',
                       fromsubclass_=False, pretty_print=True):
        """Write the child elements to ``outfile`` in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Bucket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sBucket>%s</%sBucket>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Bucket), input_name='Bucket')),
                namespaceprefix_, eol_))
        if self.Prefix is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPrefix>%s</%sPrefix>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Prefix), input_name='Prefix')),
                namespaceprefix_, eol_))
        if self.Marker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMarker>%s</%sMarker>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Marker), input_name='Marker')),
                namespaceprefix_, eol_))
        if self.MaxKeys is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMaxKeys>%s</%sMaxKeys>%s' % (
                namespaceprefix_, self.gds_format_integer(self.MaxKeys, input_name='MaxKeys'), namespaceprefix_, eol_))
        if self.Delimiter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDelimiter>%s</%sDelimiter>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Delimiter), input_name='Delimiter')), namespaceprefix_, eol_))
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId')), namespaceprefix_,
                eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, self.gds_format_datetime(self.Timestamp, input_name='Timestamp'), namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_, eol_))
        if self.Credential is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sCredential>%s</%sCredential>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Credential), input_name='Credential')), namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree ``node``; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element type defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one recognised child element into the matching attribute."""
        if nodeName_ == 'Bucket':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Bucket')
            value_ = self.gds_validate_string(value_, node, 'Bucket')
            self.Bucket = value_
        elif nodeName_ == 'Prefix':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Prefix')
            value_ = self.gds_validate_string(value_, node, 'Prefix')
            self.Prefix = value_
        elif nodeName_ == 'Marker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Marker')
            value_ = self.gds_validate_string(value_, node, 'Marker')
            self.Marker = value_
        elif nodeName_ == 'MaxKeys' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'MaxKeys')
            ival_ = self.gds_validate_integer(ival_, node, 'MaxKeys')
            self.MaxKeys = ival_
        elif nodeName_ == 'Delimiter':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Delimiter')
            value_ = self.gds_validate_string(value_, node, 'Delimiter')
            self.Delimiter = value_
        elif nodeName_ == 'AWSAccessKeyId':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'AWSAccessKeyId')
            value_ = self.gds_validate_string(value_, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = value_
        elif nodeName_ == 'Timestamp':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.Timestamp = dval_
        elif nodeName_ == 'Signature':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Signature')
            value_ = self.gds_validate_string(value_, node, 'Signature')
            self.Signature = value_
        elif nodeName_ == 'Credential':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Credential')
            value_ = self.gds_validate_string(value_, node, 'Credential')
            self.Credential = value_
# end class ListBucket
class ListBucketResponse(GeneratedsSuper):
    """Response wrapper element carrying a single ListBucketResult child."""
    subclass = None
    superclass = None
    def __init__(self, ListBucketResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ListBucketResponse = ListBucketResponse_member
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListBucketResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = ListBucketResponse.subclass or ListBucketResponse
        return cls_(*args_, **kwargs_)
    def get_ListBucketResponse(self):
        return self.ListBucketResponse
    def set_ListBucketResponse(self, ListBucketResponse):
        self.ListBucketResponse = ListBucketResponse
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.ListBucketResponse is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='ListBucketResponse', pretty_print=True):
        """Serialize this element and its child as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListBucketResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='ListBucketResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='ListBucketResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='ListBucketResponse'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListBucketResponse', fromsubclass_=False,
                       pretty_print=True):
        if self.ListBucketResponse is not None:
            self.ListBucketResponse.export(
                outfile, level, namespaceprefix_, namespacedef_='',
                name_='ListBucketResponse', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'ListBucketResponse':
            # The wrapped child element is of schema type ListBucketResult.
            obj_ = ListBucketResult.factory(parent_object_=self)
            obj_.build(child_)
            obj_.original_tagname_ = 'ListBucketResponse'
            self.ListBucketResponse = obj_
# end class ListBucketResponse
class ListVersionsResponse(GeneratedsSuper):
    """Response wrapper element carrying a single ListVersionsResult child."""
    subclass = None
    superclass = None
    def __init__(self, ListVersionsResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ListVersionsResponse = ListVersionsResponse_member
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListVersionsResponse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = ListVersionsResponse.subclass or ListVersionsResponse
        return cls_(*args_, **kwargs_)
    def get_ListVersionsResponse(self):
        return self.ListVersionsResponse
    def set_ListVersionsResponse(self, ListVersionsResponse):
        self.ListVersionsResponse = ListVersionsResponse
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.ListVersionsResponse is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='ListVersionsResponse', pretty_print=True):
        """Serialize this element and its child as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListVersionsResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='ListVersionsResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='ListVersionsResponse',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='ListVersionsResponse'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListVersionsResponse', fromsubclass_=False,
                       pretty_print=True):
        if self.ListVersionsResponse is not None:
            self.ListVersionsResponse.export(
                outfile, level, namespaceprefix_, namespacedef_='',
                name_='ListVersionsResponse', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'ListVersionsResponse':
            # The wrapped child element is of schema type ListVersionsResult.
            obj_ = ListVersionsResult.factory(parent_object_=self)
            obj_.build(child_)
            obj_.original_tagname_ = 'ListVersionsResponse'
            self.ListVersionsResponse = obj_
# end class ListVersionsResponse
class ListEntry(GeneratedsSuper):
    """One object ('Contents') entry of a ListBucketResult."""
    subclass = None
    superclass = None
    def __init__(self, Key=None, LastModified=None, ETag=None, Size=None,
                 Owner=None, StorageClass=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Key = Key
        # LastModified may arrive as an ISO-8601 string or a datetime object.
        if isinstance(LastModified, BaseStrType_):
            self.LastModified = datetime_.datetime.strptime(
                LastModified, '%Y-%m-%dT%H:%M:%S')
        else:
            self.LastModified = LastModified
        self.ETag = ETag
        self.Size = Size
        self.Owner = Owner
        self.StorageClass = StorageClass
        self.validate_StorageClass(self.StorageClass)
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListEntry)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = ListEntry.subclass or ListEntry
        return cls_(*args_, **kwargs_)
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_LastModified(self):
        return self.LastModified
    def set_LastModified(self, LastModified):
        self.LastModified = LastModified
    def get_ETag(self):
        return self.ETag
    def set_ETag(self, ETag):
        self.ETag = ETag
    def get_Size(self):
        return self.Size
    def set_Size(self, Size):
        self.Size = Size
    def get_Owner(self):
        return self.Owner
    def set_Owner(self, Owner):
        self.Owner = Owner
    def get_StorageClass(self):
        return self.StorageClass
    def set_StorageClass(self, StorageClass):
        self.StorageClass = StorageClass
    def validate_StorageClass(self, value):
        # Validate type StorageClass, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['STANDARD', 'REDUCED_REDUNDANCY', 'GLACIER', 'UNKNOWN']
            if value not in enumerations:
                warnings_.warn(
                    'Value "%(value)s" does not match xsd enumeration restriction on StorageClass' % {
                        "value": value.encode("utf-8")})
    def hasContent_(self):
        members_ = (self.Key, self.LastModified, self.ETag,
                    self.Size, self.Owner, self.StorageClass)
        return any(member is not None for member in members_)
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='ListEntry', pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='ListEntry')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='ListEntry', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='ListEntry'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListEntry', fromsubclass_=False,
                       pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.Key), input_name='Key'))
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.LastModified is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_datetime(
                self.LastModified, input_name='LastModified')
            outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.ETag is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.ETag), input_name='ETag'))
            outfile.write('<%sETag>%s</%sETag>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.Size is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_integer(self.Size, input_name='Size')
            outfile.write('<%sSize>%s</%sSize>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.Owner is not None:
            self.Owner.export(outfile, level, namespaceprefix_,
                              namespacedef_='', name_='Owner',
                              pretty_print=pretty_print)
        if self.StorageClass is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.StorageClass), input_name='StorageClass'))
            outfile.write('<%sStorageClass>%s</%sStorageClass>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Plain xsd:string members share identical parse/validate handling.
        if nodeName_ in ('Key', 'ETag'):
            text_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_,
                    self.gds_validate_string(text_, node, nodeName_))
        elif nodeName_ == 'LastModified':
            self.LastModified = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'Size' and child_.text:
            number_ = self.gds_parse_integer(child_.text, node, 'Size')
            self.Size = self.gds_validate_integer(number_, node, 'Size')
        elif nodeName_ == 'Owner':
            owner_ = CanonicalUser.factory(parent_object_=self)
            owner_.build(child_)
            owner_.original_tagname_ = 'Owner'
            self.Owner = owner_
        elif nodeName_ == 'StorageClass':
            text_ = self.gds_parse_string(child_.text, node, 'StorageClass')
            self.StorageClass = self.gds_validate_string(
                text_, node, 'StorageClass')
            # validate type StorageClass
            self.validate_StorageClass(self.StorageClass)
# end class ListEntry
class VersionEntry(GeneratedsSuper):
    """One object version entry of a ListVersionsResult."""
    subclass = None
    superclass = None
    def __init__(self, Key=None, VersionId=None, IsLatest=None,
                 LastModified=None, ETag=None, Size=None, Owner=None,
                 StorageClass=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Key = Key
        self.VersionId = VersionId
        self.IsLatest = IsLatest
        # LastModified may arrive as an ISO-8601 string or a datetime object.
        if isinstance(LastModified, BaseStrType_):
            self.LastModified = datetime_.datetime.strptime(
                LastModified, '%Y-%m-%dT%H:%M:%S')
        else:
            self.LastModified = LastModified
        self.ETag = ETag
        self.Size = Size
        self.Owner = Owner
        self.StorageClass = StorageClass
        self.validate_StorageClass(self.StorageClass)
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, VersionEntry)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = VersionEntry.subclass or VersionEntry
        return cls_(*args_, **kwargs_)
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_VersionId(self):
        return self.VersionId
    def set_VersionId(self, VersionId):
        self.VersionId = VersionId
    def get_IsLatest(self):
        return self.IsLatest
    def set_IsLatest(self, IsLatest):
        self.IsLatest = IsLatest
    def get_LastModified(self):
        return self.LastModified
    def set_LastModified(self, LastModified):
        self.LastModified = LastModified
    def get_ETag(self):
        return self.ETag
    def set_ETag(self, ETag):
        self.ETag = ETag
    def get_Size(self):
        return self.Size
    def set_Size(self, Size):
        self.Size = Size
    def get_Owner(self):
        return self.Owner
    def set_Owner(self, Owner):
        self.Owner = Owner
    def get_StorageClass(self):
        return self.StorageClass
    def set_StorageClass(self, StorageClass):
        self.StorageClass = StorageClass
    def validate_StorageClass(self, value):
        # Validate type StorageClass, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['STANDARD', 'REDUCED_REDUNDANCY', 'GLACIER', 'UNKNOWN']
            if value not in enumerations:
                warnings_.warn(
                    'Value "%(value)s" does not match xsd enumeration restriction on StorageClass' % {
                        "value": value.encode("utf-8")})
    def hasContent_(self):
        members_ = (self.Key, self.VersionId, self.IsLatest,
                    self.LastModified, self.ETag, self.Size,
                    self.Owner, self.StorageClass)
        return any(member is not None for member in members_)
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='VersionEntry', pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('VersionEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='VersionEntry')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='VersionEntry',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='VersionEntry'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='VersionEntry', fromsubclass_=False,
                       pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.Key), input_name='Key'))
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.VersionId is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.VersionId), input_name='VersionId'))
            outfile.write('<%sVersionId>%s</%sVersionId>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.IsLatest is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_boolean(
                self.IsLatest, input_name='IsLatest')
            outfile.write('<%sIsLatest>%s</%sIsLatest>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.LastModified is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_datetime(
                self.LastModified, input_name='LastModified')
            outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.ETag is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.ETag), input_name='ETag'))
            outfile.write('<%sETag>%s</%sETag>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.Size is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_integer(self.Size, input_name='Size')
            outfile.write('<%sSize>%s</%sSize>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.Owner is not None:
            self.Owner.export(outfile, level, namespaceprefix_,
                              namespacedef_='', name_='Owner',
                              pretty_print=pretty_print)
        if self.StorageClass is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.StorageClass), input_name='StorageClass'))
            outfile.write('<%sStorageClass>%s</%sStorageClass>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Plain xsd:string members share identical parse/validate handling.
        if nodeName_ in ('Key', 'VersionId', 'ETag'):
            text_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_,
                    self.gds_validate_string(text_, node, nodeName_))
        elif nodeName_ == 'IsLatest':
            flag_ = self.gds_parse_boolean(child_.text, node, 'IsLatest')
            self.IsLatest = self.gds_validate_boolean(flag_, node, 'IsLatest')
        elif nodeName_ == 'LastModified':
            self.LastModified = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'Size' and child_.text:
            number_ = self.gds_parse_integer(child_.text, node, 'Size')
            self.Size = self.gds_validate_integer(number_, node, 'Size')
        elif nodeName_ == 'Owner':
            owner_ = CanonicalUser.factory(parent_object_=self)
            owner_.build(child_)
            owner_.original_tagname_ = 'Owner'
            self.Owner = owner_
        elif nodeName_ == 'StorageClass':
            text_ = self.gds_parse_string(child_.text, node, 'StorageClass')
            self.StorageClass = self.gds_validate_string(
                text_, node, 'StorageClass')
            # validate type StorageClass
            self.validate_StorageClass(self.StorageClass)
# end class VersionEntry
class DeleteMarkerEntry(GeneratedsSuper):
    """One delete-marker entry of a ListVersionsResult."""
    subclass = None
    superclass = None
    def __init__(self, Key=None, VersionId=None, IsLatest=None,
                 LastModified=None, Owner=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Key = Key
        self.VersionId = VersionId
        self.IsLatest = IsLatest
        # LastModified may arrive as an ISO-8601 string or a datetime object.
        if isinstance(LastModified, BaseStrType_):
            self.LastModified = datetime_.datetime.strptime(
                LastModified, '%Y-%m-%dT%H:%M:%S')
        else:
            self.LastModified = LastModified
        self.Owner = Owner
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DeleteMarkerEntry)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = DeleteMarkerEntry.subclass or DeleteMarkerEntry
        return cls_(*args_, **kwargs_)
    def get_Key(self):
        return self.Key
    def set_Key(self, Key):
        self.Key = Key
    def get_VersionId(self):
        return self.VersionId
    def set_VersionId(self, VersionId):
        self.VersionId = VersionId
    def get_IsLatest(self):
        return self.IsLatest
    def set_IsLatest(self, IsLatest):
        self.IsLatest = IsLatest
    def get_LastModified(self):
        return self.LastModified
    def set_LastModified(self, LastModified):
        self.LastModified = LastModified
    def get_Owner(self):
        return self.Owner
    def set_Owner(self, Owner):
        self.Owner = Owner
    def hasContent_(self):
        members_ = (self.Key, self.VersionId, self.IsLatest,
                    self.LastModified, self.Owner)
        return any(member is not None for member in members_)
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='DeleteMarkerEntry', pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DeleteMarkerEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='DeleteMarkerEntry')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='DeleteMarkerEntry',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='DeleteMarkerEntry'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='DeleteMarkerEntry', fromsubclass_=False,
                       pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.Key is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.Key), input_name='Key'))
            outfile.write('<%sKey>%s</%sKey>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.VersionId is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.VersionId), input_name='VersionId'))
            outfile.write('<%sVersionId>%s</%sVersionId>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.IsLatest is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_boolean(
                self.IsLatest, input_name='IsLatest')
            outfile.write('<%sIsLatest>%s</%sIsLatest>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.LastModified is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_datetime(
                self.LastModified, input_name='LastModified')
            outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.Owner is not None:
            self.Owner.export(outfile, level, namespaceprefix_,
                              namespacedef_='', name_='Owner',
                              pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Plain xsd:string members share identical parse/validate handling.
        if nodeName_ in ('Key', 'VersionId'):
            text_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_,
                    self.gds_validate_string(text_, node, nodeName_))
        elif nodeName_ == 'IsLatest':
            flag_ = self.gds_parse_boolean(child_.text, node, 'IsLatest')
            self.IsLatest = self.gds_validate_boolean(flag_, node, 'IsLatest')
        elif nodeName_ == 'LastModified':
            self.LastModified = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'Owner':
            owner_ = CanonicalUser.factory(parent_object_=self)
            owner_.build(child_)
            owner_.original_tagname_ = 'Owner'
            self.Owner = owner_
# end class DeleteMarkerEntry
class PrefixEntry(GeneratedsSuper):
    """A single CommonPrefixes entry holding one Prefix string."""
    subclass = None
    superclass = None
    def __init__(self, Prefix=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Prefix = Prefix
    @staticmethod
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, if one exists.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PrefixEntry)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = PrefixEntry.subclass or PrefixEntry
        return cls_(*args_, **kwargs_)
    def get_Prefix(self):
        return self.Prefix
    def set_Prefix(self, Prefix):
        self.Prefix = Prefix
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.Prefix is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
               name_='PrefixEntry', pretty_print=True):
        """Serialize this element and its child as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PrefixEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespaceprefix_, name_,
            ' ' + namespacedef_ if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed,
                              namespaceprefix_, name_='PrefixEntry')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
        else:
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_,
                                name_='PrefixEntry',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed,
                         namespaceprefix_='', name_='PrefixEntry'):
        # No attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='PrefixEntry', fromsubclass_=False,
                       pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.Prefix is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.Prefix), input_name='Prefix'))
            outfile.write('<%sPrefix>%s</%sPrefix>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an element-tree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Prefix':
            text_ = self.gds_parse_string(child_.text, node, 'Prefix')
            self.Prefix = self.gds_validate_string(text_, node, 'Prefix')
# end class PrefixEntry
class ListBucketResult(GeneratedsSuper):
    """Generated binding for the S3 ``ListBucketResult`` response element.

    Carries one page of a bucket listing: the bucket ``Name``, the echoed
    query fields (``Prefix``, ``Marker``, ``Delimiter``, ``MaxKeys``),
    pagination state (``IsTruncated``, ``NextMarker``) and the repeated
    children ``Metadata``, ``Contents`` and ``CommonPrefixes``.
    Auto-generated by generateDS; edit with care.
    """
    subclass = None
    superclass = None
    def __init__(self, Metadata=None, Name=None, Prefix=None, Marker=None, NextMarker=None, MaxKeys=None,
                 Delimiter=None, IsTruncated=None, Contents=None, CommonPrefixes=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Repeated elements get a fresh list per instance so instances never
        # share a mutable default.
        if Metadata is None:
            self.Metadata = []
        else:
            self.Metadata = Metadata
        self.Name = Name
        self.Prefix = Prefix
        self.Marker = Marker
        self.NextMarker = NextMarker
        self.MaxKeys = MaxKeys
        self.Delimiter = Delimiter
        self.IsTruncated = IsTruncated
        if Contents is None:
            self.Contents = []
        else:
            self.Contents = Contents
        if CommonPrefixes is None:
            self.CommonPrefixes = []
        else:
            self.CommonPrefixes = CommonPrefixes
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListBucketResult)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListBucketResult.subclass:
            return ListBucketResult.subclass(*args_, **kwargs_)
        else:
            return ListBucketResult(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors: one get/set pair per element; list-valued
    # elements additionally get add/insert/replace helpers ---
    def get_Metadata(self):
        return self.Metadata
    def set_Metadata(self, Metadata):
        self.Metadata = Metadata
    def add_Metadata(self, value):
        self.Metadata.append(value)
    def insert_Metadata_at(self, index, value):
        self.Metadata.insert(index, value)
    def replace_Metadata_at(self, index, value):
        self.Metadata[index] = value
    def get_Name(self):
        return self.Name
    def set_Name(self, Name):
        self.Name = Name
    def get_Prefix(self):
        return self.Prefix
    def set_Prefix(self, Prefix):
        self.Prefix = Prefix
    def get_Marker(self):
        return self.Marker
    def set_Marker(self, Marker):
        self.Marker = Marker
    def get_NextMarker(self):
        return self.NextMarker
    def set_NextMarker(self, NextMarker):
        self.NextMarker = NextMarker
    def get_MaxKeys(self):
        return self.MaxKeys
    def set_MaxKeys(self, MaxKeys):
        self.MaxKeys = MaxKeys
    def get_Delimiter(self):
        return self.Delimiter
    def set_Delimiter(self, Delimiter):
        self.Delimiter = Delimiter
    def get_IsTruncated(self):
        return self.IsTruncated
    def set_IsTruncated(self, IsTruncated):
        self.IsTruncated = IsTruncated
    def get_Contents(self):
        return self.Contents
    def set_Contents(self, Contents):
        self.Contents = Contents
    def add_Contents(self, value):
        self.Contents.append(value)
    def insert_Contents_at(self, index, value):
        self.Contents.insert(index, value)
    def replace_Contents_at(self, index, value):
        self.Contents[index] = value
    def get_CommonPrefixes(self):
        return self.CommonPrefixes
    def set_CommonPrefixes(self, CommonPrefixes):
        self.CommonPrefixes = CommonPrefixes
    def add_CommonPrefixes(self, value):
        self.CommonPrefixes.append(value)
    def insert_CommonPrefixes_at(self, index, value):
        self.CommonPrefixes.insert(index, value)
    def replace_CommonPrefixes_at(self, index, value):
        self.CommonPrefixes[index] = value
    def hasContent_(self):
        """Return True if any child element would be serialized by export()."""
        if (
            self.Metadata or
            self.Name is not None or
            self.Prefix is not None or
            self.Marker is not None or
            self.NextMarker is not None or
            self.MaxKeys is not None or
            self.Delimiter is not None or
            self.IsTruncated is not None or
            self.Contents or
            self.CommonPrefixes
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListBucketResult',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent.

        Emits a self-closing tag when there is no content; otherwise
        delegates element bodies to exportChildren().
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListBucketResult')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name the element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListBucketResult')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListBucketResult',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListBucketResult'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListBucketResult',
                       fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; None fields are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Metadata_ in self.Metadata:
            Metadata_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
                             pretty_print=pretty_print)
        if self.Name is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sName>%s</%sName>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')),
                namespaceprefix_, eol_))
        if self.Prefix is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPrefix>%s</%sPrefix>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Prefix), input_name='Prefix')),
                namespaceprefix_, eol_))
        if self.Marker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMarker>%s</%sMarker>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Marker), input_name='Marker')),
                namespaceprefix_, eol_))
        if self.NextMarker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sNextMarker>%s</%sNextMarker>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.NextMarker), input_name='NextMarker')), namespaceprefix_, eol_))
        if self.MaxKeys is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMaxKeys>%s</%sMaxKeys>%s' % (
                namespaceprefix_, self.gds_format_integer(self.MaxKeys, input_name='MaxKeys'), namespaceprefix_, eol_))
        if self.Delimiter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDelimiter>%s</%sDelimiter>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Delimiter), input_name='Delimiter')), namespaceprefix_, eol_))
        if self.IsTruncated is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIsTruncated>%s</%sIsTruncated>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.IsTruncated, input_name='IsTruncated'), namespaceprefix_,
                eol_))
        for Contents_ in self.Contents:
            Contents_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Contents',
                             pretty_print=pretty_print)
        for CommonPrefixes_ in self.CommonPrefixes:
            CommonPrefixes_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CommonPrefixes',
                                   pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field of this object from parsed child node *child_*."""
        if nodeName_ == 'Metadata':
            obj_ = MetadataEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Metadata.append(obj_)
            obj_.original_tagname_ = 'Metadata'
        elif nodeName_ == 'Name':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Name')
            value_ = self.gds_validate_string(value_, node, 'Name')
            self.Name = value_
        elif nodeName_ == 'Prefix':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Prefix')
            value_ = self.gds_validate_string(value_, node, 'Prefix')
            self.Prefix = value_
        elif nodeName_ == 'Marker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Marker')
            value_ = self.gds_validate_string(value_, node, 'Marker')
            self.Marker = value_
        elif nodeName_ == 'NextMarker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'NextMarker')
            value_ = self.gds_validate_string(value_, node, 'NextMarker')
            self.NextMarker = value_
        elif nodeName_ == 'MaxKeys' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'MaxKeys')
            ival_ = self.gds_validate_integer(ival_, node, 'MaxKeys')
            self.MaxKeys = ival_
        elif nodeName_ == 'Delimiter':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Delimiter')
            value_ = self.gds_validate_string(value_, node, 'Delimiter')
            self.Delimiter = value_
        elif nodeName_ == 'IsTruncated':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'IsTruncated')
            ival_ = self.gds_validate_boolean(ival_, node, 'IsTruncated')
            self.IsTruncated = ival_
        elif nodeName_ == 'Contents':
            obj_ = ListEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Contents.append(obj_)
            obj_.original_tagname_ = 'Contents'
        elif nodeName_ == 'CommonPrefixes':
            obj_ = PrefixEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.CommonPrefixes.append(obj_)
            obj_.original_tagname_ = 'CommonPrefixes'
# end class ListBucketResult
class ListVersionsResult(GeneratedsSuper):
    """Generated binding for the S3 ``ListVersionsResult`` response element.

    Like ``ListBucketResult`` but for versioned listings: pagination uses
    the (``KeyMarker``, ``VersionIdMarker``) pair, and the repeated
    children are ``Version``, ``DeleteMarker`` and ``CommonPrefixes``.
    Auto-generated by generateDS; edit with care.
    """
    subclass = None
    superclass = None
    def __init__(self, Metadata=None, Name=None, Prefix=None, KeyMarker=None, VersionIdMarker=None, NextKeyMarker=None,
                 NextVersionIdMarker=None, MaxKeys=None, Delimiter=None, IsTruncated=None, Version=None,
                 DeleteMarker=None, CommonPrefixes=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # Repeated elements get a fresh list per instance so instances never
        # share a mutable default.
        if Metadata is None:
            self.Metadata = []
        else:
            self.Metadata = Metadata
        self.Name = Name
        self.Prefix = Prefix
        self.KeyMarker = KeyMarker
        self.VersionIdMarker = VersionIdMarker
        self.NextKeyMarker = NextKeyMarker
        self.NextVersionIdMarker = NextVersionIdMarker
        self.MaxKeys = MaxKeys
        self.Delimiter = Delimiter
        self.IsTruncated = IsTruncated
        if Version is None:
            self.Version = []
        else:
            self.Version = Version
        if DeleteMarker is None:
            self.DeleteMarker = []
        else:
            self.DeleteMarker = DeleteMarker
        if CommonPrefixes is None:
            self.CommonPrefixes = []
        else:
            self.CommonPrefixes = CommonPrefixes
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListVersionsResult)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListVersionsResult.subclass:
            return ListVersionsResult.subclass(*args_, **kwargs_)
        else:
            return ListVersionsResult(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors: one get/set pair per element; list-valued
    # elements additionally get add/insert/replace helpers ---
    def get_Metadata(self):
        return self.Metadata
    def set_Metadata(self, Metadata):
        self.Metadata = Metadata
    def add_Metadata(self, value):
        self.Metadata.append(value)
    def insert_Metadata_at(self, index, value):
        self.Metadata.insert(index, value)
    def replace_Metadata_at(self, index, value):
        self.Metadata[index] = value
    def get_Name(self):
        return self.Name
    def set_Name(self, Name):
        self.Name = Name
    def get_Prefix(self):
        return self.Prefix
    def set_Prefix(self, Prefix):
        self.Prefix = Prefix
    def get_KeyMarker(self):
        return self.KeyMarker
    def set_KeyMarker(self, KeyMarker):
        self.KeyMarker = KeyMarker
    def get_VersionIdMarker(self):
        return self.VersionIdMarker
    def set_VersionIdMarker(self, VersionIdMarker):
        self.VersionIdMarker = VersionIdMarker
    def get_NextKeyMarker(self):
        return self.NextKeyMarker
    def set_NextKeyMarker(self, NextKeyMarker):
        self.NextKeyMarker = NextKeyMarker
    def get_NextVersionIdMarker(self):
        return self.NextVersionIdMarker
    def set_NextVersionIdMarker(self, NextVersionIdMarker):
        self.NextVersionIdMarker = NextVersionIdMarker
    def get_MaxKeys(self):
        return self.MaxKeys
    def set_MaxKeys(self, MaxKeys):
        self.MaxKeys = MaxKeys
    def get_Delimiter(self):
        return self.Delimiter
    def set_Delimiter(self, Delimiter):
        self.Delimiter = Delimiter
    def get_IsTruncated(self):
        return self.IsTruncated
    def set_IsTruncated(self, IsTruncated):
        self.IsTruncated = IsTruncated
    def get_Version(self):
        return self.Version
    def set_Version(self, Version):
        self.Version = Version
    def add_Version(self, value):
        self.Version.append(value)
    def insert_Version_at(self, index, value):
        self.Version.insert(index, value)
    def replace_Version_at(self, index, value):
        self.Version[index] = value
    def get_DeleteMarker(self):
        return self.DeleteMarker
    def set_DeleteMarker(self, DeleteMarker):
        self.DeleteMarker = DeleteMarker
    def add_DeleteMarker(self, value):
        self.DeleteMarker.append(value)
    def insert_DeleteMarker_at(self, index, value):
        self.DeleteMarker.insert(index, value)
    def replace_DeleteMarker_at(self, index, value):
        self.DeleteMarker[index] = value
    def get_CommonPrefixes(self):
        return self.CommonPrefixes
    def set_CommonPrefixes(self, CommonPrefixes):
        self.CommonPrefixes = CommonPrefixes
    def add_CommonPrefixes(self, value):
        self.CommonPrefixes.append(value)
    def insert_CommonPrefixes_at(self, index, value):
        self.CommonPrefixes.insert(index, value)
    def replace_CommonPrefixes_at(self, index, value):
        self.CommonPrefixes[index] = value
    def hasContent_(self):
        """Return True if any child element would be serialized by export()."""
        if (
            self.Metadata or
            self.Name is not None or
            self.Prefix is not None or
            self.KeyMarker is not None or
            self.VersionIdMarker is not None or
            self.NextKeyMarker is not None or
            self.NextVersionIdMarker is not None or
            self.MaxKeys is not None or
            self.Delimiter is not None or
            self.IsTruncated is not None or
            self.Version or
            self.DeleteMarker or
            self.CommonPrefixes
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListVersionsResult',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent.

        Emits a self-closing tag when there is no content; otherwise
        delegates element bodies to exportChildren().
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListVersionsResult')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name the element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListVersionsResult')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListVersionsResult',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListVersionsResult'):
        # This element defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListVersionsResult',
                       fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; None fields are skipped."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Metadata_ in self.Metadata:
            Metadata_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
                             pretty_print=pretty_print)
        if self.Name is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sName>%s</%sName>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name')),
                namespaceprefix_, eol_))
        if self.Prefix is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPrefix>%s</%sPrefix>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Prefix), input_name='Prefix')),
                namespaceprefix_, eol_))
        if self.KeyMarker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sKeyMarker>%s</%sKeyMarker>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.KeyMarker), input_name='KeyMarker')), namespaceprefix_, eol_))
        if self.VersionIdMarker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sVersionIdMarker>%s</%sVersionIdMarker>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.VersionIdMarker), input_name='VersionIdMarker')),
                namespaceprefix_, eol_))
        if self.NextKeyMarker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sNextKeyMarker>%s</%sNextKeyMarker>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.NextKeyMarker), input_name='NextKeyMarker')), namespaceprefix_,
                eol_))
        if self.NextVersionIdMarker is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sNextVersionIdMarker>%s</%sNextVersionIdMarker>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.NextVersionIdMarker), input_name='NextVersionIdMarker')),
                namespaceprefix_, eol_))
        if self.MaxKeys is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMaxKeys>%s</%sMaxKeys>%s' % (
                namespaceprefix_, self.gds_format_integer(self.MaxKeys, input_name='MaxKeys'), namespaceprefix_, eol_))
        if self.Delimiter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sDelimiter>%s</%sDelimiter>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.Delimiter), input_name='Delimiter')), namespaceprefix_, eol_))
        if self.IsTruncated is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sIsTruncated>%s</%sIsTruncated>%s' % (
                namespaceprefix_, self.gds_format_boolean(self.IsTruncated, input_name='IsTruncated'), namespaceprefix_,
                eol_))
        for Version_ in self.Version:
            Version_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Version',
                            pretty_print=pretty_print)
        for DeleteMarker_ in self.DeleteMarker:
            DeleteMarker_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DeleteMarker',
                                 pretty_print=pretty_print)
        for CommonPrefixes_ in self.CommonPrefixes:
            CommonPrefixes_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CommonPrefixes',
                                   pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field of this object from parsed child node *child_*."""
        if nodeName_ == 'Metadata':
            obj_ = MetadataEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Metadata.append(obj_)
            obj_.original_tagname_ = 'Metadata'
        elif nodeName_ == 'Name':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Name')
            value_ = self.gds_validate_string(value_, node, 'Name')
            self.Name = value_
        elif nodeName_ == 'Prefix':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Prefix')
            value_ = self.gds_validate_string(value_, node, 'Prefix')
            self.Prefix = value_
        elif nodeName_ == 'KeyMarker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'KeyMarker')
            value_ = self.gds_validate_string(value_, node, 'KeyMarker')
            self.KeyMarker = value_
        elif nodeName_ == 'VersionIdMarker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'VersionIdMarker')
            value_ = self.gds_validate_string(value_, node, 'VersionIdMarker')
            self.VersionIdMarker = value_
        elif nodeName_ == 'NextKeyMarker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'NextKeyMarker')
            value_ = self.gds_validate_string(value_, node, 'NextKeyMarker')
            self.NextKeyMarker = value_
        elif nodeName_ == 'NextVersionIdMarker':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'NextVersionIdMarker')
            value_ = self.gds_validate_string(value_, node, 'NextVersionIdMarker')
            self.NextVersionIdMarker = value_
        elif nodeName_ == 'MaxKeys' and child_.text:
            sval_ = child_.text
            ival_ = self.gds_parse_integer(sval_, node, 'MaxKeys')
            ival_ = self.gds_validate_integer(ival_, node, 'MaxKeys')
            self.MaxKeys = ival_
        elif nodeName_ == 'Delimiter':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Delimiter')
            value_ = self.gds_validate_string(value_, node, 'Delimiter')
            self.Delimiter = value_
        elif nodeName_ == 'IsTruncated':
            sval_ = child_.text
            ival_ = self.gds_parse_boolean(sval_, node, 'IsTruncated')
            ival_ = self.gds_validate_boolean(ival_, node, 'IsTruncated')
            self.IsTruncated = ival_
        elif nodeName_ == 'Version':
            obj_ = VersionEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.Version.append(obj_)
            obj_.original_tagname_ = 'Version'
        elif nodeName_ == 'DeleteMarker':
            obj_ = DeleteMarkerEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.DeleteMarker.append(obj_)
            obj_.original_tagname_ = 'DeleteMarker'
        elif nodeName_ == 'CommonPrefixes':
            obj_ = PrefixEntry.factory(parent_object_=self)
            obj_.build(child_)
            self.CommonPrefixes.append(obj_)
            obj_.original_tagname_ = 'CommonPrefixes'
# end class ListVersionsResult
class ListAllMyBuckets(GeneratedsSuper):
    """Generated binding for the S3 ``ListAllMyBuckets`` request element.

    Carries request credentials: ``AWSAccessKeyId``, a ``Timestamp`` and a
    request ``Signature``.
    """
    subclass = None
    superclass = None

    def __init__(self, AWSAccessKeyId=None, Timestamp=None, Signature=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.AWSAccessKeyId = AWSAccessKeyId
        # Accept either a ready-made datetime or a string to be parsed.
        if isinstance(Timestamp, BaseStrType_):
            self.Timestamp = datetime_.datetime.strptime(Timestamp, '%Y-%m-%dT%H:%M:%S')
        else:
            self.Timestamp = Timestamp
        self.Signature = Signature

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ListAllMyBuckets)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = ListAllMyBuckets.subclass or ListAllMyBuckets
        return cls(*args_, **kwargs_)

    def get_AWSAccessKeyId(self):
        return self.AWSAccessKeyId

    def set_AWSAccessKeyId(self, AWSAccessKeyId):
        self.AWSAccessKeyId = AWSAccessKeyId

    def get_Timestamp(self):
        return self.Timestamp

    def set_Timestamp(self, Timestamp):
        self.Timestamp = Timestamp

    def get_Signature(self):
        return self.Signature

    def set_Signature(self, Signature):
        self.Signature = Signature

    def hasContent_(self):
        """Report whether any child element would be serialized."""
        return (
            self.AWSAccessKeyId is not None
            or self.Timestamp is not None
            or self.Signature is not None
        )

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBuckets',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListAllMyBuckets')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Re-use the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListAllMyBuckets')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListAllMyBuckets',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListAllMyBuckets'):
        # This element defines no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBuckets',
                       fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; None fields are skipped."""
        eol_ = '\n' if pretty_print else ''
        if self.AWSAccessKeyId is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.AWSAccessKeyId), input_name='AWSAccessKeyId'))
            outfile.write('<%sAWSAccessKeyId>%s</%sAWSAccessKeyId>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.Timestamp is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_datetime(self.Timestamp, input_name='Timestamp')
            outfile.write('<%sTimestamp>%s</%sTimestamp>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(
                quote_xml(self.Signature), input_name='Signature'))
            outfile.write('<%sSignature>%s</%sSignature>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            tag_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field of this object from parsed child node *child_*."""
        if nodeName_ == 'AWSAccessKeyId':
            parsed_ = self.gds_parse_string(child_.text, node, 'AWSAccessKeyId')
            self.AWSAccessKeyId = self.gds_validate_string(parsed_, node, 'AWSAccessKeyId')
        elif nodeName_ == 'Timestamp':
            self.Timestamp = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'Signature':
            parsed_ = self.gds_parse_string(child_.text, node, 'Signature')
            self.Signature = self.gds_validate_string(parsed_, node, 'Signature')
# end class ListAllMyBuckets
class ListAllMyBucketsResponse(GeneratedsSuper):
    """Generated wrapper for the S3 ``ListAllMyBucketsResponse`` element.

    Holds a single ``ListAllMyBucketsResponse`` member (built as a
    ``ListAllMyBucketsResult`` during parsing).
    """
    subclass = None
    superclass = None

    def __init__(self, ListAllMyBucketsResponse_member=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.ListAllMyBucketsResponse = ListAllMyBucketsResponse_member

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ListAllMyBucketsResponse)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = ListAllMyBucketsResponse.subclass or ListAllMyBucketsResponse
        return cls(*args_, **kwargs_)

    def get_ListAllMyBucketsResponse(self):
        return self.ListAllMyBucketsResponse

    def set_ListAllMyBucketsResponse(self, ListAllMyBucketsResponse):
        self.ListAllMyBucketsResponse = ListAllMyBucketsResponse

    def hasContent_(self):
        """Report whether any child element would be serialized."""
        return self.ListAllMyBucketsResponse is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBucketsResponse',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListAllMyBucketsResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Re-use the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListAllMyBucketsResponse')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListAllMyBucketsResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='ListAllMyBucketsResponse'):
        # This element defines no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListAllMyBucketsResponse', fromsubclass_=False, pretty_print=True):
        """Serialize the single child element, if present."""
        if self.ListAllMyBucketsResponse is not None:
            self.ListAllMyBucketsResponse.export(outfile, level, namespaceprefix_, namespacedef_='',
                                                 name_='ListAllMyBucketsResponse', pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            tag_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate the member from parsed child node *child_*."""
        if nodeName_ != 'ListAllMyBucketsResponse':
            return
        # The member's XML type is ListAllMyBucketsResult even though the
        # tag name matches this wrapper class.
        obj_ = ListAllMyBucketsResult.factory(parent_object_=self)
        obj_.build(child_)
        obj_.original_tagname_ = 'ListAllMyBucketsResponse'
        self.ListAllMyBucketsResponse = obj_
# end class ListAllMyBucketsResponse
class ListAllMyBucketsEntry(GeneratedsSuper):
    """Generated binding for one bucket entry in a ``ListAllMyBuckets``
    listing: a bucket ``Name`` plus its ``CreationDate``.
    """
    subclass = None
    superclass = None

    def __init__(self, Name=None, CreationDate=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Name = Name
        # Accept either a ready-made datetime or a string to be parsed.
        if isinstance(CreationDate, BaseStrType_):
            self.CreationDate = datetime_.datetime.strptime(CreationDate, '%Y-%m-%dT%H:%M:%S')
        else:
            self.CreationDate = CreationDate

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ListAllMyBucketsEntry)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = ListAllMyBucketsEntry.subclass or ListAllMyBucketsEntry
        return cls(*args_, **kwargs_)

    def get_Name(self):
        return self.Name

    def set_Name(self, Name):
        self.Name = Name

    def get_CreationDate(self):
        return self.CreationDate

    def set_CreationDate(self, CreationDate):
        self.CreationDate = CreationDate

    def hasContent_(self):
        """Report whether any child element would be serialized."""
        return self.Name is not None or self.CreationDate is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBucketsEntry',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListAllMyBucketsEntry')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Re-use the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListAllMyBucketsEntry')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListAllMyBucketsEntry',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListAllMyBucketsEntry'):
        # This element defines no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListAllMyBucketsEntry', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; None fields are skipped."""
        eol_ = '\n' if pretty_print else ''
        if self.Name is not None:
            showIndent(outfile, level, pretty_print)
            encoded_ = self.gds_encode(self.gds_format_string(quote_xml(self.Name), input_name='Name'))
            outfile.write('<%sName>%s</%sName>%s' % (
                namespaceprefix_, encoded_, namespaceprefix_, eol_))
        if self.CreationDate is not None:
            showIndent(outfile, level, pretty_print)
            formatted_ = self.gds_format_datetime(self.CreationDate, input_name='CreationDate')
            outfile.write('<%sCreationDate>%s</%sCreationDate>%s' % (
                namespaceprefix_, formatted_, namespaceprefix_, eol_))

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            tag_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field of this object from parsed child node *child_*."""
        if nodeName_ == 'Name':
            parsed_ = self.gds_parse_string(child_.text, node, 'Name')
            self.Name = self.gds_validate_string(parsed_, node, 'Name')
        elif nodeName_ == 'CreationDate':
            self.CreationDate = self.gds_parse_datetime(child_.text)
# end class ListAllMyBucketsEntry
class ListAllMyBucketsResult(GeneratedsSuper):
    """Generated binding for the S3 ``ListAllMyBucketsResult`` element.

    Pairs the bucket ``Owner`` (a canonical user) with the ``Buckets``
    container element.
    """
    subclass = None
    superclass = None

    def __init__(self, Owner=None, Buckets=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Owner = Owner
        self.Buckets = Buckets

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ListAllMyBucketsResult)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = ListAllMyBucketsResult.subclass or ListAllMyBucketsResult
        return cls(*args_, **kwargs_)

    def get_Owner(self):
        return self.Owner

    def set_Owner(self, Owner):
        self.Owner = Owner

    def get_Buckets(self):
        return self.Buckets

    def set_Buckets(self, Buckets):
        self.Buckets = Buckets

    def hasContent_(self):
        """Report whether any child element would be serialized."""
        return self.Owner is not None or self.Buckets is not None

    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBucketsResult',
               pretty_print=True):
        """Write this object to *outfile* as an XML element at *level* indent."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListAllMyBucketsResult')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Re-use the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, ' ' + namespacedef_ if namespacedef_ else '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListAllMyBucketsResult')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListAllMyBucketsResult',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListAllMyBucketsResult'):
        # This element defines no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListAllMyBucketsResult', fromsubclass_=False, pretty_print=True):
        """Serialize child elements in schema order; None fields are skipped."""
        if self.Owner is not None:
            self.Owner.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Owner',
                              pretty_print=pretty_print)
        if self.Buckets is not None:
            self.Buckets.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Buckets',
                                pretty_print=pretty_print)

    def build(self, node):
        """Populate this object from an ElementTree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child tag.
            tag_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        # This element defines no XML attributes.
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field of this object from parsed child node *child_*."""
        if nodeName_ == 'Owner':
            obj_ = CanonicalUser.factory(parent_object_=self)
            obj_.build(child_)
            obj_.original_tagname_ = 'Owner'
            self.Owner = obj_
        elif nodeName_ == 'Buckets':
            obj_ = ListAllMyBucketsList.factory(parent_object_=self)
            obj_.build(child_)
            obj_.original_tagname_ = 'Buckets'
            self.Buckets = obj_
# end class ListAllMyBucketsResult
class ListAllMyBucketsList(GeneratedsSuper):
    """Container for the repeated Bucket entries of a bucket listing."""
    subclass = None
    superclass = None
    def __init__(self, Bucket=None, **kwargs_):
        """Initialize with an optional list of Bucket entries."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # A fresh list per instance; never share a mutable default.
        self.Bucket = [] if Bucket is None else Bucket
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, ListAllMyBucketsList)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = ListAllMyBucketsList.subclass or ListAllMyBucketsList
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Bucket(self):
        """Return the list of Bucket entries."""
        return self.Bucket
    def set_Bucket(self, Bucket):
        """Replace the list of Bucket entries."""
        self.Bucket = Bucket
    def add_Bucket(self, value):
        """Append one Bucket entry."""
        self.Bucket.append(value)
    def insert_Bucket_at(self, index, value):
        """Insert a Bucket entry at the given position."""
        self.Bucket.insert(index, value)
    def replace_Bucket_at(self, index, value):
        """Replace the Bucket entry at the given position."""
        self.Bucket[index] = value
    def hasContent_(self):
        """True when at least one Bucket entry is present."""
        return bool(self.Bucket)
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='ListAllMyBucketsList',
               pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListAllMyBucketsList')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ListAllMyBucketsList')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='ListAllMyBucketsList',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ListAllMyBucketsList'):
        """No attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='ListAllMyBucketsList', fromsubclass_=False, pretty_print=True):
        """Render every Bucket entry in document order."""
        for entry_ in self.Bucket:
            entry_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Bucket',
                          pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element; only 'Bucket' is recognized."""
        if nodeName_ == 'Bucket':
            entry_ = ListAllMyBucketsEntry.factory(parent_object_=self)
            entry_.build(child_)
            entry_.original_tagname_ = 'Bucket'
            self.Bucket.append(entry_)
# end class ListAllMyBucketsList
class PostResponse(GeneratedsSuper):
    """Response for an S3 POST upload: the new object's Location, Bucket,
    Key and ETag, all optional text children."""
    subclass = None
    superclass = None
    def __init__(self, Location=None, Bucket=None, Key=None, ETag=None, **kwargs_):
        """Store the four optional text children."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Location = Location
        self.Bucket = Bucket
        self.Key = Key
        self.ETag = ETag
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, PostResponse)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = PostResponse.subclass or PostResponse
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Location(self): return self.Location
    def set_Location(self, Location): self.Location = Location
    def get_Bucket(self): return self.Bucket
    def set_Bucket(self, Bucket): self.Bucket = Bucket
    def get_Key(self): return self.Key
    def set_Key(self, Key): self.Key = Key
    def get_ETag(self): return self.ETag
    def set_ETag(self, ETag): self.ETag = ETag
    def hasContent_(self):
        """True when any child element is populated."""
        return (
            self.Location is not None or
            self.Bucket is not None or
            self.Key is not None or
            self.ETag is not None
        )
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PostResponse',
               pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PostResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PostResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='PostResponse', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PostResponse'):
        """No attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='PostResponse',
                       fromsubclass_=False, pretty_print=True):
        """Write each non-None text child as <tag>escaped-value</tag>,
        in schema order."""
        eol_ = '\n' if pretty_print else ''
        for tag_ in ('Location', 'Bucket', 'Key', 'ETag'):
            val_ = getattr(self, tag_)
            if val_ is None:
                continue
            showIndent(outfile, level, pretty_print)
            text_ = self.gds_encode(self.gds_format_string(quote_xml(val_), input_name=tag_))
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag_, text_, namespaceprefix_, tag_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one of the four recognized text children into its slot."""
        if nodeName_ in ('Location', 'Bucket', 'Key', 'ETag'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            value_ = self.gds_validate_string(value_, node, nodeName_)
            setattr(self, nodeName_, value_)
# end class PostResponse
class CopyObject(GeneratedsSuper):
    """Request body for an S3 CopyObject operation.

    Carries the source/destination coordinates, optional metadata and ACL
    overrides, conditional-copy predicates, storage class, and the request
    authentication fields.
    """
    subclass = None
    superclass = None
    def __init__(self, SourceBucket=None, SourceKey=None, DestinationBucket=None, DestinationKey=None,
                 MetadataDirective=None, Metadata=None, AccessControlList=None, CopySourceIfModifiedSince=None,
                 CopySourceIfUnmodifiedSince=None, CopySourceIfMatch=None, CopySourceIfNoneMatch=None,
                 StorageClass=None, AWSAccessKeyId=None, Timestamp=None, Signature=None, Credential=None, **kwargs_):
        """Store all fields; datetime fields supplied as strings are parsed
        with the '%Y-%m-%dT%H:%M:%S' format."""
        def as_datetime_(value):
            # Accept either a datetime object or an ISO-8601 string.
            if isinstance(value, BaseStrType_):
                return datetime_.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S')
            return value
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.SourceBucket = SourceBucket
        self.SourceKey = SourceKey
        self.DestinationBucket = DestinationBucket
        self.DestinationKey = DestinationKey
        self.MetadataDirective = MetadataDirective
        self.validate_MetadataDirective(self.MetadataDirective)
        # Fresh lists per instance; never share a mutable default.
        self.Metadata = [] if Metadata is None else Metadata
        self.AccessControlList = AccessControlList
        self.CopySourceIfModifiedSince = as_datetime_(CopySourceIfModifiedSince)
        self.CopySourceIfUnmodifiedSince = as_datetime_(CopySourceIfUnmodifiedSince)
        self.CopySourceIfMatch = [] if CopySourceIfMatch is None else CopySourceIfMatch
        self.CopySourceIfNoneMatch = [] if CopySourceIfNoneMatch is None else CopySourceIfNoneMatch
        self.StorageClass = StorageClass
        self.validate_StorageClass(self.StorageClass)
        self.AWSAccessKeyId = AWSAccessKeyId
        self.Timestamp = as_datetime_(Timestamp)
        self.Signature = Signature
        self.Credential = Credential
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, CopyObject)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = CopyObject.subclass or CopyObject
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SourceBucket(self): return self.SourceBucket
    def set_SourceBucket(self, SourceBucket): self.SourceBucket = SourceBucket
    def get_SourceKey(self): return self.SourceKey
    def set_SourceKey(self, SourceKey): self.SourceKey = SourceKey
    def get_DestinationBucket(self): return self.DestinationBucket
    def set_DestinationBucket(self, DestinationBucket): self.DestinationBucket = DestinationBucket
    def get_DestinationKey(self): return self.DestinationKey
    def set_DestinationKey(self, DestinationKey): self.DestinationKey = DestinationKey
    def get_MetadataDirective(self): return self.MetadataDirective
    def set_MetadataDirective(self, MetadataDirective): self.MetadataDirective = MetadataDirective
    def get_Metadata(self): return self.Metadata
    def set_Metadata(self, Metadata): self.Metadata = Metadata
    def add_Metadata(self, value): self.Metadata.append(value)
    def insert_Metadata_at(self, index, value): self.Metadata.insert(index, value)
    def replace_Metadata_at(self, index, value): self.Metadata[index] = value
    def get_AccessControlList(self): return self.AccessControlList
    def set_AccessControlList(self, AccessControlList): self.AccessControlList = AccessControlList
    def get_CopySourceIfModifiedSince(self): return self.CopySourceIfModifiedSince
    def set_CopySourceIfModifiedSince(self, CopySourceIfModifiedSince): self.CopySourceIfModifiedSince = CopySourceIfModifiedSince
    def get_CopySourceIfUnmodifiedSince(self): return self.CopySourceIfUnmodifiedSince
    def set_CopySourceIfUnmodifiedSince(self, CopySourceIfUnmodifiedSince): self.CopySourceIfUnmodifiedSince = CopySourceIfUnmodifiedSince
    def get_CopySourceIfMatch(self): return self.CopySourceIfMatch
    def set_CopySourceIfMatch(self, CopySourceIfMatch): self.CopySourceIfMatch = CopySourceIfMatch
    def add_CopySourceIfMatch(self, value): self.CopySourceIfMatch.append(value)
    def insert_CopySourceIfMatch_at(self, index, value): self.CopySourceIfMatch.insert(index, value)
    def replace_CopySourceIfMatch_at(self, index, value): self.CopySourceIfMatch[index] = value
    def get_CopySourceIfNoneMatch(self): return self.CopySourceIfNoneMatch
    def set_CopySourceIfNoneMatch(self, CopySourceIfNoneMatch): self.CopySourceIfNoneMatch = CopySourceIfNoneMatch
    def add_CopySourceIfNoneMatch(self, value): self.CopySourceIfNoneMatch.append(value)
    def insert_CopySourceIfNoneMatch_at(self, index, value): self.CopySourceIfNoneMatch.insert(index, value)
    def replace_CopySourceIfNoneMatch_at(self, index, value): self.CopySourceIfNoneMatch[index] = value
    def get_StorageClass(self): return self.StorageClass
    def set_StorageClass(self, StorageClass): self.StorageClass = StorageClass
    def get_AWSAccessKeyId(self): return self.AWSAccessKeyId
    def set_AWSAccessKeyId(self, AWSAccessKeyId): self.AWSAccessKeyId = AWSAccessKeyId
    def get_Timestamp(self): return self.Timestamp
    def set_Timestamp(self, Timestamp): self.Timestamp = Timestamp
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def get_Credential(self): return self.Credential
    def set_Credential(self, Credential): self.Credential = Credential
    def validate_MetadataDirective(self, value):
        """Warn (do not raise) when value is outside the MetadataDirective
        xsd enumeration ('COPY', 'REPLACE')."""
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['COPY', 'REPLACE']
            if value not in enumerations:
                # NOTE(review): under Python 3, value.encode('utf-8') renders
                # as "b'...'" in the message; kept as-is to preserve behavior.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on MetadataDirective' % {
                    "value": value.encode("utf-8")})
    def validate_StorageClass(self, value):
        """Warn (do not raise) when value is outside the StorageClass
        xsd enumeration."""
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['STANDARD', 'REDUCED_REDUNDANCY', 'GLACIER', 'UNKNOWN']
            if value not in enumerations:
                # NOTE(review): under Python 3, value.encode('utf-8') renders
                # as "b'...'" in the message; kept as-is to preserve behavior.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on StorageClass' % {
                    "value": value.encode("utf-8")})
    def hasContent_(self):
        """True when any child element or list is populated."""
        return bool(
            self.SourceBucket is not None or
            self.SourceKey is not None or
            self.DestinationBucket is not None or
            self.DestinationKey is not None or
            self.MetadataDirective is not None or
            self.Metadata or
            self.AccessControlList is not None or
            self.CopySourceIfModifiedSince is not None or
            self.CopySourceIfUnmodifiedSince is not None or
            self.CopySourceIfMatch or
            self.CopySourceIfNoneMatch or
            self.StorageClass is not None or
            self.AWSAccessKeyId is not None or
            self.Timestamp is not None or
            self.Signature is not None or
            self.Credential is not None
        )
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObject',
               pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CopyObject')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CopyObject')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CopyObject', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CopyObject'):
        """No attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObject',
                       fromsubclass_=False, pretty_print=True):
        """Write children in schema order; None / empty fields are skipped."""
        eol_ = '\n' if pretty_print else ''
        def write_string_(tag, val):
            # Emit <tag>escaped-text</tag> on its own (optionally indented) line.
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag,
                self.gds_encode(self.gds_format_string(quote_xml(val), input_name=tag)),
                namespaceprefix_, tag, eol_))
        def write_datetime_(tag, val):
            # Emit <tag>formatted-datetime</tag>; datetimes are not XML-escaped.
            showIndent(outfile, level, pretty_print)
            outfile.write('<%s%s>%s</%s%s>%s' % (
                namespaceprefix_, tag,
                self.gds_format_datetime(val, input_name=tag),
                namespaceprefix_, tag, eol_))
        if self.SourceBucket is not None:
            write_string_('SourceBucket', self.SourceBucket)
        if self.SourceKey is not None:
            write_string_('SourceKey', self.SourceKey)
        if self.DestinationBucket is not None:
            write_string_('DestinationBucket', self.DestinationBucket)
        if self.DestinationKey is not None:
            write_string_('DestinationKey', self.DestinationKey)
        if self.MetadataDirective is not None:
            write_string_('MetadataDirective', self.MetadataDirective)
        for entry_ in self.Metadata:
            entry_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Metadata',
                          pretty_print=pretty_print)
        if self.AccessControlList is not None:
            self.AccessControlList.export(outfile, level, namespaceprefix_, namespacedef_='',
                                          name_='AccessControlList', pretty_print=pretty_print)
        if self.CopySourceIfModifiedSince is not None:
            write_datetime_('CopySourceIfModifiedSince', self.CopySourceIfModifiedSince)
        if self.CopySourceIfUnmodifiedSince is not None:
            write_datetime_('CopySourceIfUnmodifiedSince', self.CopySourceIfUnmodifiedSince)
        for match_ in self.CopySourceIfMatch:
            write_string_('CopySourceIfMatch', match_)
        for match_ in self.CopySourceIfNoneMatch:
            write_string_('CopySourceIfNoneMatch', match_)
        if self.StorageClass is not None:
            write_string_('StorageClass', self.StorageClass)
        if self.AWSAccessKeyId is not None:
            write_string_('AWSAccessKeyId', self.AWSAccessKeyId)
        if self.Timestamp is not None:
            write_datetime_('Timestamp', self.Timestamp)
        if self.Signature is not None:
            write_string_('Signature', self.Signature)
        if self.Credential is not None:
            write_string_('Credential', self.Credential)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching attribute."""
        # Plain text fields: attribute name equals the tag name.
        string_fields_ = ('SourceBucket', 'SourceKey', 'DestinationBucket', 'DestinationKey',
                          'AWSAccessKeyId', 'Signature', 'Credential')
        if nodeName_ in string_fields_:
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            setattr(self, nodeName_, self.gds_validate_string(value_, node, nodeName_))
        elif nodeName_ == 'MetadataDirective':
            value_ = self.gds_parse_string(child_.text, node, 'MetadataDirective')
            self.MetadataDirective = self.gds_validate_string(value_, node, 'MetadataDirective')
            # validate type MetadataDirective
            self.validate_MetadataDirective(self.MetadataDirective)
        elif nodeName_ == 'Metadata':
            entry_ = MetadataEntry.factory(parent_object_=self)
            entry_.build(child_)
            entry_.original_tagname_ = 'Metadata'
            self.Metadata.append(entry_)
        elif nodeName_ == 'AccessControlList':
            acl_ = AccessControlList.factory(parent_object_=self)
            acl_.build(child_)
            acl_.original_tagname_ = 'AccessControlList'
            self.AccessControlList = acl_
        elif nodeName_ in ('CopySourceIfModifiedSince', 'CopySourceIfUnmodifiedSince', 'Timestamp'):
            setattr(self, nodeName_, self.gds_parse_datetime(child_.text))
        elif nodeName_ in ('CopySourceIfMatch', 'CopySourceIfNoneMatch'):
            value_ = self.gds_parse_string(child_.text, node, nodeName_)
            getattr(self, nodeName_).append(self.gds_validate_string(value_, node, nodeName_))
        elif nodeName_ == 'StorageClass':
            value_ = self.gds_parse_string(child_.text, node, 'StorageClass')
            self.StorageClass = self.gds_validate_string(value_, node, 'StorageClass')
            # validate type StorageClass
            self.validate_StorageClass(self.StorageClass)
# end class CopyObject
class CopyObjectResponse(GeneratedsSuper):
    """Wrapper element holding a single CopyObjectResult child."""
    subclass = None
    superclass = None
    def __init__(self, CopyObjectResult=None, **kwargs_):
        """Store the optional CopyObjectResult child."""
        self.CopyObjectResult = CopyObjectResult
        self.parent_object_ = kwargs_.get('parent_object_')
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, CopyObjectResponse)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = CopyObjectResponse.subclass or CopyObjectResponse
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CopyObjectResult(self): return self.CopyObjectResult
    def set_CopyObjectResult(self, CopyObjectResult): self.CopyObjectResult = CopyObjectResult
    def hasContent_(self):
        """True when the CopyObjectResult child is populated."""
        return self.CopyObjectResult is not None
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObjectResponse',
               pretty_print=True):
        """Serialize this element and its child as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CopyObjectResponse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CopyObjectResponse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CopyObjectResponse',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CopyObjectResponse'):
        """No attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObjectResponse',
                       fromsubclass_=False, pretty_print=True):
        """Delegate rendering to the CopyObjectResult child, if any."""
        if self.CopyObjectResult is not None:
            self.CopyObjectResult.export(outfile, level, namespaceprefix_, namespacedef_='',
                                         name_='CopyObjectResult', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse a CopyObjectResult child element."""
        if nodeName_ == 'CopyObjectResult':
            result_ = CopyObjectResult.factory(parent_object_=self)
            result_.build(child_)
            result_.original_tagname_ = 'CopyObjectResult'
            self.CopyObjectResult = result_
# end class CopyObjectResponse
class CopyObjectResult(GeneratedsSuper):
    """Result of a CopyObject call: LastModified timestamp and ETag."""
    subclass = None
    superclass = None
    def __init__(self, LastModified=None, ETag=None, **kwargs_):
        """Store ETag and LastModified; a string LastModified is parsed
        with the '%Y-%m-%dT%H:%M:%S' format."""
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        if isinstance(LastModified, BaseStrType_):
            self.LastModified = datetime_.datetime.strptime(LastModified, '%Y-%m-%dT%H:%M:%S')
        else:
            self.LastModified = LastModified
        self.ETag = ETag
    def factory(*args_, **kwargs_):
        """Create an instance, honoring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, CopyObjectResult)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = CopyObjectResult.subclass or CopyObjectResult
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_LastModified(self): return self.LastModified
    def set_LastModified(self, LastModified): self.LastModified = LastModified
    def get_ETag(self): return self.ETag
    def set_ETag(self, ETag): self.ETag = ETag
    def hasContent_(self):
        """True when either child element is populated."""
        return (
            self.LastModified is not None or
            self.ETag is not None
        )
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObjectResult',
               pretty_print=True):
        """Serialize this element and its children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CopyObjectResult')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CopyObjectResult')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_,))
            return
        outfile.write('>%s' % (eol_,))
        self.exportChildren(outfile, level + 1, '', namespacedef_, name_='CopyObjectResult',
                            pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CopyObjectResult'):
        """No attributes are defined for this element."""
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='CopyObjectResult',
                       fromsubclass_=False, pretty_print=True):
        """Write LastModified (as a formatted datetime) then ETag."""
        eol_ = '\n' if pretty_print else ''
        if self.LastModified is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sLastModified>%s</%sLastModified>%s' % (
                namespaceprefix_,
                self.gds_format_datetime(self.LastModified, input_name='LastModified'),
                namespaceprefix_, eol_))
        if self.ETag is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sETag>%s</%sETag>%s' % (
                namespaceprefix_,
                self.gds_encode(self.gds_format_string(quote_xml(self.ETag), input_name='ETag')),
                namespaceprefix_, eol_))
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            tag_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """No attributes to parse for this element."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse LastModified (datetime) or ETag (string)."""
        if nodeName_ == 'LastModified':
            self.LastModified = self.gds_parse_datetime(child_.text)
        elif nodeName_ == 'ETag':
            value_ = self.gds_parse_string(child_.text, node, 'ETag')
            self.ETag = self.gds_validate_string(value_, node, 'ETag')
# end class CopyObjectResult
class RequestPaymentConfiguration(GeneratedsSuper):
    """Binding class for the S3 <RequestPaymentConfiguration> element.

    Carries a single optional ``Payer`` child, restricted by the schema
    to the enumeration {'BucketOwner', 'Requester'}.
    """
    subclass = None
    superclass = None
    def __init__(self, Payer=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Payer = Payer
        self.validate_Payer(self.Payer)
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RequestPaymentConfiguration)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RequestPaymentConfiguration.subclass:
            return RequestPaymentConfiguration.subclass(*args_, **kwargs_)
        else:
            return RequestPaymentConfiguration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Payer(self):
        return self.Payer
    def set_Payer(self, Payer):
        self.Payer = Payer
    def validate_Payer(self, value):
        # Validate type Payer, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['BucketOwner', 'Requester']
            if value not in enumerations:
                # Fix: the original interpolated value.encode("utf-8"), which
                # renders as b'...' in the warning text under Python 3; warn
                # with the plain string instead.
                warnings_.warn(
                    'Value "%(value)s" does not match xsd enumeration restriction on Payer' % {
                        "value": value})
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.Payer is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='RequestPaymentConfiguration',
               pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RequestPaymentConfiguration')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RequestPaymentConfiguration')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='RequestPaymentConfiguration',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing tag when there are no children.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='RequestPaymentConfiguration'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='RequestPaymentConfiguration', fromsubclass_=False, pretty_print=True):
        # Emit the optional Payer child element.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Payer is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sPayer>%s</%sPayer>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Payer), input_name='Payer')),
                namespaceprefix_, eol_))
    def build(self, node):
        # Populate this instance from an element-tree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Payer':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Payer')
            value_ = self.gds_validate_string(value_, node, 'Payer')
            self.Payer = value_
            # validate type Payer
            self.validate_Payer(self.Payer)
# end class RequestPaymentConfiguration
class VersioningConfiguration(GeneratedsSuper):
    """Binding class for the S3 <VersioningConfiguration> element.

    Carries two optional children: ``Status`` (enum {'Enabled',
    'Suspended'}) and ``MfaDelete`` (enum {'Enabled', 'Disabled'}).
    """
    subclass = None
    superclass = None
    def __init__(self, Status=None, MfaDelete=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Status = Status
        self.validate_VersioningStatus(self.Status)
        self.MfaDelete = MfaDelete
        self.validate_MfaDeleteStatus(self.MfaDelete)
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, VersioningConfiguration)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if VersioningConfiguration.subclass:
            return VersioningConfiguration.subclass(*args_, **kwargs_)
        else:
            return VersioningConfiguration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Status(self):
        return self.Status
    def set_Status(self, Status):
        self.Status = Status
    def get_MfaDelete(self):
        return self.MfaDelete
    def set_MfaDelete(self, MfaDelete):
        self.MfaDelete = MfaDelete
    def validate_VersioningStatus(self, value):
        # Validate type VersioningStatus, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['Enabled', 'Suspended']
            if value not in enumerations:
                # Fix: the original interpolated value.encode("utf-8"), which
                # renders as b'...' in the warning under Python 3.
                warnings_.warn(
                    'Value "%(value)s" does not match xsd enumeration restriction on VersioningStatus' % {
                        "value": value})
    def validate_MfaDeleteStatus(self, value):
        # Validate type MfaDeleteStatus, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['Enabled', 'Disabled']
            if value not in enumerations:
                # Same Python-3 bytes-repr fix as validate_VersioningStatus.
                warnings_.warn(
                    'Value "%(value)s" does not match xsd enumeration restriction on MfaDeleteStatus' % {
                        "value": value})
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.Status is not None or
            self.MfaDelete is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='VersioningConfiguration',
               pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('VersioningConfiguration')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='VersioningConfiguration')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='VersioningConfiguration',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing tag when there are no children.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='VersioningConfiguration'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='VersioningConfiguration', fromsubclass_=False, pretty_print=True):
        # Emit the optional Status and MfaDelete children.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Status is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sStatus>%s</%sStatus>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Status), input_name='Status')),
                namespaceprefix_, eol_))
        if self.MfaDelete is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sMfaDelete>%s</%sMfaDelete>%s' % (namespaceprefix_, self.gds_encode(
                self.gds_format_string(quote_xml(self.MfaDelete), input_name='MfaDelete')), namespaceprefix_, eol_))
    def build(self, node):
        # Populate this instance from an element-tree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Status':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Status')
            value_ = self.gds_validate_string(value_, node, 'Status')
            self.Status = value_
            # validate type VersioningStatus
            self.validate_VersioningStatus(self.Status)
        elif nodeName_ == 'MfaDelete':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'MfaDelete')
            value_ = self.gds_validate_string(value_, node, 'MfaDelete')
            self.MfaDelete = value_
            # validate type MfaDeleteStatus
            self.validate_MfaDeleteStatus(self.MfaDelete)
# end class VersioningConfiguration
class NotificationConfiguration(GeneratedsSuper):
    """Binding class for the S3 <NotificationConfiguration> element.

    Holds a repeating list of TopicConfiguration children.
    """
    subclass = None
    superclass = None
    def __init__(self, TopicConfiguration=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        # A fresh list per instance avoids sharing a mutable default.
        if TopicConfiguration is None:
            self.TopicConfiguration = []
        else:
            self.TopicConfiguration = TopicConfiguration
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, NotificationConfiguration)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if NotificationConfiguration.subclass:
            return NotificationConfiguration.subclass(*args_, **kwargs_)
        else:
            return NotificationConfiguration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_TopicConfiguration(self):
        return self.TopicConfiguration
    def set_TopicConfiguration(self, TopicConfiguration):
        self.TopicConfiguration = TopicConfiguration
    def add_TopicConfiguration(self, value):
        self.TopicConfiguration.append(value)
    def insert_TopicConfiguration_at(self, index, value):
        self.TopicConfiguration.insert(index, value)
    def replace_TopicConfiguration_at(self, index, value):
        self.TopicConfiguration[index] = value
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.TopicConfiguration
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='NotificationConfiguration',
               pretty_print=True):
        # Serialize this element and its children as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('NotificationConfiguration')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='NotificationConfiguration')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='NotificationConfiguration',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing tag when there are no children.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='',
                         name_='NotificationConfiguration'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
                       name_='NotificationConfiguration', fromsubclass_=False, pretty_print=True):
        # Delegate serialization of each TopicConfiguration child.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for TopicConfiguration_ in self.TopicConfiguration:
            TopicConfiguration_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='TopicConfiguration',
                                       pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from an element-tree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'TopicConfiguration':
            obj_ = TopicConfiguration.factory(parent_object_=self)
            obj_.build(child_)
            self.TopicConfiguration.append(obj_)
            obj_.original_tagname_ = 'TopicConfiguration'
# end class NotificationConfiguration
class TopicConfiguration(GeneratedsSuper):
    """Binding class for the S3 <TopicConfiguration> element.

    Holds one optional Topic string child plus a repeating list of
    Event string children.
    """
    subclass = None
    superclass = None
    def __init__(self, Topic=None, Event=None, **kwargs_):
        self.original_tagname_ = None
        self.parent_object_ = kwargs_.get('parent_object_')
        self.Topic = Topic
        # A fresh list per instance avoids sharing a mutable default.
        if Event is None:
            self.Event = []
        else:
            self.Event = Event
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TopicConfiguration)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TopicConfiguration.subclass:
            return TopicConfiguration.subclass(*args_, **kwargs_)
        else:
            return TopicConfiguration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Topic(self):
        return self.Topic
    def set_Topic(self, Topic):
        self.Topic = Topic
    def get_Event(self):
        return self.Event
    def set_Event(self, Event):
        self.Event = Event
    def add_Event(self, value):
        self.Event.append(value)
    def insert_Event_at(self, index, value):
        self.Event.insert(index, value)
    def replace_Event_at(self, index, value):
        self.Event[index] = value
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.Topic is not None or
            self.Event
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='',
               namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='TopicConfiguration',
               pretty_print=True):
        # Serialize this element and its children as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TopicConfiguration')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-emit under the tag name the element was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '',))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TopicConfiguration')
        if self.hasContent_():
            outfile.write('>%s' % (eol_,))
            self.exportChildren(outfile, level + 1, '', namespacedef_, name_='TopicConfiguration',
                                pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing tag when there are no children.
            outfile.write('/>%s' % (eol_,))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TopicConfiguration'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespaceprefix_='',
                       namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"', name_='TopicConfiguration',
                       fromsubclass_=False, pretty_print=True):
        # Emit the optional Topic child, then every Event child in order.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Topic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sTopic>%s</%sTopic>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(self.Topic), input_name='Topic')),
                namespaceprefix_, eol_))
        for Event_ in self.Event:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sEvent>%s</%sEvent>%s' % (
                namespaceprefix_, self.gds_encode(self.gds_format_string(quote_xml(Event_), input_name='Event')),
                namespaceprefix_, eol_))
    def build(self, node):
        # Populate this instance from an element-tree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are defined for this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Topic':
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Topic')
            value_ = self.gds_validate_string(value_, node, 'Topic')
            self.Topic = value_
        elif nodeName_ == 'Event':
            # Event repeats, so collect every occurrence.
            value_ = child_.text
            value_ = self.gds_parse_string(value_, node, 'Event')
            value_ = self.gds_validate_string(value_, node, 'Event')
            self.Event.append(value_)
# end class TopicConfiguration
# Tag-name -> binding-class overrides consulted first by get_root_tag();
# empty here, so lookups fall through to globals().
GDSClassesMapping = {
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the usage banner, then terminate with exit status 1."""
    print(USAGE_TEXT)
    raise SystemExit(1)
def get_root_tag(node):
    """Return (tag-name, binding class or None) for an element node.

    The explicit GDSClassesMapping table wins; otherwise the class is
    looked up by tag name among this module's globals.
    """
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    return tag, GDSClassesMapping.get(tag, globals().get(tag))
def parse(inFileName, silence=False):
    """Parse *inFileName*, build the object tree, and optionally export it.

    Falls back to CreateBucket when the root tag has no binding class.
    Returns the root object.
    """
    doc = parsexml_(inFileName, None)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'CreateBucket', CreateBucket
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse *inFileName* and also build an etree plus node mappings.

    Returns (root object, root etree element, mapping, reverse mapping).
    """
    doc = parsexml_(inFileName, None)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'CreateBucket', CreateBucket
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse an XML string, build the object tree, and export it.

    Arguments:
    - inString -- the XML fragment; it must not start with an XML
      declaration that names an encoding.
    - silence -- when False, the object is exported to stdout.
    Returns the root object of the tree.
    """
    rootNode = parsexmlstring_(inString, None)
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'CreateBucket', CreateBucket
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='xmlns:tns="http://s3.amazonaws.com/doc/2006-03-01/"')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse *inFileName* and emit Python literal construction code.

    Returns the root object; when not silenced, writes a runnable
    snippet that rebuilds the tree via the model module.
    """
    doc = parsexml_(inFileName, None)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag, rootClass = 'CreateBucket', CreateBucket
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Drop the DOM reference so Python can reclaim its memory.
    doc = None
    if not silence:
        sys.stdout.write('#from s3_api import *\n\n')
        sys.stdout.write('import s3_api as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """Command-line entry point: parse the single XML file argument."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()  # prints help and exits with status 1
    parse(args[0])
if __name__ == '__main__':
    # Run the command-line parser when executed as a script.
    # import pdb; pdb.set_trace()
    main()
# Old-name -> new-name renames applied during generation; none here.
RenameMappings_ = {
}
# Public API of this generated module: one entry per binding class.
__all__ = [
    "AccessControlList",
    "AccessControlPolicy",
    "AmazonCustomerByEmail",
    "BucketLoggingStatus",
    "CanonicalUser",
    "CopyObject",
    "CopyObjectResponse",
    "CopyObjectResult",
    "CreateBucket",
    "CreateBucketConfiguration",
    "CreateBucketResponse",
    "CreateBucketResult",
    "DeleteBucket",
    "DeleteBucketResponse",
    "DeleteMarkerEntry",
    "DeleteObject",
    "DeleteObjectResponse",
    "GetBucketAccessControlPolicy",
    "GetBucketAccessControlPolicyResponse",
    "GetBucketLoggingStatus",
    "GetBucketLoggingStatusResponse",
    "GetObject",
    "GetObjectAccessControlPolicy",
    "GetObjectAccessControlPolicyResponse",
    "GetObjectExtended",
    "GetObjectExtendedResponse",
    "GetObjectResponse",
    "GetObjectResult",
    "Grant",
    "Grantee",
    "Group",
    "ListAllMyBuckets",
    "ListAllMyBucketsEntry",
    "ListAllMyBucketsList",
    "ListAllMyBucketsResponse",
    "ListAllMyBucketsResult",
    "ListBucket",
    "ListBucketResponse",
    "ListBucketResult",
    "ListEntry",
    "ListVersionsResponse",
    "ListVersionsResult",
    "LocationConstraint",
    "LoggingSettings",
    "MetadataEntry",
    "NotificationConfiguration",
    "PostResponse",
    "PrefixEntry",
    "PutObject",
    "PutObjectInline",
    "PutObjectInlineResponse",
    "PutObjectResponse",
    "PutObjectResult",
    "RequestPaymentConfiguration",
    "Result",
    "SetBucketAccessControlPolicy",
    "SetBucketAccessControlPolicyResponse",
    "SetBucketLoggingStatus",
    "SetBucketLoggingStatusResponse",
    "SetObjectAccessControlPolicy",
    "SetObjectAccessControlPolicyResponse",
    "Status",
    "TopicConfiguration",
    "User",
    "VersionEntry",
    "VersioningConfiguration"
]
| 40.225947
| 145
| 0.613386
| 43,033
| 442,767
| 6.036646
| 0.017545
| 0.034087
| 0.024633
| 0.027447
| 0.836539
| 0.815756
| 0.802506
| 0.770347
| 0.74583
| 0.742135
| 0
| 0.005652
| 0.287869
| 442,767
| 11,006
| 146
| 40.229602
| 0.818225
| 0.015091
| 0
| 0.771618
| 1
| 0.000109
| 0.081502
| 0.024207
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132978
| false
| 0.015029
| 0.024396
| 0.02897
| 0.268896
| 0.07482
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
62d4d15d238c5de06d4e73aed78057ff7986a4de
| 16,760
|
py
|
Python
|
hallo/test/modules/permission_control/test_find_permission_mask.py
|
joshcoales/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2018-05-19T22:27:20.000Z
|
2018-05-19T22:27:20.000Z
|
hallo/test/modules/permission_control/test_find_permission_mask.py
|
joshcoales/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 75
|
2015-09-26T18:07:18.000Z
|
2022-01-04T07:15:11.000Z
|
hallo/test/modules/permission_control/test_find_permission_mask.py
|
SpangleLabs/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2021-04-10T12:02:47.000Z
|
2021-04-10T12:02:47.000Z
|
import hallo.modules.permission_control
from hallo.hallo import Hallo
from hallo.modules.permission_control import Permissions
from hallo.permission_mask import PermissionMask
from hallo.test.server_mock import ServerMock
from hallo.user_group import UserGroup
def test_3_fail(hallo_getter):
    """More than two filter arguments must raise PermissionControlException."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    try:
        controller.find_permission_mask(["a", "b", "c"], bot.test_user, bot.test_chan)
        assert False, "Exception should be thrown if more than 2 arguments passed."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "too many filters" in message
def test_2_no_server(hallo_getter):
    """Two filters with no server filter among them must be rejected."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    try:
        controller.find_permission_mask(
            ["channel=chan1", "user=user1"], bot.test_user, bot.test_chan
        )
        assert False, "Exception should be thrown if 2 arguments and neither is server."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "no server name found" in message
def test_2_no_server_by_name(hallo_getter):
    """A server= filter naming an unknown server must be rejected."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    try:
        # NOTE(review): test_user is passed for both context arguments here,
        # unlike sibling tests — presumably harmless since the server filter
        # fails first; confirm against find_permission_mask's signature.
        controller.find_permission_mask(
            ["server=no_server_by_name", "chan=test_chan1"], bot.test_user, bot.test_user
        )
        assert False, "Exception should be thrown if server does not exist."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "no server exists by that name" in message
def test_2_server_chan(hallo_getter):
    """server= plus channel= filters resolve to that channel's mask."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    # Register a mock server carrying one channel with a fresh mask.
    server = ServerMock(bot)
    server.name = "test_serv1"
    bot.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    found = controller.find_permission_mask(
        ["server=test_serv1", "channel=test_chan1"], bot.test_user, bot.test_chan
    )
    assert chan_mask == found, "Did not find the correct permission mask."
def test_2_server_user(hallo_getter):
    """server= plus user= filters resolve to that user's mask."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    # Register a mock server carrying one user with a fresh mask.
    server = ServerMock(bot)
    server.name = "test_serv1"
    bot.add_server(server)
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(
        ["server=test_serv1", "user=test_user1"], bot.test_user, bot.test_chan
    )
    assert user_mask == found, "Did not find the correct permission mask."
def test_2_server_no_chan_user(hallo_getter):
    """A server filter plus a non-channel/non-user filter must fail."""
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    # Register a mock server with one channel and one user, each masked.
    server = ServerMock(bot)
    server.name = "test_serv1"
    bot.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    try:
        controller.find_permission_mask(["server=test_serv1", "core"], user, channel)
        assert False, "Should have failed to find any permission mask."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "server but not channel or user" in message
def test_1_hallo():
    """The bare 'hallo' filter resolves to the top-level hallo mask."""
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(["hallo"], user, channel)
    assert found == root_mask, "Did not find the correct permission mask."
def test_1_server():
    """The bare 'server' filter resolves to the current server's mask."""
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(["server"], user, channel)
    assert found == server_mask, "Did not find the correct permission mask."
def test_1_server_no_name():
    """A server= filter naming a server that does not exist must fail."""
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    try:
        controller.find_permission_mask(["server=test_serv2"], user, channel)
        assert False, "Find permission mask should have failed."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "no server exists by that name" in message
def test_1_server_name(hallo_getter):
    """A server= filter naming an existing server resolves to its mask."""
    # NOTE(review): the fixture result is unused beyond setup side effects —
    # presumably kept for module loading; confirm.
    bot = hallo_getter({"permission_control"})
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(["server=test_serv1"], user, channel)
    assert found == server_mask, "Did not find correct permission mask"
def test_1_channel():
    """The bare 'channel' filter resolves to the current channel's mask."""
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(["channel"], user, channel)
    assert found == chan_mask, "Did not find the correct permission mask."
def test_1_channel_privmsg():
    """The bare 'channel' filter must fail when used from a privmsg."""
    controller = Permissions()
    # Build a full hierarchy: hallo -> server -> channel/user, each masked.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    try:
        # A privmsg has no destination channel, hence None.
        controller.find_permission_mask(["channel"], user, None)
        assert False, "Should not have managed to get permission mask."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "can't set generic channel permissions in a privmsg" in message
def test_1_channel_name():
    """A channel= filter resolves to the named channel's mask, not the current one."""
    controller = Permissions()
    # Build a hierarchy with two masked channels plus a masked user.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    other_chan = server.get_channel_by_address("test_chan2".lower(), "test_chan2")
    target_mask = PermissionMask()
    other_chan.permission_mask = target_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    found = controller.find_permission_mask(["channel=test_chan2"], user, channel)
    assert found == target_mask, "Did not find the correct permission mask."
def test_1_user_group_no_name():
    """A user_group= filter naming an unknown group must fail."""
    controller = Permissions()
    # Build a full hierarchy plus one registered, masked user group.
    hallo_obj = Hallo()
    root_mask = PermissionMask()
    hallo_obj.permission_mask = root_mask
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server_mask = PermissionMask()
    server.permission_mask = server_mask
    hallo_obj.add_server(server)
    channel = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan_mask = PermissionMask()
    channel.permission_mask = chan_mask
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user_mask = PermissionMask()
    user.permission_mask = user_mask
    group = UserGroup("test_group1", hallo_obj)
    group_mask = PermissionMask()
    group.permission_mask = group_mask
    hallo_obj.add_user_group(group)
    try:
        controller.find_permission_mask(["user_group=test_group2"], user, channel)
        assert False, "Find permission mask should have failed."
    except hallo.modules.permission_control.PermissionControlException as e:
        message = str(e).lower()
        assert "error" in message
        assert "no user group exists by that name" in message
def test_1_user_group_name():
    """find_permission_mask with user_group=<name> returns that group's mask."""
    perm_cont = Permissions()
    # Mock hallo with one server, one channel, one user and one user group.
    hallo_obj = Hallo()
    hallo_obj.permission_mask = PermissionMask()
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server.permission_mask = PermissionMask()
    hallo_obj.add_server(server)
    chan = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan.permission_mask = PermissionMask()
    user = server.get_user_by_address("test_user1".lower(), "test_user1")
    user.permission_mask = PermissionMask()
    group = UserGroup("test_group1", hallo_obj)
    group_mask = PermissionMask()
    group.permission_mask = group_mask
    hallo_obj.add_user_group(group)
    # The registered group's mask should come back for its own name.
    found = perm_cont.find_permission_mask(["user_group=test_group1"], user, chan)
    assert found == group_mask, "Did not find the correct permission mask."
def test_1_user_name():
    """find_permission_mask with user=<name> returns that user's mask."""
    perm_cont = Permissions()
    # Mock hallo with one server, one channel and two users.
    hallo_obj = Hallo()
    hallo_obj.permission_mask = PermissionMask()
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server.permission_mask = PermissionMask()
    hallo_obj.add_server(server)
    chan = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan.permission_mask = PermissionMask()
    user1 = server.get_user_by_address("test_user1".lower(), "test_user1")
    user1.permission_mask = PermissionMask()
    user2 = server.get_user_by_address("test_user2".lower(), "test_user2")
    user2_mask = PermissionMask()
    user2.permission_mask = user2_mask
    # Asking for the second user by name should yield that user's mask.
    found = perm_cont.find_permission_mask(["user=test_user2"], user1, chan)
    assert found == user2_mask, "Did not find the correct permission mask."
def test_1_user_just_name():
    """A bare name resolves to a user present in the current channel."""
    perm_cont = Permissions()
    # Mock hallo with one server, a channel, and two users in that channel.
    hallo_obj = Hallo()
    hallo_obj.permission_mask = PermissionMask()
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server.permission_mask = PermissionMask()
    hallo_obj.add_server(server)
    chan = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan.permission_mask = PermissionMask()
    user1 = server.get_user_by_address("test_user1".lower(), "test_user1")
    user1.permission_mask = PermissionMask()
    chan.add_user(user1)
    user2 = server.get_user_by_address("test_user2".lower(), "test_user2")
    user2_mask = PermissionMask()
    user2.permission_mask = user2_mask
    chan.add_user(user2)
    # Since user2 shares the channel, the bare name should find their mask.
    found = perm_cont.find_permission_mask(["test_user2"], user1, chan)
    assert found == user2_mask, "Did not find the correct permission mask."
def test_1_user_just_name_not_in_channel():
    """A bare name for a user absent from the channel raises an error."""
    perm_cont = Permissions()
    # Mock hallo: two users exist, but only user1 is in the channel.
    hallo_obj = Hallo()
    hallo_obj.permission_mask = PermissionMask()
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server.permission_mask = PermissionMask()
    hallo_obj.add_server(server)
    chan = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan.permission_mask = PermissionMask()
    user1 = server.get_user_by_address("test_user1".lower(), "test_user1")
    user1.permission_mask = PermissionMask()
    chan.add_user(user1)
    user2 = server.get_user_by_address("test_user2".lower(), "test_user2")
    user2.permission_mask = PermissionMask()
    # user2 was never added to the channel, so the bare-name lookup must fail.
    try:
        perm_cont.find_permission_mask(["test_user2"], user1, chan)
        assert False, "Find permission mask should have failed."
    except hallo.modules.permission_control.PermissionControlException as e:
        err_text = str(e).lower()
        assert "error" in err_text
        assert "i can't find that permission mask" in err_text
def test_1_user_just_name_privmsg():
    """A bare name cannot be resolved in a privmsg (no channel context)."""
    perm_cont = Permissions()
    # Mock hallo with one server, a channel and two users; the lookup below
    # happens with no destination channel (privmsg).
    hallo_obj = Hallo()
    hallo_obj.permission_mask = PermissionMask()
    server = ServerMock(hallo_obj)
    server.name = "test_serv1"
    server.permission_mask = PermissionMask()
    hallo_obj.add_server(server)
    chan = server.get_channel_by_address("test_chan1".lower(), "test_chan1")
    chan.permission_mask = PermissionMask()
    user1 = server.get_user_by_address("test_user1".lower(), "test_user1")
    user1.permission_mask = PermissionMask()
    chan.add_user(user1)
    user2 = server.get_user_by_address("test_user2".lower(), "test_user2")
    user2.permission_mask = PermissionMask()
    # With destination None there is no channel to search, so this must fail.
    try:
        perm_cont.find_permission_mask(["test_user2"], user1, None)
        assert False, "Find permission mask should have failed."
    except hallo.modules.permission_control.PermissionControlException as e:
        err_text = str(e).lower()
        assert "error" in err_text
        assert "i can't find that permission mask" in err_text
| 38.617512
| 103
| 0.707757
| 2,156
| 16,760
| 5.264378
| 0.050557
| 0.128282
| 0.040088
| 0.036828
| 0.928282
| 0.919912
| 0.909515
| 0.887665
| 0.865727
| 0.855947
| 0
| 0.038737
| 0.1929
| 16,760
| 433
| 104
| 38.706697
| 0.800325
| 0.075955
| 0
| 0.818182
| 0
| 0
| 0.160412
| 0.0044
| 0
| 0
| 0
| 0
| 0.101928
| 1
| 0.052342
| false
| 0.002755
| 0.016529
| 0
| 0.068871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a12d0a7e33c43a50be88445769392ad553e804f
| 81
|
py
|
Python
|
python/module/spatial/__init__.py
|
DerThorsten/spatial
|
851cb9f840fbd79d5a152bc21c7f94619a94220a
|
[
"MIT"
] | null | null | null |
python/module/spatial/__init__.py
|
DerThorsten/spatial
|
851cb9f840fbd79d5a152bc21c7f94619a94220a
|
[
"MIT"
] | null | null | null |
python/module/spatial/__init__.py
|
DerThorsten/spatial
|
851cb9f840fbd79d5a152bc21c7f94619a94220a
|
[
"MIT"
] | 1
|
2019-08-01T12:03:55.000Z
|
2019-08-01T12:03:55.000Z
|
from . _spatial import *
def pure_python():
    """Pure-Python placeholder function shipped alongside the compiled
    ``_spatial`` extension; does nothing and returns ``None``.
    """
    return None
| 9
| 24
| 0.506173
| 8
| 81
| 4.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.358025
| 81
| 9
| 25
| 9
| 0.75
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
3db6d2606affab161fdaec54b8f7b7aedfe2a3ed
| 181
|
py
|
Python
|
federatedscope/vertical_fl/worker/__init__.py
|
alibaba/FederatedScope
|
fcf6d237624769ea094cfd68803901622f14fc23
|
[
"Apache-2.0"
] | 9
|
2022-03-24T07:59:37.000Z
|
2022-03-31T06:47:52.000Z
|
federatedscope/vertical_fl/worker/__init__.py
|
alibaba/FederatedScope
|
fcf6d237624769ea094cfd68803901622f14fc23
|
[
"Apache-2.0"
] | 1
|
2022-03-28T13:52:17.000Z
|
2022-03-28T13:52:17.000Z
|
federatedscope/vertical_fl/worker/__init__.py
|
alibaba/FederatedScope
|
fcf6d237624769ea094cfd68803901622f14fc23
|
[
"Apache-2.0"
] | null | null | null |
from federatedscope.vertical_fl.worker.vertical_client import vFLClient
from federatedscope.vertical_fl.worker.vertical_server import vFLServer
__all__ = ['vFLServer', 'vFLClient']
| 45.25
| 71
| 0.856354
| 21
| 181
| 7
| 0.52381
| 0.244898
| 0.353742
| 0.380952
| 0.571429
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066298
| 181
| 4
| 72
| 45.25
| 0.869822
| 0
| 0
| 0
| 0
| 0
| 0.098901
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3dee2552b4e4e50f8c978d7589eb8ec8e8832fae
| 257
|
py
|
Python
|
example/prj_name/pkg_name/__init__.py
|
glimix/build-capi
|
80f8faebe493108012dcec3c7685a0350cb02fda
|
[
"MIT"
] | null | null | null |
example/prj_name/pkg_name/__init__.py
|
glimix/build-capi
|
80f8faebe493108012dcec3c7685a0350cb02fda
|
[
"MIT"
] | null | null | null |
example/prj_name/pkg_name/__init__.py
|
glimix/build-capi
|
80f8faebe493108012dcec3c7685a0350cb02fda
|
[
"MIT"
] | null | null | null |
def get_include():
    """Return the path to this package's bundled C header ('include') dir."""
    import os.path
    import pkg_name
    package_dir = os.path.dirname(pkg_name.__file__)
    return os.path.join(package_dir, 'include')
def get_lib():
    """Return the path to this package's bundled library ('lib') dir."""
    import os.path
    import pkg_name
    package_dir = os.path.dirname(pkg_name.__file__)
    return os.path.join(package_dir, 'lib')
| 25.7
| 54
| 0.708171
| 38
| 257
| 4.421053
| 0.368421
| 0.166667
| 0.154762
| 0.202381
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0.809524
| 0
| 0
| 0.190661
| 257
| 9
| 55
| 28.555556
| 0.807692
| 0
| 0
| 0.5
| 0
| 0
| 0.038911
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
9a88abe58a578ee096c2f5b33d94e3c7791aaf73
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_nami/na_nami_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nami/na_nami_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nami/na_nami_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Empty Ratings subclasses for every mid-lane matchup against Nami (NA).
# Each champion name below becomes a module-level class named
# NA_Nami_Mid_<Champion>, identical to writing `class ...(Ratings): pass`
# by hand; creation order (and thus Ratings.__subclasses__() order) matches
# the original hand-written list.
_CHAMPIONS = (
    'Aatrox', 'Ahri', 'Akali', 'Alistar', 'Amumu', 'Anivia', 'Annie', 'Ashe',
    'AurelionSol', 'Azir', 'Bard', 'Blitzcrank', 'Brand', 'Braum', 'Caitlyn',
    'Camille', 'Cassiopeia', 'Chogath', 'Corki', 'Darius', 'Diana', 'Draven',
    'DrMundo', 'Ekko', 'Elise', 'Evelynn', 'Ezreal', 'Fiddlesticks', 'Fiora',
    'Fizz', 'Galio', 'Gangplank', 'Garen', 'Gnar', 'Gragas', 'Graves',
    'Hecarim', 'Heimerdinger', 'Illaoi', 'Irelia', 'Ivern', 'Janna',
    'JarvanIV', 'Jax', 'Jayce', 'Jhin', 'Jinx', 'Kalista', 'Karma',
    'Karthus', 'Kassadin', 'Katarina', 'Kayle', 'Kayn', 'Kennen', 'Khazix',
    'Kindred', 'Kled', 'KogMaw', 'Leblanc', 'LeeSin', 'Leona', 'Lissandra',
    'Lucian', 'Lulu', 'Lux', 'Malphite', 'Malzahar', 'Maokai', 'MasterYi',
    'MissFortune', 'MonkeyKing', 'Mordekaiser', 'Morgana', 'Nami', 'Nasus',
    'Nautilus', 'Nidalee', 'Nocturne', 'Nunu', 'Olaf', 'Orianna', 'Ornn',
    'Pantheon', 'Poppy', 'Quinn', 'Rakan', 'Rammus', 'RekSai', 'Renekton',
    'Rengar', 'Riven', 'Rumble', 'Ryze', 'Sejuani', 'Shaco', 'Shen',
    'Shyvana', 'Singed', 'Sion', 'Sivir', 'Skarner', 'Sona', 'Soraka',
    'Swain', 'Syndra', 'TahmKench', 'Taliyah', 'Talon', 'Taric', 'Teemo',
    'Thresh', 'Tristana', 'Trundle', 'Tryndamere', 'TwistedFate', 'Twitch',
    'Udyr', 'Urgot', 'Varus', 'Vayne', 'Veigar', 'Velkoz', 'Vi', 'Viktor',
    'Vladimir', 'Volibear', 'Warwick', 'Xayah', 'Xerath', 'XinZhao',
    'Yasuo', 'Yorick', 'Zac', 'Zed', 'Ziggs', 'Zilean', 'Zyra',
)
for _champion in _CHAMPIONS:
    _cls_name = 'NA_Nami_Mid_' + _champion
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})
del _champion, _cls_name
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
9a9984c9635ff489d7b64f27c2a9e89886b4c9ea
| 327
|
py
|
Python
|
linear_attention_transformer/__init__.py
|
lucidrains/linear-attention
|
24ecf20b11a7c8ddbc15e33a30f0be0cc73b145d
|
[
"MIT"
] | 361
|
2020-06-05T00:34:35.000Z
|
2022-03-29T02:23:16.000Z
|
linear_attention_transformer/__init__.py
|
lucidrains/linear-attention
|
24ecf20b11a7c8ddbc15e33a30f0be0cc73b145d
|
[
"MIT"
] | 11
|
2020-06-14T17:32:35.000Z
|
2021-10-20T22:49:28.000Z
|
linear_attention_transformer/__init__.py
|
lucidrains/linear-attention
|
24ecf20b11a7c8ddbc15e33a30f0be0cc73b145d
|
[
"MIT"
] | 32
|
2020-06-13T22:19:27.000Z
|
2022-03-23T03:41:39.000Z
|
from linear_attention_transformer.linear_attention_transformer import LinearAttentionTransformer, LinearAttentionTransformerLM, LinformerSettings, LinformerContextSettings
from linear_attention_transformer.autoregressive_wrapper import AutoregressiveWrapper
from linear_attention_transformer.images import ImageLinearAttention
| 81.75
| 171
| 0.93578
| 27
| 327
| 11
| 0.518519
| 0.20202
| 0.350168
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045872
| 327
| 3
| 172
| 109
| 0.951923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
9ac3e6b86ea2283c43005cf28260afc9daab1e4a
| 37,725
|
py
|
Python
|
test/integration/test_projects.py
|
Violet26/cfapi
|
c5af1ea54538d20ea1458b4eaf038a7d73fd042d
|
[
"MIT"
] | 75
|
2015-01-06T14:35:37.000Z
|
2021-11-21T16:11:51.000Z
|
test/integration/test_projects.py
|
Violet26/cfapi
|
c5af1ea54538d20ea1458b4eaf038a7d73fd042d
|
[
"MIT"
] | 158
|
2015-01-05T22:42:23.000Z
|
2021-01-29T20:24:55.000Z
|
test/integration/test_projects.py
|
Violet26/cfapi
|
c5af1ea54538d20ea1458b4eaf038a7d73fd042d
|
[
"MIT"
] | 56
|
2015-01-14T01:37:20.000Z
|
2021-04-30T17:47:11.000Z
|
# -- coding: utf-8 --
import json
from datetime import datetime, timedelta
from test.factories import ProjectFactory, OrganizationFactory, IssueFactory
from test.harness import IntegrationTest
from app import db, Issue
class TestProjects(IntegrationTest):
def test_all_projects_order(self):
    '''
    Projects are returned newest-first by last_updated
    '''
    ProjectFactory(name=u'Project 1', last_updated='Mon, 01 Jan 2010 00:00:00 GMT')
    ProjectFactory(name=u'Project 2', last_updated='Tue, 01 Jan 2011 00:00:00 GMT')
    ProjectFactory(name=u'Non Github Project', last_updated='Wed, 01 Jan 2013 00:00:00', github_details=None)
    ProjectFactory(name=u'Project 3', last_updated='Thu, 01 Jan 2014 00:00:00 GMT')
    db.session.commit()
    payload = json.loads(self.app.get('/api/projects').data)
    # Most recently updated project first, oldest last.
    expected_order = [u'Project 3', u'Non Github Project', u'Project 2', u'Project 1']
    for position, expected_name in enumerate(expected_order):
        self.assertEqual(payload['objects'][position]['name'], expected_name)
def test_projects(self):
    '''
    The project list endpoint returns correctly typed fields
    '''
    ProjectFactory()
    db.session.commit()
    payload = json.loads(self.app.get('/api/projects').data)
    assert isinstance(payload, dict)
    assert isinstance(payload['pages'], dict)
    assert isinstance(payload['total'], int)
    assert isinstance(payload['objects'], list)
    # Every serialized field of the first project must carry its expected type.
    first = payload['objects'][0]
    typed_fields = (
        ('categories', unicode),
        ('tags', list),
        ('code_url', unicode),
        ('description', unicode),
        ('github_details', dict),
        ('id', int),
        ('api_url', unicode),
        ('link_url', unicode),
        ('name', unicode),
        ('organization', dict),
        ('organization_name', unicode),
        ('type', unicode),
        ('status', unicode),
        ('languages', list),
    )
    for field, expected_type in typed_fields:
        assert isinstance(first[field], expected_type)
def test_project_search_nonexisting_text(self):
    ''' Searching for text that matches nothing returns empty results from
    both the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'Coder')
    db.session.commit()
    # Neither endpoint should match anything for "ruby".
    for endpoint in ('/api/projects?q=ruby',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 0)
        self.assertEqual(len(payload['objects']), 0)
def test_project_search_existing_text(self):
    ''' Searching for existing text in the project and org/project endpoints
    returns the matching project
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby')
    ProjectFactory(organization_name=organization.name, description=u'python')
    db.session.commit()
    # Exactly one of the two projects mentions "ruby".
    for endpoint in ('/api/projects?q=ruby',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 1)
        self.assertEqual(len(payload['objects']), 1)
def test_project_search_escaped_text(self):
    ''' Queries containing quotes, percent signs and non-ASCII characters
    still match projects in the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'What\'s My \'District')
    ProjectFactory(organization_name=organization.name, description=u'Cöde%%for%%Ameriça')
    db.session.commit()
    # Each query needs escaping in some way and should hit exactly one project.
    queries = (
        '/api/projects?q=What\'s My \'District',
        "/api/organizations/Code-for-San-Francisco/projects?q='District",
        '/api/projects?q=%Ameriça',
        "/api/organizations/Code-for-San-Francisco/projects?q=Cöde%",
    )
    for endpoint in queries:
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 1)
        self.assertEqual(len(payload['objects']), 1)
def test_project_search_existing_phrase(self):
    ''' Searching for an existing multi-word phrase in the project and
    org/project endpoints returns the matching project
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
    ProjectFactory(organization_name=organization.name, description=u'i love lamp')
    db.session.commit()
    # The full phrase matches only the first project.
    for endpoint in ('/api/projects?q=ruby on rails',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby on rails'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 1)
        self.assertEqual(len(payload['objects']), 1)
def test_project_search_existing_part_of_phrase(self):
    ''' Searching for one word out of a longer phrase in the project and
    org/project endpoints returns the matching project
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
    ProjectFactory(organization_name=organization.name, description=u'i love lamp')
    db.session.commit()
    # "ruby" is one word of the first project's phrase; only it should match.
    for endpoint in ('/api/projects?q=ruby',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 1)
        self.assertEqual(len(payload['objects']), 1)
def test_project_search_nonexisting_phrase(self):
    ''' Searching for a term that appears in no project phrase returns no
    results from the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby on rails')
    db.session.commit()
    # "joomla" appears nowhere, so both endpoints should be empty.
    for endpoint in ('/api/projects?q=joomla',
                     '/api/organizations/Code-for-San-Francisco/projects?q=joomla'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 0)
        self.assertEqual(len(payload['objects']), 0)
def test_project_search_order_by_relevance(self):
    ''' Search results from the project and org/project endpoints come back
    most-relevant first by default
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # The older project repeats the query term, so it ranks first.
    for endpoint in ('/api/projects?q=ruby',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload["objects"]), 2)
        self.assertEqual(payload['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_order_by_relevance_requested(self):
    ''' Search results from the project and org/project endpoints come back
    most-relevant first when sort_by=relevance is requested explicitly
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # The repeated-term project must still rank first with explicit sort_by.
    for endpoint in ('/api/projects?q=ruby&sort_by=relevance',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=relevance'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload["objects"]), 2)
        self.assertEqual(payload['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_order_by_last_updated(self):
    ''' Search results from the project and org/project endpoints come back
    in last_updated order when sort_by=last_updated is requested
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # The more recently updated (single-"ruby") project must rank first.
    for endpoint in ('/api/projects?q=ruby&sort_by=last_updated',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload["objects"]), 2)
        self.assertEqual(payload['objects'][0]['description'], 'ruby')
def test_project_search_order_by_last_updated_sort_desc(self):
    ''' Search results from the project and org/project endpoints come back
    in descending last_updated order when sort_dir=desc is requested
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # Descending order puts the most recently updated project first.
    for endpoint in ('/api/projects?q=ruby&sort_by=last_updated&sort_dir=desc',
                     '/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated&sort_dir=desc'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload["objects"]), 2)
        self.assertEqual(payload['objects'][0]['description'], 'ruby')
def test_project_search_order_by_last_updated_sort_asc(self):
    ''' Search results from the project and org/project endpoints come back
        in ascending last_updated order when sort_dir=asc is requested.
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # with sort_dir=asc the oldest match should come back first on both endpoints
    for endpoint in (
            '/api/projects?q=ruby&sort_by=last_updated&sort_dir=asc',
            '/api/organizations/Code-for-San-Francisco/projects?q=ruby&sort_by=last_updated&sort_dir=asc'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload['objects']), 2)
        self.assertEqual(payload['objects'][0]['description'], 'ruby ruby ruby ruby ruby')
def test_project_search_ranked_order(self):
    ''' Search results from the project and org/project endpoints are returned
        with correct ranking values
    '''
    # Four projects; three match "TEST" in different fields (status, tags,
    # description) and the fourth does not match at all.
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, status=u'TEST', last_updated=datetime.now() - timedelta(10000))
    ProjectFactory(organization_name=organization.name, description=u'testing a new thing', last_updated=datetime.now() - timedelta(1))
    ProjectFactory(organization_name=organization.name, tags=[u'test,tags,what,ever'], last_updated=datetime.now() - timedelta(100))
    ProjectFactory(organization_name=organization.name, last_updated=datetime.now())
    db.session.commit()
    project_response = self.app.get('/api/projects?q=TEST')
    project_response = json.loads(project_response.data)
    # only the three matching projects are returned
    self.assertEqual(project_response['total'], 3)
    # ranking puts the status match above the tag match above the
    # description match, regardless of last_updated recency
    self.assertEqual(project_response['objects'][0]['status'], u'TEST')
    self.assertEqual(project_response['objects'][1]['tags'], [u'test,tags,what,ever'])
    self.assertEqual(project_response['objects'][2]['description'], u'testing a new thing')
def test_project_return_only_ids(self):
    ''' Search results from the project and org/project endpoints are returned
        as bare integer IDs when only_ids=true is requested.
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    project_one = ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    project_two = ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # capture the IDs up front so later session activity can't interfere
    expected_ids = [project_one.id, project_two.id]
    for endpoint in (
            '/api/projects?q=ruby&only_ids=true',
            '/api/organizations/Code-for-San-Francisco/projects?q=ruby&only_ids=true'):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(len(payload['objects']), 2)
        # each entry is a plain int, not a serialized project object
        for returned_id in payload['objects']:
            assert isinstance(returned_id, int)
        self.assertEqual(payload['objects'], expected_ids)
def test_project_search_empty_string(self):
    ''' Searching an empty string on the project and org/project endpoints returns all projects
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # q= with no terms should act as a match-all on both endpoints
    for endpoint in ('/api/projects?q=',
                     '/api/organizations/Code-for-San-Francisco/projects?q='):
        payload = json.loads(self.app.get(endpoint).data)
        assert isinstance(payload['total'], int)
        assert isinstance(payload['objects'], list)
        self.assertEqual(payload['total'], 2)
        self.assertEqual(len(payload['objects']), 2)
def test_project_search_tsv_body_not_in_response(self):
    ''' The tsv_body field is not in the response from the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, description=u'ruby ruby ruby ruby ruby', last_updated=datetime.now() - timedelta(10))
    ProjectFactory(organization_name=organization.name, description=u'ruby', last_updated=datetime.now() - timedelta(1))
    db.session.commit()
    # tsv_body is a search-index column and must be stripped from every
    # serialized project on both endpoints
    for endpoint in ('/api/projects?q=',
                     '/api/organizations/Code-for-San-Francisco/projects?q='):
        payload = json.loads(self.app.get(endpoint).data)
        self.assertEqual(len(payload['objects']), 2)
        for serialized_project in payload['objects']:
            self.assertFalse('tsv_body' in serialized_project)
def test_project_orgs_dont_include_tsv(self):
    ''' The tsv_body field is left off of the organization nested inside a project response. '''
    OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=u"Code for San Francisco")
    db.session.commit()
    payload = json.loads(self.app.get('/api/projects').data)
    nested_org = payload['objects'][0]['organization']
    self.assertFalse('tsv_body' in nested_org)
def test_project_search_includes_status(self):
    ''' The status field is included in search results from the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, status=u'Beta')
    ProjectFactory(organization_name=organization.name, status=u'Alpha')
    db.session.commit()
    # a lowercase query should match the 'Alpha' status on both endpoints
    for endpoint in ('/api/projects?q=alpha',
                     '/api/organizations/Code-for-San-Francisco/projects?q=alpha'):
        payload = json.loads(self.app.get(endpoint).data)
        self.assertEqual(len(payload['objects']), 1)
        self.assertEqual(payload['objects'][0]['status'], 'Alpha')
def test_project_search_includes_name(self):
    ''' The name field is included in search results from the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, name=u'My Cool Project')
    ProjectFactory(organization_name=organization.name, name=u'My Dumb Project')
    db.session.commit()
    # searching a word from the name should match only that project
    for endpoint in ('/api/projects?q=cool',
                     '/api/organizations/Code-for-San-Francisco/projects?q=cool'):
        payload = json.loads(self.app.get(endpoint).data)
        self.assertEqual(len(payload['objects']), 1)
        self.assertEqual(payload['objects'][0]['name'], 'My Cool Project')
def test_project_search_includes_tags(self):
    '''
    The tags field is included in search results from the project and org/project endpoints
    '''
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, tags=['mapping', 'philly'])
    ProjectFactory(organization_name=organization.name, tags=['food stamps', 'health'])
    db.session.commit()
    # searching a word from one tag should match only the tagged project
    for endpoint in ('/api/projects?q=stamps',
                     '/api/organizations/Code-for-San-Francisco/projects?q=stamps'):
        payload = json.loads(self.app.get(endpoint).data)
        self.assertEqual(len(payload['objects']), 1)
        self.assertEqual(payload['objects'][0]['tags'], ['food stamps', 'health'])
def test_project_search_includes_organization_name(self):
    '''
    The organization name is included in the project search
    '''
    # Two organizations whose names overlap with project fields of the
    # other org: "Project Two" mentions America in its description, and
    # "Project Four" carries a San Francisco tag.
    organization = OrganizationFactory(name=u"Code for San Francisco")
    ProjectFactory(organization_name=organization.name, name=u"Project One")
    ProjectFactory(organization_name=organization.name, name=u"Project Two", description=u"America")
    organization = OrganizationFactory(name=u"Code for America")
    ProjectFactory(organization_name=organization.name, name=u"Project Three")
    ProjectFactory(organization_name=organization.name, name=u"Project Four", tags=u"San Francisco")
    db.session.commit()
    # Test that org_name matches return before project name
    project_response = self.app.get('/api/projects?q=Code+for+San+Francisco')
    project_response = json.loads(project_response.data)
    self.assertEqual(len(project_response['objects']), 3)
    self.assertEqual(project_response['objects'][0]['name'], u'Project One')
    self.assertEqual(project_response['objects'][1]['name'], u'Project Two')
    # the tag-only match ranks below the two org-name matches
    self.assertEqual(project_response['objects'][2]['name'], u'Project Four')
    self.assertTrue('San Francisco' in project_response['objects'][2]['tags'])
    # Test that org name matches return before project description
    project_response = self.app.get('/api/projects?q=Code for America')
    project_response = json.loads(project_response.data)
    self.assertEqual(len(project_response['objects']), 3)
    self.assertEqual(project_response['objects'][0]['name'], u'Project Three')
    self.assertEqual(project_response['objects'][1]['name'], u'Project Four')
    # the description-only match ranks last
    self.assertEqual(project_response['objects'][2]['name'], u'Project Two')
    self.assertEqual(project_response['objects'][2]['description'], u'America')
def test_project_organzation_type_filter(self):
    '''
    Test searching for projects from certain types of organizations.

    Projects must always come back ordered by last_updated (newest first),
    regardless of the order the organization types appear in the query.
    NOTE(review): "organzation" is a typo in the method name; kept as-is
    so any selection of this test by name keeps working.
    '''
    def assert_projects(query, expected_names):
        # Fetch /api/projects plus `query` and assert the returned project
        # names equal `expected_names`, in order.
        response = self.app.get('/api/projects' + query)
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.data)
        self.assertEqual(payload['total'], len(expected_names))
        self.assertEqual([item['name'] for item in payload['objects']],
                         expected_names)

    brigade = OrganizationFactory(name=u'Brigade Org', type=u'Brigade, midwest')
    code_for_all = OrganizationFactory(name=u'Code for All Org', type=u'Code for All')
    gov_org = OrganizationFactory(name=u'Gov Org', type=u'Government')
    # two projects per organization, spaced one day apart so the expected
    # last_updated ordering is unambiguous
    projects = [
        ProjectFactory(name=u'Today Brigade project', organization_name=brigade.name),
        ProjectFactory(name=u'Yesterday Code for All project', organization_name=code_for_all.name, last_updated=datetime.now() - timedelta(days=1)),
        ProjectFactory(name=u'Two days ago Gov project', organization_name=gov_org.name, last_updated=datetime.now() - timedelta(days=2)),
        ProjectFactory(name=u'Three days ago Brigade project', organization_name=brigade.name, last_updated=datetime.now() - timedelta(days=3)),
        ProjectFactory(name=u'Four days ago Code for All project', organization_name=code_for_all.name, last_updated=datetime.now() - timedelta(days=4)),
        ProjectFactory(name=u'Five days ago Gov project', organization_name=gov_org.name, last_updated=datetime.now() - timedelta(days=5)),
    ]
    db.session.add_all(projects)
    db.session.commit()

    all_names = [
        'Today Brigade project',
        'Yesterday Code for All project',
        'Two days ago Gov project',
        'Three days ago Brigade project',
        'Four days ago Code for All project',
        'Five days ago Gov project',
    ]
    # unfiltered, and filtered on every type, both return everything
    # in last_updated order
    assert_projects('', all_names)
    assert_projects('?organization_type=Government,Code+for+All,Brigade', all_names)

    # Brigade + Code for All: same results whichever order the types
    # are listed in the query
    brigade_and_cfa = [
        'Today Brigade project',
        'Yesterday Code for All project',
        'Three days ago Brigade project',
        'Four days ago Code for All project',
    ]
    assert_projects('?organization_type=Brigade,Code+for+All', brigade_and_cfa)
    assert_projects('?organization_type=Code+for+All,Brigade', brigade_and_cfa)

    # Code for All + Government: likewise order-independent
    cfa_and_gov = [
        'Yesterday Code for All project',
        'Two days ago Gov project',
        'Four days ago Code for All project',
        'Five days ago Gov project',
    ]
    assert_projects('?organization_type=Code+for+All,Government', cfa_and_gov)
    assert_projects('?organization_type=Government,Code+for+All', cfa_and_gov)
def test_project_cascading_deletes(self):
    ''' Test that issues get deleted when their parent
        project and org is deleted
    '''
    # set up test objects and delete a project
    organization = OrganizationFactory(name=u'TEST ORG')
    db.session.flush()
    project = ProjectFactory(organization_name=organization.name, name=u'TEST PROJECT')
    db.session.flush()
    issue = IssueFactory(title=u'TEST ISSUE', project_id=project.id)
    another_issue = IssueFactory(title=u'ANOTHER TEST ISSUE', project_id=project.id)
    a_third_issue = IssueFactory(title=u'A THIRD TEST ISSUE', project_id=project.id)
    db.session.commit()
    # make sure the issues are in the db
    issues = db.session.query(Issue).all()
    self.assertTrue(len(issues) == 3)
    # raw SQL delete so the cascade is exercised at the database level,
    # not via the ORM's in-session cascade logic
    db.session.execute('DELETE FROM project')
    db.session.commit()
    # deleting the projects should have removed every issue
    issues = db.session.query(Issue).all()
    self.assertFalse(len(issues))
    # delete an organization
    project = ProjectFactory(organization_name=organization.name, name=u'TEST PROJECT')
    db.session.flush()
    issue = IssueFactory(title=u'TEST ISSUE', project_id=project.id)
    another_issue = IssueFactory(title=u'ANOTHER TEST ISSUE', project_id=project.id)
    a_third_issue = IssueFactory(title=u'A THIRD TEST ISSUE', project_id=project.id)
    db.session.add(issue)
    db.session.add(another_issue)
    db.session.add(a_third_issue)
    db.session.commit()
    # make sure the issues are in the db
    issues = db.session.query(Issue).all()
    self.assertTrue(len(issues) == 3)
    # deleting the organization should cascade through projects to issues
    db.session.execute('DELETE FROM organization')
    db.session.commit()
    issues = db.session.query(Issue).all()
    self.assertFalse(len(issues))
def test_include_issues(self):
    """ Test the include_issues flag """
    project = ProjectFactory()
    db.session.commit()
    IssueFactory(project_id=project.id)
    db.session.commit()
    # include_issues=True inlines the issue objects as a list
    got = self.app.get("/api/projects?include_issues=True")
    project = json.loads(got.data)['objects'][0]
    self.assertTrue(isinstance(project['issues'], list))
    # include_issues=False returns a URL pointing at the project's issues
    # endpoint instead of an inlined list (hard-coded id 1 assumes this is
    # the first project created in a fresh test database)
    got = self.app.get("/api/projects?include_issues=False")
    project = json.loads(got.data)['objects'][0]
    self.assertFalse(isinstance(project['issues'], list))
    self.assertEqual("http://localhost/api/projects/1/issues", project["issues"])
    # omitting the flag behaves the same as include_issues=False
    got = self.app.get("/api/projects")
    project = json.loads(got.data)['objects'][0]
    self.assertFalse(isinstance(project['issues'], list))
    self.assertEqual("http://localhost/api/projects/1/issues", project["issues"])
| 58.945313
| 176
| 0.700305
| 4,581
| 37,725
| 5.615586
| 0.050207
| 0.156851
| 0.085364
| 0.025773
| 0.899631
| 0.874908
| 0.835024
| 0.794869
| 0.765598
| 0.743635
| 0
| 0.008702
| 0.1745
| 37,725
| 639
| 177
| 59.037559
| 0.817353
| 0.0677
| 0
| 0.641548
| 0
| 0.010183
| 0.197911
| 0.053813
| 0
| 0
| 0
| 0
| 0.452138
| 1
| 0.050917
| false
| 0
| 0.010183
| 0
| 0.063136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ac67cfd6a47123a1d106e8ab5c77557462c64ff
| 181
|
py
|
Python
|
tests/unit/test_utils/tests_example_datasets/test_example_get_games.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 21
|
2021-11-23T13:01:36.000Z
|
2022-03-23T03:45:30.000Z
|
tests/unit/test_utils/tests_example_datasets/test_example_get_games.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 217
|
2021-11-23T00:11:01.000Z
|
2022-03-30T08:11:49.000Z
|
tests/unit/test_utils/tests_example_datasets/test_example_get_games.py
|
RelevanceAI/RelevanceAI
|
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
|
[
"Apache-2.0"
] | 4
|
2022-01-04T01:48:30.000Z
|
2022-02-11T03:19:32.000Z
|
import pytest
def test_get_games_dataset_subset():
    """Requesting 100 documents from the games dataset yields exactly 100."""
    from relevanceai.utils.datasets import get_games_dataset

    documents = get_games_dataset(number_of_documents=100)
    assert len(documents) == 100
| 22.625
| 65
| 0.80663
| 26
| 181
| 5.230769
| 0.692308
| 0.176471
| 0.330882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037975
| 0.127072
| 181
| 7
| 66
| 25.857143
| 0.822785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b11b2411f73efd25fb3f22151db8fafae90d2c78
| 149
|
py
|
Python
|
deepmath/base_classes/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | 2
|
2019-05-18T22:47:38.000Z
|
2019-05-19T00:27:13.000Z
|
deepmath/base_classes/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | null | null | null |
deepmath/base_classes/__init__.py
|
mathraim/deepmath
|
1f0a75b26763d62b1d0cdcf1355cf7218ddcb09a
|
[
"MIT"
] | null | null | null |
from deepmath.base_classes.layer import Layer
from deepmath.base_classes.network import Network
from deepmath.base_classes.optimizer import Optimizer
| 49.666667
| 53
| 0.885906
| 21
| 149
| 6.142857
| 0.380952
| 0.27907
| 0.372093
| 0.534884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073826
| 149
| 3
| 53
| 49.666667
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b163a81f4fabfeaa2e0f8b6ed361d817321e22cd
| 133
|
py
|
Python
|
python/shared_global/main.py
|
mbr0wn/snippets
|
f8cc86d73cf3373348135be26c6478798a639a21
|
[
"WTFPL"
] | null | null | null |
python/shared_global/main.py
|
mbr0wn/snippets
|
f8cc86d73cf3373348135be26c6478798a639a21
|
[
"WTFPL"
] | null | null | null |
python/shared_global/main.py
|
mbr0wn/snippets
|
f8cc86d73cf3373348135be26c6478798a639a21
|
[
"WTFPL"
] | null | null | null |
import shared_mod
import mod1
print "mod1: bar == ", shared_mod.f.bar
shared_mod.f.bar = 7
print "mod1: bar == ", shared_mod.f.bar
| 16.625
| 39
| 0.699248
| 24
| 133
| 3.708333
| 0.333333
| 0.404494
| 0.404494
| 0.438202
| 0.707865
| 0.561798
| 0.561798
| 0
| 0
| 0
| 0
| 0.035714
| 0.157895
| 133
| 7
| 40
| 19
| 0.758929
| 0
| 0
| 0.4
| 0
| 0
| 0.195489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.4
| null | null | 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
493a16c5ba6adf27c833360189db84331a67a7c8
| 4,321
|
py
|
Python
|
tests/test_node_wrapping.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | 10
|
2019-01-18T17:45:18.000Z
|
2019-10-05T08:58:17.000Z
|
tests/test_node_wrapping.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | null | null | null |
tests/test_node_wrapping.py
|
StevenCostiou/reflectivipy
|
750ed93cfb463304958e590d895c76169caa4b98
|
[
"MIT"
] | null | null | null |
import pytest
import ast
from .ReflectivityExample import *
import reflectivipy
from reflectivipy import MetaLink
@pytest.fixture(autouse=True)
def setup():
    # Uninstall every metalink before each test so wrapping state never
    # leaks between tests.
    reflectivipy.uninstall_all()
def test_wrap_expr():
    ''' flat_wrap on a bare Expr node is the identity transformation,
        both with and without a link attached. '''
    node = expr_sample_node()
    assert type(node) is ast.Expr
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 1
    assert transformation[0] is node
    # attaching a 'before' link does not change the flat_wrap result
    # for a plain Expr node
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.links.append(link)
    assert type(node) is ast.Expr
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 1
    assert transformation[0] is node
def test_wrap_call():
    ''' flat_wrap on a linked Call node produces four statements, hoisting
        the call's argument and receiver into temp assignments. '''
    node = call_sample_node().value
    assert type(node) is ast.Call
    # without a link, flat_wrap is the identity
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 1
    assert transformation[0] is node
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.links.append(link)
    assert type(node) is ast.Call
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 4
    # the first statement assigns the original argument expression to a temp
    assert type(transformation[0]) is ast.Assign
    assert transformation[0].value is node.args[0]
    assert transformation[0] is not node
    # the rewritten call reads its argument and receiver from the temp names
    assert len(transformation[3].value.args) == 1
    assert type(transformation[3].value.args[0]) is ast.Name
    assert transformation[3].value.args[0].id is node.args[0].temp_name
    assert transformation[3].value.func.value.id is node.func.value.temp_name
def test_wrap_call_in_assign():
    ''' Wrapping a linked call on the right-hand side of an assignment
        hoists the argument and the receiver, yielding five statements. '''
    node = method_with_args_sample_node().body[0].body[0]
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.value.links.append(link)
    assert type(node) is ast.Assign
    assert type(node.value) is ast.Call
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 5
    # the argument is hoisted first, then the receiver ('self')
    assert type(transformation[0]) == ast.Assign
    assert transformation[0].value is node.value.args[0]
    assert type(transformation[1]) is ast.Assign
    assert transformation[1].value.id == 'self'
    assert transformation[0] is not node
    # the rewritten call reads from the hoisted temp names
    assert len(transformation[3].value.args) == 1
    assert type(transformation[3].value.args[0]) is ast.Name
    assert transformation[3].value.args[0].id is node.value.args[0].temp_name
    assert transformation[3].value.func.value.id is node.value.func.value.temp_name
def test_wrap_complex_expr_call():
    ''' A link on a nested call argument hoists that sub-expression into
        its own temp assignment within a six-statement transformation. '''
    node = complex_expr_call_sample_node()
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.value.args[0].links.append(link)
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 6
    # the linked argument becomes a fresh Assign carrying the same rf_id
    assert type(transformation[3]) is ast.Assign
    assert transformation[3].value.rf_id is node.value.args[0].rf_id
    assert transformation[3] is not node
def test_call_receiver_flattening():
    ''' A complex receiver expression is flattened: the rewritten call
        reads its receiver from the hoisted temp's target. '''
    node = call_with_complex_receiver_sample_node()
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.value.links.append(link)
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 4
    # the inner receiver chain starts at 'self'...
    assert transformation[1].value.func.value.id == 'self'
    # ...and the final call's receiver is the temp assigned just above
    assert transformation[3].value.func.value.id == transformation[1].targets[0].id
def test_call_flattening():
    ''' Linking the receiver sub-call itself adds an extra hoisting step;
        each rewritten call reads from the previous assignment's target. '''
    node = call_with_complex_receiver_sample_node()
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.value.func.value.links.append(link)
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 5
    assert transformation[1].value.id == 'self'
    # temp targets chain through the statements: each call's receiver is
    # the target of the assignment immediately before it
    assert transformation[3].value.func.value.id == transformation[1].targets[0].id
    assert transformation[4].value.func.value.id == transformation[3].targets[0].id
def test_wrap_assign():
    ''' flat_wrap on an Assign is the identity until a link is attached;
        then the right-hand side is hoisted, yielding three statements. '''
    node = sample_node()
    assert type(node) is ast.Assign
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 1
    assert transformation[0] is node
    link = MetaLink(ReflectivityExample(), 'tag_exec_', 'before', [])
    node.links.append(link)
    assert type(node) is ast.Assign
    transformation = node.wrapper.flat_wrap()
    assert len(transformation) == 3
    # the original RHS expression moves into the first hoisted assignment
    assert type(transformation[0]) is ast.Assign
    assert transformation[0].value is node.value
    assert transformation[0] is not node
def test_flatten_children():
    # TODO: placeholder — no assertions for child flattening yet
    pass
| 31.311594
| 83
| 0.715112
| 578
| 4,321
| 5.216263
| 0.103806
| 0.145937
| 0.091542
| 0.096186
| 0.822222
| 0.781095
| 0.763516
| 0.738308
| 0.72272
| 0.697512
| 0
| 0.01686
| 0.162694
| 4,321
| 137
| 84
| 31.540146
| 0.816473
| 0
| 0
| 0.57732
| 0
| 0
| 0.027077
| 0
| 0
| 0
| 0
| 0
| 0.505155
| 1
| 0.092784
| false
| 0.010309
| 0.051546
| 0
| 0.14433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
493dfa9daf12b9dbaa44646b218a2be60c66ee2d
| 11,524
|
py
|
Python
|
stubs.min/System/Security/AccessControl_parts/DirectoryObjectSecurity.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/System/Security/AccessControl_parts/DirectoryObjectSecurity.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/Security/AccessControl_parts/DirectoryObjectSecurity.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
class DirectoryObjectSecurity(ObjectSecurity):
""" Provides the ability to control access to directory objects without direct manipulation of Access Control Lists (ACLs). """
def AccessRuleFactory(self,identityReference,accessMask,isInherited,inheritanceFlags,propagationFlags,type,objectType=None,inheritedObjectType=None):
"""
AccessRuleFactory(self: DirectoryObjectSecurity,identityReference: IdentityReference,accessMask: int,isInherited: bool,inheritanceFlags: InheritanceFlags,propagationFlags: PropagationFlags,type: AccessControlType,objectType: Guid,inheritedObjectType: Guid) -> AccessRule
Initializes a new instance of the System.Security.AccessControl.AccessRule
class with the specified values.
identityReference: The identity to which the access rule applies. It must be an object that can
be cast as a System.Security.Principal.SecurityIdentifier.
accessMask: The access mask of this rule. The access mask is a 32-bit collection of
anonymous bits,the meaning of which is defined by the individual integrators.
isInherited: true if this rule is inherited from a parent container.
inheritanceFlags: Specifies the inheritance properties of the access rule.
propagationFlags: Specifies whether inherited access rules are automatically propagated. The
propagation flags are ignored if inheritanceFlags is set to
System.Security.AccessControl.InheritanceFlags.None.
type: Specifies the valid access control type.
objectType: The identity of the class of objects to which the new access rule applies.
inheritedObjectType: The identity of the class of child objects which can inherit the new access
rule.
Returns: The System.Security.AccessControl.AccessRule object that this method creates.
"""
pass
def AddAccessRule(self,*args):
"""
AddAccessRule(self: DirectoryObjectSecurity,rule: ObjectAccessRule)
Adds the specified access rule to the Discretionary Access Control List (DACL)
associated with this System.Security.AccessControl.DirectoryObjectSecurity
object.
rule: The access rule to add.
"""
pass
def AddAuditRule(self,*args):
"""
AddAuditRule(self: DirectoryObjectSecurity,rule: ObjectAuditRule)
Adds the specified audit rule to the System Access Control List (SACL)
associated with this System.Security.AccessControl.DirectoryObjectSecurity
object.
rule: The audit rule to add.
"""
pass
def AuditRuleFactory(self,identityReference,accessMask,isInherited,inheritanceFlags,propagationFlags,flags,objectType=None,inheritedObjectType=None):
"""
AuditRuleFactory(self: DirectoryObjectSecurity,identityReference: IdentityReference,accessMask: int,isInherited: bool,inheritanceFlags: InheritanceFlags,propagationFlags: PropagationFlags,flags: AuditFlags,objectType: Guid,inheritedObjectType: Guid) -> AuditRule
Initializes a new instance of the System.Security.AccessControl.AuditRule class
with the specified values.
identityReference: The identity to which the audit rule applies. It must be an object that can be
cast as a System.Security.Principal.SecurityIdentifier.
accessMask: The access mask of this rule. The access mask is a 32-bit collection of
anonymous bits,the meaning of which is defined by the individual integrators.
isInherited: true if this rule is inherited from a parent container.
inheritanceFlags: Specifies the inheritance properties of the audit rule.
propagationFlags: Specifies whether inherited audit rules are automatically propagated. The
propagation flags are ignored if inheritanceFlags is set to
System.Security.AccessControl.InheritanceFlags.None.
flags: Specifies the conditions for which the rule is audited.
objectType: The identity of the class of objects to which the new audit rule applies.
inheritedObjectType: The identity of the class of child objects which can inherit the new audit rule.
Returns: The System.Security.AccessControl.AuditRule object that this method creates.
"""
pass
def GetAccessRules(self, includeExplicit, includeInherited, targetType):
    """GetAccessRules(self: DirectoryObjectSecurity, includeExplicit: bool,
    includeInherited: bool, targetType: Type) -> AuthorizationRuleCollection

    Return the access rules associated with the specified security identifier.

    includeExplicit: include rules explicitly set on the object.
    includeInherited: include inherited rules.
    targetType: identifier type to retrieve rules for (castable to
        SecurityIdentifier).
    Returns: the matching AuthorizationRuleCollection.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def GetAuditRules(self, includeExplicit, includeInherited, targetType):
    """GetAuditRules(self: DirectoryObjectSecurity, includeExplicit: bool,
    includeInherited: bool, targetType: Type) -> AuthorizationRuleCollection

    Return the audit rules associated with the specified security identifier.

    includeExplicit: include rules explicitly set on the object.
    includeInherited: include inherited rules.
    targetType: identifier type to retrieve rules for (castable to
        SecurityIdentifier).
    Returns: the matching AuthorizationRuleCollection.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAccessRule(self, *args):
    """RemoveAccessRule(self: DirectoryObjectSecurity, rule: ObjectAccessRule) -> bool

    Remove access rules with the same security identifier and access mask as
    the given rule from the Discretionary Access Control List (DACL) of this
    DirectoryObjectSecurity object.

    rule: the access rule to remove.
    Returns: True if a rule was removed, otherwise False.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAccessRuleAll(self, *args):
    """RemoveAccessRuleAll(self: DirectoryObjectSecurity, rule: ObjectAccessRule)

    Remove every access rule whose security identifier matches the given
    rule's from the DACL of this DirectoryObjectSecurity object.

    rule: the access rule to remove.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAccessRuleSpecific(self, *args):
    """RemoveAccessRuleSpecific(self: DirectoryObjectSecurity, rule: ObjectAccessRule)

    Remove every access rule that exactly matches the given rule from the
    DACL of this DirectoryObjectSecurity object.

    rule: the access rule to remove.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAuditRule(self, *args):
    """RemoveAuditRule(self: DirectoryObjectSecurity, rule: ObjectAuditRule) -> bool

    Remove audit rules with the same security identifier and access mask as
    the given rule from the System Access Control List (SACL) of this object.

    rule: the audit rule to remove.
    Returns: True if a rule was removed, otherwise False.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAuditRuleAll(self, *args):
    """RemoveAuditRuleAll(self: DirectoryObjectSecurity, rule: ObjectAuditRule)

    Remove every audit rule whose security identifier matches the given
    rule's from the SACL of this DirectoryObjectSecurity object.

    rule: the audit rule to remove.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def RemoveAuditRuleSpecific(self, *args):
    """RemoveAuditRuleSpecific(self: DirectoryObjectSecurity, rule: ObjectAuditRule)

    Remove every audit rule that exactly matches the given rule from the
    SACL of this DirectoryObjectSecurity object.

    rule: the audit rule to remove.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def ResetAccessRule(self, *args):
    """ResetAccessRule(self: DirectoryObjectSecurity, rule: ObjectAccessRule)

    Remove all access rules in the DACL of this DirectoryObjectSecurity
    object, then add the specified access rule.

    rule: the access rule to reset to.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def SetAccessRule(self, *args):
    """SetAccessRule(self: DirectoryObjectSecurity, rule: ObjectAccessRule)

    Remove all access rules sharing the given rule's security identifier and
    qualifier from the DACL of this DirectoryObjectSecurity object, then add
    the specified access rule.

    rule: the access rule to set.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
def SetAuditRule(self, *args):
    """SetAuditRule(self: DirectoryObjectSecurity, rule: ObjectAuditRule)

    Remove all audit rules sharing the given rule's security identifier and
    qualifier from the SACL of this DirectoryObjectSecurity object, then add
    the specified audit rule.

    rule: the audit rule to set.

    .NET stub: the real implementation is provided by the CLR at runtime.
    """
    pass
@staticmethod
def __new__(self,*args): #cannot find CLR constructor
    # Generated stub for the CLR constructor overloads; no Python-side body.
    """
    __new__(cls: type)
    __new__(cls: type,securityDescriptor: CommonSecurityDescriptor)
    """
    pass
# Auto-generated .NET property placeholders: property(getter, setter, deleter)
# triples whose lambdas are stand-ins only — the real accessors are supplied
# by the CLR at runtime. The bare string after each assignment is the
# generator's doc text for the property.
AccessRulesModified=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value that specifies whether the access rules associated with this System.Security.AccessControl.ObjectSecurity object have been modified.
"""
AuditRulesModified=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value that specifies whether the audit rules associated with this System.Security.AccessControl.ObjectSecurity object have been modified.
"""
GroupModified=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value that specifies whether the group associated with the securable object has been modified.
"""
IsContainer=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a Boolean value that specifies whether this System.Security.AccessControl.ObjectSecurity object is a container object.
"""
IsDS=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a Boolean value that specifies whether this System.Security.AccessControl.ObjectSecurity object is a directory object.
"""
OwnerModified=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a Boolean value that specifies whether the owner of the securable object has been modified.
"""
| 44.153257
| 274
| 0.746789
| 1,316
| 11,524
| 6.530395
| 0.138298
| 0.043984
| 0.065976
| 0.054108
| 0.800326
| 0.770538
| 0.730742
| 0.715616
| 0.696765
| 0.624738
| 0
| 0.000432
| 0.195852
| 11,524
| 260
| 275
| 44.323077
| 0.926945
| 0.722753
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b8e262c7a2c4af2a5374ec6767adf09353366f0c
| 5,012
|
py
|
Python
|
tests/ad/profiles/test_profiles_api.py
|
Rogdham/pyTenable
|
79f3f7360f8ef31b964f1db99d0c7b8a0bc25d7a
|
[
"MIT"
] | 1
|
2022-03-01T17:17:19.000Z
|
2022-03-01T17:17:19.000Z
|
tests/ad/profiles/test_profiles_api.py
|
Rogdham/pyTenable
|
79f3f7360f8ef31b964f1db99d0c7b8a0bc25d7a
|
[
"MIT"
] | 25
|
2021-11-16T18:41:36.000Z
|
2022-03-25T05:43:31.000Z
|
tests/ad/profiles/test_profiles_api.py
|
Rogdham/pyTenable
|
79f3f7360f8ef31b964f1db99d0c7b8a0bc25d7a
|
[
"MIT"
] | 2
|
2022-03-02T12:24:40.000Z
|
2022-03-29T05:12:04.000Z
|
import responses
from tests.ad.conftest import RE_BASE
@responses.activate
def test_profiles_list(api):
    """List profiles: the mocked GET /profiles payload comes back as a list
    with camelCase keys converted to snake_case."""
    mocked = {
        'id': 1,
        'name': 'profile name',
        'deleted': False,
        'directories': [1, 2],
        'dirty': True,
        'hasEverBeenCommitted': True,
    }
    responses.add(responses.GET, f'{RE_BASE}/profiles', json=[mocked])
    profiles = api.profiles.list()
    assert isinstance(profiles, list)
    assert len(profiles) == 1
    profile = profiles[0]
    assert profile['id'] == 1
    assert profile['name'] == 'profile name'
    assert profile['deleted'] is False
    assert profile['directories'] == [1, 2]
    assert profile['dirty'] is True
    assert profile['has_ever_been_committed'] is True
@responses.activate
def test_profiles_create(api):
    """Create a profile: the mocked POST /profiles payload comes back as a
    list with camelCase keys converted to snake_case."""
    mocked = {
        'id': 1,
        'name': 'profile name',
        'deleted': False,
        'directories': [1, 2],
        'dirty': True,
        'hasEverBeenCommitted': True,
    }
    responses.add(responses.POST, f'{RE_BASE}/profiles', json=[mocked])
    created = api.profiles.create(name='profile name', directories=[1, 2])
    assert isinstance(created, list)
    assert len(created) == 1
    profile = created[0]
    assert profile['id'] == 1
    assert profile['name'] == 'profile name'
    assert profile['deleted'] is False
    assert profile['directories'] == [1, 2]
    assert profile['dirty'] is True
    assert profile['has_ever_been_committed'] is True
@responses.activate
def test_profiles_details(api):
    """Fetch one profile: the mocked GET /profiles/1 payload comes back as a
    dict with camelCase keys converted to snake_case."""
    mocked = {
        'id': 1,
        'name': 'profile name',
        'deleted': False,
        'directories': [1, 2],
        'dirty': True,
        'hasEverBeenCommitted': True,
    }
    responses.add(responses.GET, f'{RE_BASE}/profiles/1', json=mocked)
    profile = api.profiles.details(profile_id='1')
    assert isinstance(profile, dict)
    assert profile['id'] == 1
    assert profile['name'] == 'profile name'
    assert profile['deleted'] is False
    assert profile['directories'] == [1, 2]
    assert profile['dirty'] is True
    assert profile['has_ever_been_committed'] is True
@responses.activate
def test_profiles_update(api):
    """Update a profile: the mocked PATCH /profiles/1 payload comes back as
    a dict with camelCase keys converted to snake_case.

    NOTE(review): the mock returns deleted=False regardless of the
    deleted=True request argument, so the assertion below reflects the mock,
    not a round-trip of the request body.
    """
    mocked = {
        'id': 1,
        'name': 'profile name',
        'deleted': False,
        'directories': [1, 2],
        'dirty': True,
        'hasEverBeenCommitted': True,
    }
    responses.add(responses.PATCH, f'{RE_BASE}/profiles/1', json=mocked)
    profile = api.profiles.update(profile_id='1',
                                  name='profile name',
                                  deleted=True,
                                  directories=[1, 2])
    assert isinstance(profile, dict)
    assert profile['id'] == 1
    assert profile['name'] == 'profile name'
    assert profile['deleted'] is False
    assert profile['directories'] == [1, 2]
    assert profile['dirty'] is True
    assert profile['has_ever_been_committed'] is True
@responses.activate
def test_profiles_delete(api):
    """Delete a profile: DELETE /profiles/1 yields no response body."""
    responses.add(responses.DELETE, f'{RE_BASE}/profiles/1', json=None)
    result = api.profiles.delete(profile_id='1')
    assert result is None
@responses.activate
def test_profiles_copy_profile(api):
    """Copy a profile: the mocked POST /profiles/from/1 payload comes back
    as a dict with camelCase keys converted to snake_case."""
    mocked = {
        'id': 1,
        'name': 'copied profile',
        'deleted': False,
        'directories': [1, 2],
        'dirty': True,
        'hasEverBeenCommitted': True,
    }
    responses.add(responses.POST, f'{RE_BASE}/profiles/from/1', json=mocked)
    copy = api.profiles.copy_profile(from_id='1',
                                     name='copied profile',
                                     directories=[1, 2])
    assert isinstance(copy, dict)
    assert copy['id'] == 1
    assert copy['name'] == 'copied profile'
    assert copy['deleted'] is False
    assert copy['directories'] == [1, 2]
    assert copy['dirty'] is True
    assert copy['has_ever_been_committed'] is True
@responses.activate
def test_profiles_commit(api):
    """Commit a profile: POST /profiles/1/commit yields no response body."""
    responses.add(responses.POST, f'{RE_BASE}/profiles/1/commit', json=None)
    result = api.profiles.commit(profile_id='1')
    assert result is None
@responses.activate
def test_profiles_unstage(api):
    """Unstage a profile: POST /profiles/1/unstage yields no response body."""
    responses.add(responses.POST, f'{RE_BASE}/profiles/1/unstage', json=None)
    result = api.profiles.unstage(profile_id='1')
    assert result is None
| 31.923567
| 59
| 0.48763
| 503
| 5,012
| 4.763419
| 0.101392
| 0.13773
| 0.070534
| 0.080134
| 0.882721
| 0.823456
| 0.799249
| 0.788397
| 0.788397
| 0.736227
| 0
| 0.020308
| 0.390862
| 5,012
| 156
| 60
| 32.128205
| 0.764494
| 0
| 0
| 0.719424
| 0
| 0
| 0.169393
| 0.038907
| 0
| 0
| 0
| 0
| 0.28777
| 1
| 0.057554
| false
| 0
| 0.014388
| 0
| 0.071942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77198b8f46726c1942cfb66f7f167bef43169b05
| 10,361
|
py
|
Python
|
src/python/test/cgp/test_cgp.py
|
konopczynski/nifty
|
dc02ac60febaabfaf9b2ee5a854bb61436ebdc97
|
[
"MIT"
] | 38
|
2016-06-29T07:42:50.000Z
|
2021-12-09T09:25:25.000Z
|
src/python/test/cgp/test_cgp.py
|
tbullmann/nifty
|
00119fd4753817b931272d6d3120b6ebd334882a
|
[
"MIT"
] | 62
|
2016-07-27T16:07:53.000Z
|
2022-03-30T17:24:36.000Z
|
src/python/test/cgp/test_cgp.py
|
tbullmann/nifty
|
00119fd4753817b931272d6d3120b6ebd334882a
|
[
"MIT"
] | 20
|
2016-01-25T21:21:52.000Z
|
2021-12-09T09:25:16.000Z
|
import nifty.cgp as ncgp
import nifty.graph.rag as nrag
import unittest
import nifty
import unittest
import nifty.cgp as ncgp
import numpy
numpy.random.seed(42)
class TestCgp2d(unittest.TestCase):
    """Tests for the 2D cell complex (cgp) TopologicalGrid2D.

    The four corner-case tests differed only in the input segmentation and
    the expected cell counts; the three randomized tests differed only in
    shape, label range and repetition count. Both groups are factored into
    private helpers. The original randomized tests wrapped geometry
    extraction in a bare ``except:`` that printed and called ``sys.exit()``,
    which aborts the whole test run — that is replaced with ``self.fail``.
    """

    def _check_geometry(self, t_grid, number_of_cells):
        """Extract geometry in every fill/sort combination and check that each
        cell order yields exactly numberOfCells[order] geometries."""
        for fill in (True, False):
            for sort_cells in (True, False):
                geometry = t_grid.extractCellsGeometry(fill=fill,
                                                       sort1Cells=sort_cells)
                for order in (0, 1, 2):
                    self.assertEqual(len(geometry[order]),
                                     number_of_cells[order])

    def _check_3x3_grid(self, seg, expected_cells):
        """Build a TopologicalGrid2D from a 3x3 uint32 segmentation and verify
        cell counts, bounds relations, reverse mappings and geometry."""
        t_grid = ncgp.TopologicalGrid2D(seg)
        number_of_cells = t_grid.numberOfCells
        self.assertEqual(number_of_cells, expected_cells)
        self.assertEqual(t_grid.topologicalGridShape, [5, 5])
        self.assertEqual(t_grid.shape, [3, 3])
        # Bounds: cell-0 (junctions) bound cell-1 (edges), cell-1 bound
        # cell-2 (regions); the reverse mappings invert those relations, so
        # their sizes must match the corresponding cell counts.
        bounds = t_grid.extractCellsBounds()
        self.assertEqual(len(bounds[0]), expected_cells[0])
        self.assertEqual(len(bounds[1]), expected_cells[1])
        self.assertEqual(len(bounds[0].reverseMapping()), expected_cells[1])
        self.assertEqual(len(bounds[1].reverseMapping()), expected_cells[2])
        self._check_geometry(t_grid, number_of_cells)

    def _check_random_segmentation(self, shape, low, high):
        """Run one random connected-component segmentation through the cgp
        pipeline; geometry extraction must succeed for every input."""
        size = shape[0] * shape[1]
        labels = numpy.random.randint(low, high, size=size).reshape(shape)
        grid_graph = nifty.graph.undirectedGridGraph(shape)
        cc = nifty.graph.connectedComponentsFromNodeLabels(grid_graph,
                                                           labels.ravel())
        cc = numpy.require(cc.reshape(shape) + 1, dtype='uint32')
        t_grid = ncgp.TopologicalGrid2D(cc)
        # Labels are dense starting at 1, so the region count equals cc.max().
        self.assertEqual(t_grid.numberOfCells[2], cc.max())
        bounds = t_grid.extractCellsBounds()
        bounds[0].reverseMapping()
        bounds[1].reverseMapping()
        try:
            for fill in (True, False):
                for sort_cells in (True, False):
                    t_grid.extractCellsGeometry(fill=fill,
                                                sort1Cells=sort_cells)
        except Exception:
            # Fail only this test with a diagnostic instead of killing the
            # whole run (the original code printed and called sys.exit()).
            self.fail("extractCellsGeometry raised for segmentation:\n%r"
                      % (cc,))

    def test_corner_case_3x3_grid_a(self):
        # 4 one-cells are active, but there is still no junction (0-cell).
        seg = numpy.array([
            [1, 1, 2],
            [1, 3, 1],
            [1, 1, 1],
        ], dtype='uint32')
        self._check_3x3_grid(seg, [0, 2, 3])

    def test_corner_case_3x3_grid_b(self):
        seg = numpy.array([
            [1, 1, 1],
            [1, 2, 1],
            [1, 1, 1],
        ], dtype='uint32')
        self._check_3x3_grid(seg, [0, 1, 2])

    def test_corner_case_3x3_grid_c(self):
        seg = numpy.array([
            [1, 1, 3],
            [1, 2, 3],
            [1, 1, 3],
        ], dtype='uint32')
        self._check_3x3_grid(seg, [2, 4, 3])

    def test_corner_case_3x3_grid_d(self):
        #        01234
        #   --------------------
        #   0   |1|1|1|   0
        #   1   |-*-*-|   1
        #   2   |1|2|1|   2
        #   3   |-*-*-|   3
        #   4   |1|1|3|   4
        #   ----------------------
        #        01234
        seg = numpy.array([
            [1, 1, 1],
            [1, 2, 1],
            [1, 1, 3],
        ], dtype='uint32').T
        self._check_3x3_grid(seg, [0, 2, 3])

    def test_randomized_big(self):
        for _ in range(100):
            self._check_random_segmentation((10, 20), 0, 4)

    def test_randomized_medium(self):
        for _ in range(1000):
            self._check_random_segmentation((7, 7), 0, 4)

    def test_randomized_small(self):
        for _ in range(3000):
            self._check_random_segmentation((4, 3), 1, 5)
# Allow running this test module directly: python test_cgp.py
if __name__ == '__main__':
    unittest.main()
| 29.434659
| 85
| 0.564328
| 1,017
| 10,361
| 5.715831
| 0.110128
| 0.12042
| 0.139687
| 0.079477
| 0.912266
| 0.904524
| 0.886633
| 0.881129
| 0.881129
| 0.874247
| 0
| 0.040109
| 0.328636
| 10,361
| 351
| 86
| 29.518519
| 0.795572
| 0.044783
| 0
| 0.812227
| 0
| 0
| 0.005675
| 0
| 0
| 0
| 0
| 0
| 0.183406
| 1
| 0.030568
| false
| 0
| 0.043668
| 0
| 0.078603
| 0.021834
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
622ba57ce21ef79c4493b86023a5cd2d14b83914
| 11,448
|
py
|
Python
|
examples/unsupervised_learning/query_generation/example_query_generation.py
|
pepelawycliffe/sentence-transformers
|
4c20567623e7baee905ac460311a5899107c372b
|
[
"Apache-2.0"
] | 3
|
2021-03-15T05:32:43.000Z
|
2021-12-14T07:29:57.000Z
|
examples/unsupervised_learning/query_generation/example_query_generation.py
|
pepelawycliffe/sentence-transformers
|
4c20567623e7baee905ac460311a5899107c372b
|
[
"Apache-2.0"
] | null | null | null |
examples/unsupervised_learning/query_generation/example_query_generation.py
|
pepelawycliffe/sentence-transformers
|
4c20567623e7baee905ac460311a5899107c372b
|
[
"Apache-2.0"
] | null | null | null |
import torch
import numpy as np
import random
from transformers import T5Tokenizer, T5ForConditionalGeneration

# Set all seeds so the sampled generation output is deterministic across runs.
torch.manual_seed(0)
np.random.seed(0)
random.seed(0)

# Paragraphs for which we want to generate search queries.
paragraphs = [
    "Python is an interpreted, high-level and general-purpose programming language. Python's design philosophy emphasizes code readability with its notable use of significant whitespace. Its language constructs and object-oriented approach aim to help programmers write clear, logical code for small and large-scale projects.",
    "Python is dynamically-typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly, procedural), object-oriented and functional programming. Python is often described as a \"batteries included\" language due to its comprehensive standard library.",
    "Python was created in the late 1980s, and first released in 1991, by Guido van Rossum as a successor to the ABC programming language. Python 2.0, released in 2000, introduced new features, such as list comprehensions, and a garbage collection system with reference counting, and was discontinued with version 2.7 in 2020. Python 3.0, released in 2008, was a major revision of the language that is not completely backward-compatible and much Python 2 code does not run unmodified on Python 3. With Python 2's end-of-life (and pip having dropped support in 2021), only Python 3.6.x and later are supported, with older versions still supporting e.g. Windows 7 (and old installers not restricted to 64-bit Windows).",
    "Python interpreters are supported for mainstream operating systems and available for a few more (and in the past supported many more). A global community of programmers develops and maintains CPython, a free and open-source reference implementation. A non-profit organization, the Python Software Foundation, manages and directs resources for Python and CPython development.",
    "As of January 2021, Python ranks third in TIOBE’s index of most popular programming languages, behind C and Java, having previously gained second place and their award for the most popularity gain for 2020.",
    "Java is a class-based, object-oriented programming language that is designed to have as few implementation dependencies as possible. It is a general-purpose programming language intended to let application developers write once, run anywhere (WORA), meaning that compiled Java code can run on all platforms that support Java without the need for recompilation. Java applications are typically compiled to bytecode that can run on any Java virtual machine (JVM) regardless of the underlying computer architecture. The syntax of Java is similar to C and C++, but has fewer low-level facilities than either of them. The Java runtime provides dynamic capabilities (such as reflection and runtime code modification) that are typically not available in traditional compiled languages. As of 2019, Java was one of the most popular programming languages in use according to GitHub, particularly for client-server web applications, with a reported 9 million developers.",
    "Java was originally developed by James Gosling at Sun Microsystems (which has since been acquired by Oracle) and released in 1995 as a core component of Sun Microsystems' Java platform. The original and reference implementation Java compilers, virtual machines, and class libraries were originally released by Sun under proprietary licenses. As of May 2007, in compliance with the specifications of the Java Community Process, Sun had relicensed most of its Java technologies under the GNU General Public License. Oracle offers its own HotSpot Java Virtual Machine, however the official reference implementation is the OpenJDK JVM which is free open source software and used by most developers and is the default JVM for almost all Linux distributions.",
    "As of September 2020, the latest version is Java 15, with Java 11, a currently supported long-term support (LTS) version, released on September 25, 2018. Oracle released the last zero-cost public update for the legacy version Java 8 LTS in January 2019 for commercial use, although it will otherwise still support Java 8 with public updates for personal use indefinitely. Other vendors have begun to offer zero-cost builds of OpenJDK 8 and 11 that are still receiving security and other upgrades.",
    "Oracle (and others) highly recommend uninstalling outdated versions of Java because of serious risks due to unresolved security issues. Since Java 9, 10, 12, 13, and 14 are no longer supported, Oracle advises its users to immediately transition to the latest version (currently Java 15) or an LTS release."
]

# For available models for query generation, see: https://huggingface.co/BeIR/
# Here, we use a T5-large model that was trained on the MS MARCO dataset.
tokenizer = T5Tokenizer.from_pretrained('BeIR/query-gen-msmarco-t5-large')
model = T5ForConditionalGeneration.from_pretrained('BeIR/query-gen-msmarco-t5-large')
model.eval()

# Select the device: prefer GPU when available.
device = 'cuda' if torch.cuda.is_available() else 'cpu'
model.to(device)

# Iterate over the paragraphs and generate three queries for each using
# top-p (nucleus) sampling; no_grad since we only run inference.
with torch.no_grad():
    for para in paragraphs:
        input_ids = tokenizer.encode(para, return_tensors='pt').to(device)
        outputs = model.generate(
            input_ids=input_ids,
            max_length=64,
            do_sample=True,
            top_p=0.95,
            num_return_sequences=3)

        print("\nParagraph:")
        print(para)

        print("\nGenerated Queries:")
        for i in range(len(outputs)):
            query = tokenizer.decode(outputs[i], skip_special_tokens=True)
            print(f'{i + 1}: {query}')
"""
Output of the script:
Paragraph:
Python is an interpreted, high-level and general-purpose programming language. Python's design philosophy emphasizes code readability with its notable use of significant whitespace. Its language constructs and object-oriented approach aim to help programmers write clear, logical code for small and large-scale projects.
Generated Queries:
1: what is python language used for
2: what is python programming
3: what language do i use for scripts
Paragraph:
Python is dynamically-typed and garbage-collected. It supports multiple programming paradigms, including structured (particularly, procedural), object-oriented and functional programming. Python is often described as a "batteries included" language due to its comprehensive standard library.
Generated Queries:
1: what is python language
2: what programming paradigms do python support
3: what programming languages use python
Paragraph:
Python was created in the late 1980s, and first released in 1991, by Guido van Rossum as a successor to the ABC programming language. Python 2.0, released in 2000, introduced new features, such as list comprehensions, and a garbage collection system with reference counting, and was discontinued with version 2.7 in 2020. Python 3.0, released in 2008, was a major revision of the language that is not completely backward-compatible and much Python 2 code does not run unmodified on Python 3. With Python 2's end-of-life (and pip having dropped support in 2021), only Python 3.6.x and later are supported, with older versions still supporting e.g. Windows 7 (and old installers not restricted to 64-bit Windows).
Generated Queries:
1: what year did python start
2: when does the next python update release
3: when did python come out?
Paragraph:
Python interpreters are supported for mainstream operating systems and available for a few more (and in the past supported many more). A global community of programmers develops and maintains CPython, a free and open-source reference implementation. A non-profit organization, the Python Software Foundation, manages and directs resources for Python and CPython development.
Generated Queries:
1: what platform is python available on
2: what is python used for
3: what is python?
Paragraph:
As of January 2021, Python ranks third in TIOBE’s index of most popular programming languages, behind C and Java, having previously gained second place and their award for the most popularity gain for 2020.
Generated Queries:
1: what is the most used programming language in the world
2: what is python language
3: what is the most popular programming language in the world?
Paragraph:
Java is a class-based, object-oriented programming language that is designed to have as few implementation dependencies as possible. It is a general-purpose programming language intended to let application developers write once, run anywhere (WORA), meaning that compiled Java code can run on all platforms that support Java without the need for recompilation. Java applications are typically compiled to bytecode that can run on any Java virtual machine (JVM) regardless of the underlying computer architecture. The syntax of Java is similar to C and C++, but has fewer low-level facilities than either of them. The Java runtime provides dynamic capabilities (such as reflection and runtime code modification) that are typically not available in traditional compiled languages. As of 2019, Java was one of the most popular programming languages in use according to GitHub, particularly for client-server web applications, with a reported 9 million developers.
Generated Queries:
1: java how java works
2: what language is similar to java
3: what is java language
Paragraph:
Java was originally developed by James Gosling at Sun Microsystems (which has since been acquired by Oracle) and released in 1995 as a core component of Sun Microsystems' Java platform. The original and reference implementation Java compilers, virtual machines, and class libraries were originally released by Sun under proprietary licenses. As of May 2007, in compliance with the specifications of the Java Community Process, Sun had relicensed most of its Java technologies under the GNU General Public License. Oracle offers its own HotSpot Java Virtual Machine, however the official reference implementation is the OpenJDK JVM which is free open source software and used by most developers and is the default JVM for almost all Linux distributions.
Generated Queries:
1: what is java created by
2: when was java introduced to linux
3: who developed java?
Paragraph:
As of September 2020, the latest version is Java 15, with Java 11, a currently supported long-term support (LTS) version, released on September 25, 2018. Oracle released the last zero-cost public update for the legacy version Java 8 LTS in January 2019 for commercial use, although it will otherwise still support Java 8 with public updates for personal use indefinitely. Other vendors have begun to offer zero-cost builds of OpenJDK 8 and 11 that are still receiving security and other upgrades.
Generated Queries:
1: what is the latest version of java
2: what is the latest java version
3: what is the latest version of java
Paragraph:
Oracle (and others) highly recommend uninstalling outdated versions of Java because of serious risks due to unresolved security issues. Since Java 9, 10, 12, 13, and 14 are no longer supported, Oracle advises its users to immediately transition to the latest version (currently Java 15) or an LTS release.
Generated Queries:
1: why is oracle not supported
2: what version is oracle used in
3: which java version is obsolete
"""
| 84.8
| 963
| 0.798218
| 1,775
| 11,448
| 5.139155
| 0.246761
| 0.008551
| 0.016773
| 0.016115
| 0.833151
| 0.824271
| 0.819557
| 0.805306
| 0.805306
| 0.79544
| 0
| 0.023909
| 0.159679
| 11,448
| 134
| 964
| 85.432836
| 0.924324
| 0.027254
| 0
| 0
| 0
| 0.236842
| 0.828897
| 0.011382
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.105263
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6274d3f637f5974e072d8e706d7f1b7714bcc2e6
| 232
|
py
|
Python
|
bitskins/exceptions.py
|
Shubbler/bitskins-python
|
3e71008d425c2535b4dbdad9b7b91cb74a1d4aa8
|
[
"MIT"
] | null | null | null |
bitskins/exceptions.py
|
Shubbler/bitskins-python
|
3e71008d425c2535b4dbdad9b7b91cb74a1d4aa8
|
[
"MIT"
] | null | null | null |
bitskins/exceptions.py
|
Shubbler/bitskins-python
|
3e71008d425c2535b4dbdad9b7b91cb74a1d4aa8
|
[
"MIT"
] | null | null | null |
class APIException(Exception):
    """Raised when the remote API reports an error response."""

    def __init__(self, message):
        # Keep the text on the instance as well, so callers can read
        # `exc.message` without parsing str(exc).
        self.message = message
        super().__init__(message)
class UserException(Exception):
    """Raised for errors caused by invalid user input or usage."""

    def __init__(self, message):
        # Mirror the message onto the instance for direct access.
        self.message = message
        super().__init__(message)
| 19.333333
| 31
| 0.74569
| 26
| 232
| 6.038462
| 0.346154
| 0.280255
| 0.203822
| 0.254777
| 0.77707
| 0.77707
| 0.77707
| 0.77707
| 0.77707
| 0.77707
| 0
| 0
| 0.133621
| 232
| 12
| 32
| 19.333333
| 0.781095
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0297c0e71e66e3972b4f0bd7f1641ed21575d246
| 140
|
py
|
Python
|
0x07-python-test_driven_development/6-main.py
|
malu17/alx-higher_level_programming
|
75a24d98c51116b737f339697c75855e34254d3a
|
[
"MIT"
] | 1
|
2022-02-07T12:13:18.000Z
|
2022-02-07T12:13:18.000Z
|
0x07-python-test_driven_development/6-main.py
|
malu17/alx-higher_level_programming
|
75a24d98c51116b737f339697c75855e34254d3a
|
[
"MIT"
] | null | null | null |
0x07-python-test_driven_development/6-main.py
|
malu17/alx-higher_level_programming
|
75a24d98c51116b737f339697c75855e34254d3a
|
[
"MIT"
] | 4
|
2018-05-01T05:04:15.000Z
|
2020-04-28T22:22:54.000Z
|
#!/usr/bin/python3
"""Manual check for the 6-max_integer module."""
max_integer = __import__('6-max_integer').max_integer

# Exercise max_integer on two sample lists; output is one result per line.
for sample in ([1, 2, 3, 4], [1, 3, 4, 2]):
    print(max_integer(sample))
| 23.333333
| 53
| 0.692857
| 25
| 140
| 3.52
| 0.48
| 0.568182
| 0.340909
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07874
| 0.092857
| 140
| 5
| 54
| 28
| 0.614173
| 0.121429
| 0
| 0
| 0
| 0
| 0.106557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
f310a55abbe3b9ee53c9b9523c4ff6cbda0c7ead
| 288
|
py
|
Python
|
rastervision/core/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 4
|
2019-03-11T12:38:15.000Z
|
2021-04-06T14:57:52.000Z
|
rastervision/core/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
rastervision/core/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1
|
2020-04-27T15:21:53.000Z
|
2020-04-27T15:21:53.000Z
|
# flake8: noqa
from rastervision.core.box import *
from rastervision.core.class_map import *
from rastervision.core.command_io_definition import *
from rastervision.core.config import *
from rastervision.core.raster_stats import RasterStats
from rastervision.core.training_data import *
| 32
| 54
| 0.836806
| 38
| 288
| 6.210526
| 0.473684
| 0.40678
| 0.508475
| 0.440678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003846
| 0.097222
| 288
| 8
| 55
| 36
| 0.903846
| 0.041667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b85486a017d3e3f9ef1213d893acc673e8d93065
| 83
|
py
|
Python
|
fetchr/__init__.py
|
fetch-r/py-sdk
|
03fc212458705dfc9c4cd851f412701b4b6f6777
|
[
"MIT"
] | null | null | null |
fetchr/__init__.py
|
fetch-r/py-sdk
|
03fc212458705dfc9c4cd851f412701b4b6f6777
|
[
"MIT"
] | null | null | null |
fetchr/__init__.py
|
fetch-r/py-sdk
|
03fc212458705dfc9c4cd851f412701b4b6f6777
|
[
"MIT"
] | null | null | null |
from .fetchr_client import FetchRClient
from .fetchr_client import FetchRException
| 27.666667
| 42
| 0.879518
| 10
| 83
| 7.1
| 0.6
| 0.28169
| 0.450704
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 83
| 2
| 43
| 41.5
| 0.946667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b86f55aad6c1e1ada9493e905f64abb624e8179c
| 26,007
|
py
|
Python
|
swagger_client/apis/addon_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/addon_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/addon_api.py
|
scubawhere/scubawhere-api-python-client
|
9f8578e251492c7667f785df7b7c9d66e71f5c8e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Scubawhere API Documentation
This is the documentation for scubawhere's RMS API. This API is only to be used by authorized parties with valid auth tokens. [Learn about scubawhere](http://www.scubawhere.com) to become an authorized consumer of our API
OpenAPI spec version: 1.0.0
Contact: bryan@scubawhere.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AddonApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Review note: every generated endpoint carried identical request plumbing
    (kwargs validation, required-parameter checks, query/header assembly,
    `call_api` invocation), so that plumbing is consolidated in the private
    `_call_endpoint` helper. All public method names, signatures, error
    messages and request serialization are unchanged.
    """

    def __init__(self, api_client=None):
        """
        Create the API facade.

        :param api_client: optional pre-configured ApiClient; when omitted,
            the shared client held by the global Configuration is used
            (and lazily created on first need).
        """
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def _call_endpoint(self, method_name, http_method, resource_path,
                       response_type, required_params, optional_params,
                       query_param_names, positional, kwargs):
        """
        Shared request plumbing for every endpoint of this API.

        Validates keyword arguments, checks required parameters, builds the
        query string and headers, and delegates to ``ApiClient.call_api``.

        :param str method_name: public method name, used in error messages
        :param str http_method: HTTP verb ('GET', 'POST', 'PUT', 'DELETE')
        :param str resource_path: endpoint path, e.g. '/addon/add'
        :param str response_type: swagger response type name
        :param list required_params: names of required parameters
        :param list optional_params: names of optional parameters
        :param list query_param_names: parameter names in the exact order the
            generated code serialized them into the query string
        :param dict positional: required positional parameter values by name
        :param dict kwargs: the caller's remaining keyword arguments
        :return: result of ``ApiClient.call_api`` (response data, or the
            request thread when a callback is supplied)
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when a required parameter is missing or None
        """
        all_params = list(required_params) + list(optional_params)
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = dict(positional)
        # dict.items() behaves identically on Python 2 and 3 here, so the
        # six.iteritems indirection of the generated code is unnecessary.
        for key, val in kwargs.items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val

        # verify that every required parameter is set
        for name in required_params:
            if (name not in params) or (params[name] is None):
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, method_name))

        # only parameters actually supplied are serialized, in generated order
        query_params = {}
        for name in query_param_names:
            if name in params:
                query_params[name] = params[name]

        header_params = {}
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        return self.api_client.call_api(
            resource_path.replace('{format}', 'json'), http_method,
            {},                 # path_params: none of these endpoints use any
            query_params,
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=[],   # no authentication configured for this API
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def add_addon(self, name, base_prices, **kwargs):
        """
        Create a new addon.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str name: Name of the type of addon (required)
        :param int base_prices: Prices for addon (required)
        :param str description: Description of the addon
        :return: InlineResponse2002
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.add_addon_with_http_info(name, base_prices, **kwargs)
        (data) = self.add_addon_with_http_info(name, base_prices, **kwargs)
        return data

    def add_addon_with_http_info(self, name, base_prices, **kwargs):
        """
        Create a new addon (full HTTP info variant).

        Same parameters as :meth:`add_addon`; returns the full response
        tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'add_addon', 'POST', '/addon/add', 'InlineResponse2002',
            required_params=['name', 'base_prices'],
            optional_params=['description'],
            query_param_names=['name', 'description', 'base_prices'],
            positional={'name': name, 'base_prices': base_prices},
            kwargs=kwargs)

    def delete_addon(self, **kwargs):
        """
        Delete an addon by ID.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int body: ID of the Addon
        :return: InlineResponse2003
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_addon_with_http_info(**kwargs)
        (data) = self.delete_addon_with_http_info(**kwargs)
        return data

    def delete_addon_with_http_info(self, **kwargs):
        """
        Delete an addon by ID (full HTTP info variant).

        Same parameters as :meth:`delete_addon`; returns the full response
        tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'delete_addon', 'DELETE', '/addon/delete', 'InlineResponse2003',
            required_params=[],
            optional_params=['body'],
            query_param_names=['body'],
            positional={},
            kwargs=kwargs)

    def get_addon(self, id, **kwargs):
        """
        Retrieve an addon by ID.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: ID of the addon to be retrieved (required)
        :return: InlineResponse2001
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_addon_with_http_info(id, **kwargs)
        (data) = self.get_addon_with_http_info(id, **kwargs)
        return data

    def get_addon_with_http_info(self, id, **kwargs):
        """
        Retrieve an addon by ID (full HTTP info variant).

        Same parameters as :meth:`get_addon`; returns the full response
        tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'get_addon', 'GET', '/addon', 'InlineResponse2001',
            required_params=['id'],
            optional_params=[],
            query_param_names=['id'],
            positional={'id': id},
            kwargs=kwargs)

    def get_all_addons(self, **kwargs):
        """
        Retrieve all addons.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: list[Addon]
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_all_addons_with_http_info(**kwargs)
        (data) = self.get_all_addons_with_http_info(**kwargs)
        return data

    def get_all_addons_with_http_info(self, **kwargs):
        """
        Retrieve all addons (full HTTP info variant).

        Same parameters as :meth:`get_all_addons`; returns the full response
        tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'get_all_addons', 'GET', '/addon/all', 'list[Addon]',
            required_params=[],
            optional_params=[],
            query_param_names=[],
            positional={},
            kwargs=kwargs)

    def get_all_with_trashed_addons(self, **kwargs):
        """
        Retrieve all addons including any deleted models.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: list[Addon]
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_all_with_trashed_addons_with_http_info(**kwargs)
        (data) = self.get_all_with_trashed_addons_with_http_info(**kwargs)
        return data

    def get_all_with_trashed_addons_with_http_info(self, **kwargs):
        """
        Retrieve all addons including deleted models (full HTTP info variant).

        Same parameters as :meth:`get_all_with_trashed_addons`; returns the
        full response tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'get_all_with_trashed_addons', 'GET', '/addon/all-with-trashed',
            'list[Addon]',
            required_params=[],
            optional_params=[],
            query_param_names=[],
            positional={},
            kwargs=kwargs)

    def update_addon(self, id, **kwargs):
        """
        Update an Addon.

        Updates the addon by id using the specified fields.
        Synchronous by default; pass a `callback` function to make the
        request asynchronous (it is invoked with the response).

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: ID of the Addon to be updated (required)
        :param str name: Name of the Addon
        :param str description: Description of the Addon
        :return: InlineResponse2002
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.update_addon_with_http_info(id, **kwargs)
        (data) = self.update_addon_with_http_info(id, **kwargs)
        return data

    def update_addon_with_http_info(self, id, **kwargs):
        """
        Update an Addon (full HTTP info variant).

        Same parameters as :meth:`update_addon`; returns the full response
        tuple unless `_return_http_data_only` is set.
        """
        return self._call_endpoint(
            'update_addon', 'PUT', '/addon/edit', 'InlineResponse2002',
            required_params=['id'],
            optional_params=['name', 'description'],
            query_param_names=['id', 'name', 'description'],
            positional={'id': id},
            kwargs=kwargs)
| 38.301915
| 227
| 0.557465
| 2,683
| 26,007
| 5.203876
| 0.084234
| 0.068758
| 0.024065
| 0.030941
| 0.892207
| 0.878671
| 0.877023
| 0.8632
| 0.847658
| 0.834336
| 0
| 0.003498
| 0.362441
| 26,007
| 678
| 228
| 38.358407
| 0.83855
| 0.349214
| 0
| 0.754839
| 0
| 0
| 0.13955
| 0.029414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041935
| false
| 0
| 0.022581
| 0
| 0.125806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b21d33d2be09a0dbb2b2e8419007e762ed151253
| 86,869
|
py
|
Python
|
myems-api/core/combinedequipment.py
|
FanZhen2002/myems
|
222d40a6cf3f81efbe37977963cd806d7f154766
|
[
"MIT"
] | 2
|
2021-09-04T03:39:26.000Z
|
2021-09-05T00:29:20.000Z
|
myems-api/core/combinedequipment.py
|
FanZhen2002/myems
|
222d40a6cf3f81efbe37977963cd806d7f154766
|
[
"MIT"
] | 4
|
2021-09-04T07:50:41.000Z
|
2021-09-05T09:35:20.000Z
|
myems-api/core/combinedequipment.py
|
FanZhen2002/myems
|
222d40a6cf3f81efbe37977963cd806d7f154766
|
[
"MIT"
] | 2
|
2021-09-03T00:01:17.000Z
|
2021-09-06T06:38:20.000Z
|
import falcon
import simplejson as json
import mysql.connector
import config
import uuid
from core.useractivity import user_logger, access_control
class CombinedEquipmentCollection:
    """Falcon resource: list (GET) and create (POST) combined equipments."""
    @staticmethod
    def __init__():
        """ Initializes CombinedEquipmentCollection"""
        pass

    @staticmethod
    def on_options(req, resp):
        # CORS pre-flight: acknowledge without doing any work
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp):
        """Return all combined equipments, each joined with its cost center."""
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        # load every cost center once so rows can be joined in memory below
        query = (" SELECT id, name, uuid "
                 " FROM tbl_cost_centers ")
        cursor.execute(query)
        rows_cost_centers = cursor.fetchall()
        cost_center_dict = dict()
        if rows_cost_centers is not None and len(rows_cost_centers) > 0:
            for row in rows_cost_centers:
                cost_center_dict[row['id']] = {"id": row['id'],
                                               "name": row['name'],
                                               "uuid": row['uuid']}
        query = (" SELECT id, name, uuid, "
                 " is_input_counted, is_output_counted, "
                 " cost_center_id, description "
                 " FROM tbl_combined_equipments "
                 " ORDER BY id ")
        cursor.execute(query)
        rows_combined_equipments = cursor.fetchall()
        result = list()
        if rows_combined_equipments is not None and len(rows_combined_equipments) > 0:
            for row in rows_combined_equipments:
                cost_center = cost_center_dict.get(row['cost_center_id'], None)
                meta_result = {"id": row['id'],
                               "name": row['name'],
                               "uuid": row['uuid'],
                               "is_input_counted": bool(row['is_input_counted']),
                               "is_output_counted": bool(row['is_output_counted']),
                               "cost_center": cost_center,
                               "description": row['description'],
                               "qrcode": 'combinedequipment:' + row['uuid']}
                result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp):
        """Handles POST requests: validate the payload and insert a new combined equipment."""
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # FIX: pass the message, not the exception object, as the description
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=str(ex))
        new_values = json.loads(raw_json)
        if 'name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['name'], str) or \
                len(str.strip(new_values['data']['name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_NAME')
        name = str.strip(new_values['data']['name'])
        if 'is_input_counted' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_input_counted'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_INPUT_COUNTED_VALUE')
        is_input_counted = new_values['data']['is_input_counted']
        if 'is_output_counted' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_output_counted'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_OUTPUT_COUNTED_VALUE')
        is_output_counted = new_values['data']['is_output_counted']
        if 'cost_center_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['cost_center_id'], int) or \
                new_values['data']['cost_center_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        cost_center_id = new_values['data']['cost_center_id']
        # description is optional; blank/absent collapses to None
        if 'description' in new_values['data'].keys() and \
                new_values['data']['description'] is not None and \
                len(str(new_values['data']['description'])) > 0:
            description = str.strip(new_values['data']['description'])
        else:
            description = None
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE name = %s ", (name,))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            # FIX: a duplicate name is a client error (400); the original raised 404
            # with a BAD_REQUEST title, a status/title mismatch
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.COMBINED_EQUIPMENT_NAME_IS_ALREADY_IN_USE')
        if cost_center_id is not None:
            cursor.execute(" SELECT name "
                           " FROM tbl_cost_centers "
                           " WHERE id = %s ",
                           (new_values['data']['cost_center_id'],))
            row = cursor.fetchone()
            if row is None:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                       description='API.COST_CENTER_NOT_FOUND')
        add_values = (" INSERT INTO tbl_combined_equipments "
                      "    (name, uuid, is_input_counted, is_output_counted, "
                      "     cost_center_id, description) "
                      " VALUES (%s, %s, %s, %s, %s, %s) ")
        cursor.execute(add_values, (name,
                                    str(uuid.uuid4()),
                                    is_input_counted,
                                    is_output_counted,
                                    cost_center_id,
                                    description))
        new_id = cursor.lastrowid
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(new_id)
class CombinedEquipmentItem:
    """Falcon resource: read (GET), update (PUT), delete (DELETE) and clone (POST) one combined equipment."""
    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: acknowledge without doing any work
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """Return one combined equipment by id, joined with its cost center."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        # load every cost center once so the row can be joined in memory below
        query = (" SELECT id, name, uuid "
                 " FROM tbl_cost_centers ")
        cursor.execute(query)
        rows_cost_centers = cursor.fetchall()
        cost_center_dict = dict()
        if rows_cost_centers is not None and len(rows_cost_centers) > 0:
            for row in rows_cost_centers:
                cost_center_dict[row['id']] = {"id": row['id'],
                                               "name": row['name'],
                                               "uuid": row['uuid']}
        query = (" SELECT id, name, uuid, "
                 " is_input_counted, is_output_counted, "
                 " cost_center_id, description "
                 " FROM tbl_combined_equipments "
                 " WHERE id = %s ")
        cursor.execute(query, (id_,))
        row = cursor.fetchone()
        cursor.close()
        cnx.disconnect()
        if row is None:
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        else:
            cost_center = cost_center_dict.get(row['cost_center_id'], None)
            meta_result = {"id": row['id'],
                           "name": row['name'],
                           "uuid": row['uuid'],
                           "is_input_counted": bool(row['is_input_counted']),
                           "is_output_counted": bool(row['is_output_counted']),
                           "cost_center": cost_center,
                           "description": row['description'],
                           "qrcode": 'combinedequipment:' + row['uuid']}
        resp.text = json.dumps(meta_result)

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, eid=None):
        """Delete one combined equipment after verifying no relations remain.

        Rejects the deletion with 400 while any space/meter/offline meter/
        virtual meter relation still references the combined equipment;
        associated parameters are removed together with the equipment itself.
        """
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        # check relation with space
        cursor.execute(" SELECT space_id "
                       " FROM tbl_spaces_combined_equipments "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_combined_equipments = cursor.fetchall()
        if rows_combined_equipments is not None and len(rows_combined_equipments) > 0:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.THERE_IS_RELATION_WITH_SPACES')
        # check relation with meter
        cursor.execute(" SELECT meter_id "
                       " FROM tbl_combined_equipments_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_meters = cursor.fetchall()
        if rows_meters is not None and len(rows_meters) > 0:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.THERE_IS_RELATION_WITH_METER')
        # check relation with offline meter
        cursor.execute(" SELECT offline_meter_id "
                       " FROM tbl_combined_equipments_offline_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_offline_meters = cursor.fetchall()
        if rows_offline_meters is not None and len(rows_offline_meters) > 0:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.THERE_IS_RELATION_WITH_OFFLINE_METER')
        # check relation with virtual meter
        cursor.execute(" SELECT virtual_meter_id "
                       " FROM tbl_combined_equipments_virtual_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_virtual_meters = cursor.fetchall()
        if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.THERE_IS_RELATION_WITH_VIRTUAL_METER')
        # delete all associated parameters
        cursor.execute(" DELETE FROM tbl_combined_equipments_parameters WHERE combined_equipment_id = %s ", (id_,))
        cnx.commit()
        cursor.execute(" DELETE FROM tbl_combined_equipments WHERE id = %s ", (id_,))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204

    @staticmethod
    @user_logger
    def on_put(req, resp, id_):
        """Handles PUT requests: validate the payload and update the combined equipment."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # FIX: pass the message, not the exception object, as the description
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
        new_values = json.loads(raw_json)
        if 'name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['name'], str) or \
                len(str.strip(new_values['data']['name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_NAME')
        name = str.strip(new_values['data']['name'])
        if 'is_input_counted' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_input_counted'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_INPUT_COUNTED_VALUE')
        is_input_counted = new_values['data']['is_input_counted']
        if 'is_output_counted' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_output_counted'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_OUTPUT_COUNTED_VALUE')
        is_output_counted = new_values['data']['is_output_counted']
        if 'cost_center_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['cost_center_id'], int) or \
                new_values['data']['cost_center_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COST_CENTER_ID')
        cost_center_id = new_values['data']['cost_center_id']
        # description is optional; blank/absent collapses to None
        if 'description' in new_values['data'].keys() and \
                new_values['data']['description'] is not None and \
                len(str(new_values['data']['description'])) > 0:
            description = str.strip(new_values['data']['description'])
        else:
            description = None
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE name = %s AND id != %s ", (name, id_))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            # FIX: a duplicate name is a client error (400); the original raised 404
            # with a BAD_REQUEST title, a status/title mismatch
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.COMBINED_EQUIPMENT_NAME_IS_ALREADY_IN_USE')
        cursor.execute(" SELECT name "
                       " FROM tbl_cost_centers "
                       " WHERE id = %s ",
                       (new_values['data']['cost_center_id'],))
        row = cursor.fetchone()
        if row is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COST_CENTER_NOT_FOUND')
        update_row = (" UPDATE tbl_combined_equipments "
                      " SET name = %s, is_input_counted = %s, is_output_counted = %s, "
                      "     cost_center_id = %s, description = %s "
                      " WHERE id = %s ")
        cursor.execute(update_row, (name,
                                    is_input_counted,
                                    is_output_counted,
                                    cost_center_id,
                                    description,
                                    id_))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_200

    # Clone a Combined Equipment
    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: clone the combined equipment and all its relations.

        Copies the base row (name suffixed with ' Copy'), then duplicates the
        meter, offline meter, virtual meter and parameter relations onto the
        newly inserted id.
        """
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # FIX: pass the message, not the exception object, as the description
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
        # NOTE(review): the body is parsed only to validate it is JSON; its
        # contents are not used by the clone operation
        new_values = json.loads(raw_json)
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        query = (" SELECT name, is_input_counted, is_output_counted, "
                 " cost_center_id, description "
                 " FROM tbl_combined_equipments "
                 " WHERE id = %s ")
        cursor.execute(query, (id_,))
        row = cursor.fetchone()
        if row is None:
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        else:
            add_values = (" INSERT INTO tbl_combined_equipments "
                          "    (name, uuid, is_input_counted, is_output_counted, "
                          "     cost_center_id, description) "
                          " VALUES (%s, %s, %s, %s, %s, %s) ")
            cursor.execute(add_values, (row['name'] + ' Copy',
                                        str(uuid.uuid4()),
                                        row['is_input_counted'],
                                        row['is_output_counted'],
                                        row['cost_center_id'],
                                        row['description']))
            new_id = cursor.lastrowid
            cnx.commit()
        # clone relation with meter
        cursor.execute(" SELECT meter_id, is_output "
                       " FROM tbl_combined_equipments_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_meters = cursor.fetchall()
        if rows_meters is not None and len(rows_meters) > 0:
            # NOTE(review): the bulk INSERTs below are built by string
            # concatenation from values read back out of the database;
            # consider cursor.executemany with placeholders instead
            add_values = (" INSERT INTO tbl_combined_equipments_meters (combined_equipment_id, meter_id, is_output) "
                          " VALUES  ")
            for row in rows_meters:
                add_values += " (" + str(new_id) + ","
                add_values += str(row['meter_id']) + ","
                add_values += str(bool(row['is_output'])) + "), "
            # trim ", " at the end of string and then execute
            cursor.execute(add_values[:-2])
            cnx.commit()
        # clone relation with offline meter
        cursor.execute(" SELECT offline_meter_id, is_output "
                       " FROM tbl_combined_equipments_offline_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_offline_meters = cursor.fetchall()
        if rows_offline_meters is not None and len(rows_offline_meters) > 0:
            add_values = (" INSERT INTO tbl_combined_equipments_offline_meters "
                          " (combined_equipment_id, offline_meter_id, is_output) "
                          " VALUES  ")
            for row in rows_offline_meters:
                add_values += " (" + str(new_id) + ","
                add_values += "'" + str(row['offline_meter_id']) + "',"
                add_values += str(bool(row['is_output'])) + "), "
            # trim ", " at the end of string and then execute
            cursor.execute(add_values[:-2])
            cnx.commit()
        # clone relation with virtual meter
        cursor.execute(" SELECT virtual_meter_id, is_output "
                       " FROM tbl_combined_equipments_virtual_meters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_virtual_meters = cursor.fetchall()
        if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
            add_values = (" INSERT INTO tbl_combined_equipments_virtual_meters "
                          " (combined_equipment_id, virtual_meter_id, is_output) "
                          " VALUES  ")
            for row in rows_virtual_meters:
                add_values += " (" + str(new_id) + ","
                add_values += str(row['virtual_meter_id']) + ","
                add_values += str(bool(row['is_output'])) + "), "
            # trim ", " at the end of string and then execute
            cursor.execute(add_values[:-2])
            cnx.commit()
        # clone parameters
        cursor.execute(" SELECT name, parameter_type, constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
                       " FROM tbl_combined_equipments_parameters "
                       " WHERE combined_equipment_id = %s ",
                       (id_,))
        rows_parameters = cursor.fetchall()
        if rows_parameters is not None and len(rows_parameters) > 0:
            add_values = (" INSERT INTO tbl_combined_equipments_parameters"
                          " (combined_equipment_id, name, parameter_type, constant, point_id, "
                          " numerator_meter_uuid, denominator_meter_uuid) "
                          " VALUES  ")
            for row in rows_parameters:
                add_values += " (" + str(new_id) + ","
                add_values += "'" + str(row['name']) + "',"
                add_values += "'" + str(row['parameter_type']) + "',"
                # nullable columns render as SQL NULL when absent
                if row['constant'] is not None:
                    add_values += "'" + str(row['constant']) + "',"
                else:
                    add_values += "null, "
                if row['point_id'] is not None:
                    add_values += str(row['point_id']) + ","
                else:
                    add_values += "null, "
                if row['numerator_meter_uuid'] is not None:
                    add_values += "'" + row['numerator_meter_uuid'] + "',"
                else:
                    add_values += "null, "
                if row['denominator_meter_uuid'] is not None:
                    add_values += "'" + row['denominator_meter_uuid'] + "'), "
                else:
                    add_values += "null), "
            # trim ", " at the end of string and then execute
            cursor.execute(add_values[:-2])
            cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(new_id)
class CombinedEquipmentEquipmentCollection:
    """Falcon resource: list (GET) and bind (POST) equipments of a combined equipment."""
    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentEquipmentCollection"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: acknowledge without doing any work
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """Return all equipments associated with the combined equipment id_."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        query = (" SELECT e.id, e.name, e.uuid "
                 " FROM tbl_combined_equipments c, tbl_combined_equipments_equipments ce, tbl_equipments e "
                 " WHERE ce.combined_equipment_id = c.id AND e.id = ce.equipment_id AND c.id = %s "
                 " ORDER BY e.id ")
        cursor.execute(query, (id_,))
        rows = cursor.fetchall()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                meta_result = {"id": row[0], "name": row[1], "uuid": row[2]}
                result.append(meta_result)
        # FIX: close the cursor and connection (the original leaked them here)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: associate an equipment with the combined equipment."""
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # FIX: pass the message, not the exception object, as the description
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        new_values = json.loads(raw_json)
        if 'equipment_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['equipment_id'], int) or \
                new_values['data']['equipment_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_EQUIPMENT_ID')
        equipment_id = new_values['data']['equipment_id']
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " from tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_equipments "
                       " WHERE id = %s ", (equipment_id,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.EQUIPMENT_NOT_FOUND')
        query = (" SELECT id "
                 " FROM tbl_combined_equipments_equipments "
                 " WHERE combined_equipment_id = %s AND equipment_id = %s")
        cursor.execute(query, (id_, equipment_id,))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
                                   description='API.COMBINED_EQUIPMENT_EQUIPMENT_RELATION_EXISTS')
        add_row = (" INSERT INTO tbl_combined_equipments_equipments (combined_equipment_id, equipment_id) "
                   " VALUES (%s, %s) ")
        cursor.execute(add_row, (id_, equipment_id,))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(id_) + '/equipments/' + str(equipment_id)
class CombinedEquipmentEquipmentItem:
    """Falcon resource: remove the binding between a combined equipment and an equipment."""
    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentEquipmentItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_, eid):
        # CORS pre-flight: acknowledge without doing any work
        resp.status = falcon.HTTP_200

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, eid):
        """Delete the association between combined equipment id_ and equipment eid."""
        access_control(req)
        # validate both path parameters up front, in order
        for candidate, message in ((id_, 'API.INVALID_COMBINED_EQUIPMENT_ID'),
                                   (eid, 'API.INVALID_EQUIPMENT_ID')):
            if not candidate.isdigit() or int(candidate) <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description=message)
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()

        def _abort_not_found(description):
            # release the database resources before propagating the 404
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description=description)

        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            _abort_not_found('API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_equipments "
                       " WHERE id = %s ", (eid,))
        if cursor.fetchone() is None:
            _abort_not_found('API.EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT id "
                       " FROM tbl_combined_equipments_equipments "
                       " WHERE combined_equipment_id = %s AND equipment_id = %s ", (id_, eid))
        if cursor.fetchone() is None:
            _abort_not_found('API.COMBINED_EQUIPMENT_EQUIPMENT_RELATION_NOT_FOUND')
        cursor.execute(" DELETE FROM tbl_combined_equipments_equipments "
                       " WHERE combined_equipment_id = %s AND equipment_id = %s ", (id_, eid))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204
class CombinedEquipmentParameterCollection:
    """Falcon resource: list (GET) and create (POST) parameters of a combined equipment."""
    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentParameterCollection"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: acknowledge without doing any work
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """Return all parameters of the combined equipment, resolving points and meters."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        # build lookup tables for points and all three meter kinds, keyed for
        # in-memory resolution of the parameter rows below
        query = (" SELECT id, name "
                 " FROM tbl_points ")
        cursor.execute(query)
        rows_points = cursor.fetchall()
        point_dict = dict()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_dict[row['id']] = {"id": row['id'],
                                         "name": row['name']}
        query = (" SELECT id, name, uuid "
                 " FROM tbl_meters ")
        cursor.execute(query)
        rows_meters = cursor.fetchall()
        meter_dict = dict()
        if rows_meters is not None and len(rows_meters) > 0:
            for row in rows_meters:
                meter_dict[row['uuid']] = {"type": 'meter',
                                           "id": row['id'],
                                           "name": row['name'],
                                           "uuid": row['uuid']}
        query = (" SELECT id, name, uuid "
                 " FROM tbl_offline_meters ")
        cursor.execute(query)
        rows_offline_meters = cursor.fetchall()
        offline_meter_dict = dict()
        if rows_offline_meters is not None and len(rows_offline_meters) > 0:
            for row in rows_offline_meters:
                offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
                                                   "id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT id, name, uuid "
                 " FROM tbl_virtual_meters ")
        cursor.execute(query)
        rows_virtual_meters = cursor.fetchall()
        virtual_meter_dict = dict()
        if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
            for row in rows_virtual_meters:
                virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
                                                   "id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT id, name, parameter_type, "
                 " constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
                 " FROM tbl_combined_equipments_parameters "
                 " WHERE combined_equipment_id = %s "
                 " ORDER BY id ")
        cursor.execute(query, (id_, ))
        rows_parameters = cursor.fetchall()
        result = list()
        if rows_parameters is not None and len(rows_parameters) > 0:
            for row in rows_parameters:
                constant = None
                point = None
                numerator_meter = None
                denominator_meter = None
                if row['parameter_type'] == 'point':
                    point = point_dict.get(row['point_id'], None)
                    constant = None
                    numerator_meter = None
                    denominator_meter = None
                elif row['parameter_type'] == 'constant':
                    constant = row['constant']
                    point = None
                    numerator_meter = None
                    denominator_meter = None
                elif row['parameter_type'] == 'fraction':
                    constant = None
                    point = None
                    # find numerator meter by uuid, trying meter, then virtual,
                    # then offline meter
                    numerator_meter = meter_dict.get(row['numerator_meter_uuid'], None)
                    if numerator_meter is None:
                        numerator_meter = virtual_meter_dict.get(row['numerator_meter_uuid'], None)
                    if numerator_meter is None:
                        numerator_meter = offline_meter_dict.get(row['numerator_meter_uuid'], None)
                    # find denominator meter by uuid, same precedence
                    denominator_meter = meter_dict.get(row['denominator_meter_uuid'], None)
                    if denominator_meter is None:
                        denominator_meter = virtual_meter_dict.get(row['denominator_meter_uuid'], None)
                    if denominator_meter is None:
                        denominator_meter = offline_meter_dict.get(row['denominator_meter_uuid'], None)
                meta_result = {"id": row['id'],
                               "name": row['name'],
                               "parameter_type": row['parameter_type'],
                               "constant": constant,
                               "point": point,
                               "numerator_meter": numerator_meter,
                               "denominator_meter": denominator_meter}
                result.append(meta_result)
        cursor.close()
        cnx.disconnect()
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: validate and insert a new parameter for the combined equipment."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            # FIX: pass the message, not the exception object, as the description
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=str(ex))
        new_values = json.loads(raw_json)
        if 'name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['name'], str) or \
                len(str.strip(new_values['data']['name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_NAME')
        name = str.strip(new_values['data']['name'])
        if 'parameter_type' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['parameter_type'], str) or \
                len(str.strip(new_values['data']['parameter_type'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
        parameter_type = str.strip(new_values['data']['parameter_type'])
        if parameter_type not in ('constant', 'point', 'fraction'):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
        # optional fields: presence/validity depends on parameter_type,
        # which is enforced further below
        constant = None
        if 'constant' in new_values['data'].keys():
            if new_values['data']['constant'] is not None and \
                    isinstance(new_values['data']['constant'], str) and \
                    len(str.strip(new_values['data']['constant'])) > 0:
                constant = str.strip(new_values['data']['constant'])
        point_id = None
        if 'point_id' in new_values['data'].keys():
            if new_values['data']['point_id'] is not None and \
                    new_values['data']['point_id'] <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            point_id = new_values['data']['point_id']
        numerator_meter_uuid = None
        if 'numerator_meter_uuid' in new_values['data'].keys():
            if new_values['data']['numerator_meter_uuid'] is not None and \
                    isinstance(new_values['data']['numerator_meter_uuid'], str) and \
                    len(str.strip(new_values['data']['numerator_meter_uuid'])) > 0:
                numerator_meter_uuid = str.strip(new_values['data']['numerator_meter_uuid'])
        denominator_meter_uuid = None
        if 'denominator_meter_uuid' in new_values['data'].keys():
            if new_values['data']['denominator_meter_uuid'] is not None and \
                    isinstance(new_values['data']['denominator_meter_uuid'], str) and \
                    len(str.strip(new_values['data']['denominator_meter_uuid'])) > 0:
                denominator_meter_uuid = str.strip(new_values['data']['denominator_meter_uuid'])
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            # FIX: a missing combined equipment is 404; the original sent
            # HTTP_400 with an API.NOT_FOUND title (status/title mismatch)
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments_parameters "
                       " WHERE name = %s AND combined_equipment_id = %s ", (name, id_))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NAME_IS_ALREADY_IN_USE')
        # validate by parameter type
        if parameter_type == 'point':
            if point_id is None:
                # FIX: close the connection before raising (the original leaked it)
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            query = (" SELECT id, name "
                     " FROM tbl_points "
                     " WHERE id = %s ")
            cursor.execute(query, (point_id, ))
            if cursor.fetchone() is None:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.POINT_NOT_FOUND')
        elif parameter_type == 'constant':
            if constant is None:
                # FIX: close the connection before raising (the original leaked it)
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_CONSTANT_VALUE')
        elif parameter_type == 'fraction':
            query = (" SELECT id, name, uuid "
                     " FROM tbl_meters ")
            cursor.execute(query)
            rows_meters = cursor.fetchall()
            meter_dict = dict()
            if rows_meters is not None and len(rows_meters) > 0:
                for row in rows_meters:
                    meter_dict[row['uuid']] = {"type": 'meter',
                                               "id": row['id'],
                                               "name": row['name'],
                                               "uuid": row['uuid']}
            query = (" SELECT id, name, uuid "
                     " FROM tbl_offline_meters ")
            cursor.execute(query)
            rows_offline_meters = cursor.fetchall()
            offline_meter_dict = dict()
            if rows_offline_meters is not None and len(rows_offline_meters) > 0:
                for row in rows_offline_meters:
                    offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
                                                       "id": row['id'],
                                                       "name": row['name'],
                                                       "uuid": row['uuid']}
            query = (" SELECT id, name, uuid "
                     " FROM tbl_virtual_meters ")
            cursor.execute(query)
            rows_virtual_meters = cursor.fetchall()
            virtual_meter_dict = dict()
            if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
                for row in rows_virtual_meters:
                    virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
                                                       "id": row['id'],
                                                       "name": row['name'],
                                                       "uuid": row['uuid']}
            # validate numerator meter uuid
            if meter_dict.get(numerator_meter_uuid) is None and \
                    virtual_meter_dict.get(numerator_meter_uuid) is None and \
                    offline_meter_dict.get(numerator_meter_uuid) is None:
                # FIX: close the connection before raising (the original leaked it)
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_NUMERATOR_METER_UUID')
            # validate denominator meter uuid
            if denominator_meter_uuid == numerator_meter_uuid:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_DENOMINATOR_METER_UUID')
            if denominator_meter_uuid not in meter_dict and \
                    denominator_meter_uuid not in virtual_meter_dict and \
                    denominator_meter_uuid not in offline_meter_dict:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_DENOMINATOR_METER_UUID')
        add_values = (" INSERT INTO tbl_combined_equipments_parameters "
                      "    (combined_equipment_id, name, parameter_type, constant, "
                      "     point_id, numerator_meter_uuid, denominator_meter_uuid) "
                      " VALUES (%s, %s, %s, %s, %s, %s, %s) ")
        cursor.execute(add_values, (id_,
                                    name,
                                    parameter_type,
                                    constant,
                                    point_id,
                                    numerator_meter_uuid,
                                    denominator_meter_uuid))
        new_id = cursor.lastrowid
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        # FIX: the original omitted the '/' before 'parameters', producing
        # e.g. '/combinedequipments/1parameters/2'
        resp.location = '/combinedequipments/' + str(id_) + '/parameters/' + str(new_id)
class CombinedEquipmentParameterItem:
    """Falcon resource for a single parameter of a combined equipment.

    A parameter is one of three types:
      * 'point'    — references a row of tbl_points via point_id;
      * 'constant' — a literal string value;
      * 'fraction' — a numerator/denominator pair of meter UUIDs, which may
        reference meters, offline meters or virtual meters.
    Routed as /combinedequipments/{id_}/parameters/{pid}.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentParameterItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_, pid):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_, pid):
        """Return parameter pid of combined equipment id_ as a JSON object.

        Raises HTTP 400 on malformed ids and HTTP 404 when the parameter
        does not exist or does not belong to this combined equipment.
        """
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        if not pid.isdigit() or int(pid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        # Preload lookup tables so the parameter row can be resolved to full
        # point/meter objects in memory without further queries.
        query = (" SELECT id, name "
                 " FROM tbl_points ")
        cursor.execute(query)
        rows_points = cursor.fetchall()
        point_dict = dict()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_dict[row['id']] = {"id": row['id'],
                                         "name": row['name']}
        # All three meter kinds are keyed by uuid: a stored
        # numerator/denominator uuid may reference any of them.
        query = (" SELECT id, name, uuid "
                 " FROM tbl_meters ")
        cursor.execute(query)
        rows_meters = cursor.fetchall()
        meter_dict = dict()
        if rows_meters is not None and len(rows_meters) > 0:
            for row in rows_meters:
                meter_dict[row['uuid']] = {"type": 'meter',
                                           "id": row['id'],
                                           "name": row['name'],
                                           "uuid": row['uuid']}
        query = (" SELECT id, name, uuid "
                 " FROM tbl_offline_meters ")
        cursor.execute(query)
        rows_offline_meters = cursor.fetchall()
        offline_meter_dict = dict()
        if rows_offline_meters is not None and len(rows_offline_meters) > 0:
            for row in rows_offline_meters:
                offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
                                                   "id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT id, name, uuid "
                 " FROM tbl_virtual_meters ")
        cursor.execute(query)
        rows_virtual_meters = cursor.fetchall()
        virtual_meter_dict = dict()
        if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
            for row in rows_virtual_meters:
                virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
                                                   "id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT id, name, parameter_type, "
                 " constant, point_id, numerator_meter_uuid, denominator_meter_uuid "
                 " FROM tbl_combined_equipments_parameters "
                 " WHERE combined_equipment_id = %s AND id = %s ")
        cursor.execute(query, (id_, pid))
        row = cursor.fetchone()
        cursor.close()
        cnx.disconnect()
        if row is None:
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
        else:
            # Only the fields relevant to the parameter's type are populated;
            # the others stay None in the response.
            constant = None
            point = None
            numerator_meter = None
            denominator_meter = None
            if row['parameter_type'] == 'point':
                point = point_dict.get(row['point_id'], None)
                constant = None
                numerator_meter = None
                denominator_meter = None
            elif row['parameter_type'] == 'constant':
                constant = row['constant']
                point = None
                numerator_meter = None
                denominator_meter = None
            elif row['parameter_type'] == 'fraction':
                constant = None
                point = None
                # find numerator meter by uuid: try regular, then virtual,
                # then offline meters.
                numerator_meter = meter_dict.get(row['numerator_meter_uuid'], None)
                if numerator_meter is None:
                    numerator_meter = virtual_meter_dict.get(row['numerator_meter_uuid'], None)
                if numerator_meter is None:
                    numerator_meter = offline_meter_dict.get(row['numerator_meter_uuid'], None)
                # find denominator meter by uuid, same fallback order.
                denominator_meter = meter_dict.get(row['denominator_meter_uuid'], None)
                if denominator_meter is None:
                    denominator_meter = virtual_meter_dict.get(row['denominator_meter_uuid'], None)
                if denominator_meter is None:
                    denominator_meter = offline_meter_dict.get(row['denominator_meter_uuid'], None)
            meta_result = {"id": row['id'],
                           "name": row['name'],
                           "parameter_type": row['parameter_type'],
                           "constant": constant,
                           "point": point,
                           "numerator_meter": numerator_meter,
                           "denominator_meter": denominator_meter}
            resp.text = json.dumps(meta_result)

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, pid):
        """Delete parameter pid of combined equipment id_ (204 on success)."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        if not pid.isdigit() or int(pid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ",
                       (id_,))
        row = cursor.fetchone()
        if row is None:
            cursor.close()
            cnx.disconnect()
            # NOTE(review): status 400 with a NOT_FOUND title; on_get uses
            # 404 for the same condition — confirm which is intended.
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments_parameters "
                       " WHERE combined_equipment_id = %s AND id = %s ",
                       (id_, pid,))
        row = cursor.fetchone()
        if row is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
        # The (equipment, parameter) match was verified above, so deleting by
        # primary key alone is safe.
        cursor.execute(" DELETE FROM tbl_combined_equipments_parameters "
                       " WHERE id = %s ", (pid, ))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204

    @staticmethod
    @user_logger
    def on_put(req, resp, id_, pid):
        """Handles PUT requests: update parameter pid of combined equipment id_."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        if not pid.isdigit() or int(pid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_ID')
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=ex)
        new_values = json.loads(raw_json)
        # name: required non-empty string.
        if 'name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['name'], str) or \
                len(str.strip(new_values['data']['name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_NAME')
        name = str.strip(new_values['data']['name'])
        # parameter_type: required, one of 'constant', 'point', 'fraction'.
        if 'parameter_type' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['parameter_type'], str) or \
                len(str.strip(new_values['data']['parameter_type'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
        parameter_type = str.strip(new_values['data']['parameter_type'])
        if parameter_type not in ('constant', 'point', 'fraction'):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_PARAMETER_TYPE')
        # Optional fields default to None; which of them must be present is
        # validated per parameter_type further below.
        constant = None
        if 'constant' in new_values['data'].keys():
            if new_values['data']['constant'] is not None and \
                    isinstance(new_values['data']['constant'], str) and \
                    len(str.strip(new_values['data']['constant'])) > 0:
                constant = str.strip(new_values['data']['constant'])
        point_id = None
        if 'point_id' in new_values['data'].keys():
            if new_values['data']['point_id'] is not None and \
                    new_values['data']['point_id'] <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            point_id = new_values['data']['point_id']
        numerator_meter_uuid = None
        if 'numerator_meter_uuid' in new_values['data'].keys():
            if new_values['data']['numerator_meter_uuid'] is not None and \
                    isinstance(new_values['data']['numerator_meter_uuid'], str) and \
                    len(str.strip(new_values['data']['numerator_meter_uuid'])) > 0:
                numerator_meter_uuid = str.strip(new_values['data']['numerator_meter_uuid'])
        denominator_meter_uuid = None
        if 'denominator_meter_uuid' in new_values['data'].keys():
            if new_values['data']['denominator_meter_uuid'] is not None and \
                    isinstance(new_values['data']['denominator_meter_uuid'], str) and \
                    len(str.strip(new_values['data']['denominator_meter_uuid'])) > 0:
                denominator_meter_uuid = str.strip(new_values['data']['denominator_meter_uuid'])
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments_parameters "
                       " WHERE combined_equipment_id = %s AND id = %s ",
                       (id_, pid,))
        row = cursor.fetchone()
        if row is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NOT_FOUND_OR_NOT_MATCH')
        # The new name must be unique among this equipment's other parameters.
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments_parameters "
                       " WHERE name = %s AND combined_equipment_id = %s AND id != %s ", (name, id_, pid))
        row = cursor.fetchone()
        if row is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.COMBINED_EQUIPMENT_PARAMETER_NAME_IS_ALREADY_IN_USE')
        # validate by parameter type
        if parameter_type == 'point':
            if point_id is None:
                # NOTE(review): raises without closing cursor/cnx — confirm
                # whether cleanup is wanted on this path.
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_POINT_ID')
            query = (" SELECT id, name "
                     " FROM tbl_points "
                     " WHERE id = %s ")
            cursor.execute(query, (point_id, ))
            row = cursor.fetchone()
            if row is None:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.POINT_NOT_FOUND')
        elif parameter_type == 'constant':
            if constant is None:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_CONSTANT_VALUE')
        elif parameter_type == 'fraction':
            # Build uuid-keyed lookups across all three meter kinds; the
            # numerator and denominator may each reference any of them.
            query = (" SELECT id, name, uuid "
                     " FROM tbl_meters ")
            cursor.execute(query)
            rows_meters = cursor.fetchall()
            meter_dict = dict()
            if rows_meters is not None and len(rows_meters) > 0:
                for row in rows_meters:
                    meter_dict[row['uuid']] = {"type": 'meter',
                                               "id": row['id'],
                                               "name": row['name'],
                                               "uuid": row['uuid']}
            query = (" SELECT id, name, uuid "
                     " FROM tbl_offline_meters ")
            cursor.execute(query)
            rows_offline_meters = cursor.fetchall()
            offline_meter_dict = dict()
            if rows_offline_meters is not None and len(rows_offline_meters) > 0:
                for row in rows_offline_meters:
                    offline_meter_dict[row['uuid']] = {"type": 'offline_meter',
                                                       "id": row['id'],
                                                       "name": row['name'],
                                                       "uuid": row['uuid']}
            query = (" SELECT id, name, uuid "
                     " FROM tbl_virtual_meters ")
            cursor.execute(query)
            rows_virtual_meters = cursor.fetchall()
            virtual_meter_dict = dict()
            if rows_virtual_meters is not None and len(rows_virtual_meters) > 0:
                for row in rows_virtual_meters:
                    virtual_meter_dict[row['uuid']] = {"type": 'virtual_meter',
                                                       "id": row['id'],
                                                       "name": row['name'],
                                                       "uuid": row['uuid']}
            # validate numerator meter uuid
            if meter_dict.get(numerator_meter_uuid) is None and \
                    virtual_meter_dict.get(numerator_meter_uuid) is None and \
                    offline_meter_dict.get(numerator_meter_uuid) is None:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_NUMERATOR_METER_UUID')
            # validate denominator meter uuid (must differ from the numerator)
            if denominator_meter_uuid == numerator_meter_uuid:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_DENOMINATOR_METER_UUID')
            if denominator_meter_uuid not in meter_dict and \
                    denominator_meter_uuid not in virtual_meter_dict and \
                    denominator_meter_uuid not in offline_meter_dict:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_DENOMINATOR_METER_UUID')
        # Despite the variable name, this statement performs the UPDATE.
        add_values = (" UPDATE tbl_combined_equipments_parameters "
                      " SET name = %s , parameter_type = %s, constant = %s, "
                      " point_id = %s, numerator_meter_uuid = %s, denominator_meter_uuid =%s "
                      " WHERE id = %s ")
        cursor.execute(add_values, (name,
                                    parameter_type,
                                    constant,
                                    point_id,
                                    numerator_meter_uuid,
                                    denominator_meter_uuid,
                                    pid))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_200
class CombinedEquipmentMeterCollection:
    """Falcon resource for the meters linked to a combined equipment.

    Routed as /combinedequipments/{id_}/meters.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentMeterCollection"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """List all meters of combined equipment id_ with their energy category."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        # Preload all energy categories so each meter row can be joined to
        # its category in memory.
        query = (" SELECT id, name, uuid "
                 " FROM tbl_energy_categories ")
        cursor.execute(query)
        rows_energy_categories = cursor.fetchall()
        energy_category_dict = dict()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row in rows_energy_categories:
                energy_category_dict[row['id']] = {"id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
                 " FROM tbl_combined_equipments e, tbl_combined_equipments_meters em, tbl_meters m "
                 " WHERE em.combined_equipment_id = e.id AND m.id = em.meter_id AND e.id = %s "
                 " ORDER BY m.id ")
        cursor.execute(query, (id_,))
        rows = cursor.fetchall()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                energy_category = energy_category_dict.get(row['energy_category_id'], None)
                meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
                               "energy_category": energy_category,
                               "is_output": bool(row['is_output'])}
                result.append(meta_result)
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: link a meter to combined equipment id_."""
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=ex)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        new_values = json.loads(raw_json)
        # meter_id: required positive integer.
        if 'meter_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['meter_id'], int) or \
                new_values['data']['meter_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_METER_ID')
        meter_id = new_values['data']['meter_id']
        # is_output: required boolean flag stored on the relation.
        if 'is_output' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_output'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_OUTPUT_VALUE')
        is_output = new_values['data']['is_output']
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " from tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_meters "
                       " WHERE id = %s ", (meter_id,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.METER_NOT_FOUND')
        # Reject duplicate relations.
        query = (" SELECT id "
                 " FROM tbl_combined_equipments_meters "
                 " WHERE combined_equipment_id = %s AND meter_id = %s")
        cursor.execute(query, (id_, meter_id,))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
                                   description='API.COMBINED_EQUIPMENT_METER_RELATION_EXISTS')
        add_row = (" INSERT INTO tbl_combined_equipments_meters (combined_equipment_id, meter_id, is_output ) "
                   " VALUES (%s, %s, %s) ")
        cursor.execute(add_row, (id_, meter_id, is_output))
        new_id = cursor.lastrowid  # NOTE(review): unused; Location uses meter_id
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(id_) + '/meters/' + str(meter_id)
class CombinedEquipmentMeterItem:
    """Falcon resource for one meter relation of a combined equipment.

    Routed as /combinedequipments/{id_}/meters/{mid}.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentMeterItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_, mid):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, mid):
        """Remove the relation between combined equipment id_ and meter mid."""
        access_control(req)
        # Both path segments must be positive integers.
        for value, message in ((id_, 'API.INVALID_COMBINED_EQUIPMENT_ID'),
                               (mid, 'API.INVALID_METER_ID')):
            if not value.isdigit() or int(value) <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description=message)
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()

        def _abort_not_found(description):
            # Release the connection before surfacing the 404.
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description=description)

        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            _abort_not_found('API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_meters "
                       " WHERE id = %s ", (mid,))
        if cursor.fetchone() is None:
            _abort_not_found('API.METER_NOT_FOUND')
        cursor.execute(" SELECT id "
                       " FROM tbl_combined_equipments_meters "
                       " WHERE combined_equipment_id = %s AND meter_id = %s ", (id_, mid))
        if cursor.fetchone() is None:
            _abort_not_found('API.COMBINED_EQUIPMENT_METER_RELATION_NOT_FOUND')
        cursor.execute(" DELETE FROM tbl_combined_equipments_meters "
                       " WHERE combined_equipment_id = %s AND meter_id = %s ", (id_, mid))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204
class CombinedEquipmentOfflineMeterCollection:
    """Falcon resource for the offline meters linked to a combined equipment.

    Routed as /combinedequipments/{id_}/offlinemeters.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentOfflineMeterCollection"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """List all offline meters of combined equipment id_ with energy category."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        # Preload all energy categories so each meter row can be joined to
        # its category in memory.
        query = (" SELECT id, name, uuid "
                 " FROM tbl_energy_categories ")
        cursor.execute(query)
        rows_energy_categories = cursor.fetchall()
        energy_category_dict = dict()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row in rows_energy_categories:
                energy_category_dict[row['id']] = {"id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
                 " FROM tbl_combined_equipments e, tbl_combined_equipments_offline_meters em, tbl_offline_meters m "
                 " WHERE em.combined_equipment_id = e.id AND m.id = em.offline_meter_id AND e.id = %s "
                 " ORDER BY m.id ")
        cursor.execute(query, (id_,))
        rows = cursor.fetchall()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                energy_category = energy_category_dict.get(row['energy_category_id'], None)
                meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
                               "energy_category": energy_category,
                               "is_output": bool(row['is_output'])}
                result.append(meta_result)
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: link an offline meter to combined equipment id_."""
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=ex)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        new_values = json.loads(raw_json)
        # offline_meter_id: required positive integer.
        if 'offline_meter_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['offline_meter_id'], int) or \
                new_values['data']['offline_meter_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_OFFLINE_METER_ID')
        offline_meter_id = new_values['data']['offline_meter_id']
        # is_output: required boolean flag stored on the relation.
        if 'is_output' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_output'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_OUTPUT_VALUE')
        is_output = new_values['data']['is_output']
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " from tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_offline_meters "
                       " WHERE id = %s ", (offline_meter_id,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.OFFLINE_METER_NOT_FOUND')
        # Reject duplicate relations.
        query = (" SELECT id "
                 " FROM tbl_combined_equipments_offline_meters "
                 " WHERE combined_equipment_id = %s AND offline_meter_id = %s")
        cursor.execute(query, (id_, offline_meter_id,))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
                                   description='API.COMBINED_EQUIPMENT_OFFLINE_METER_RELATION_EXISTS')
        add_row = (" INSERT INTO tbl_combined_equipments_offline_meters "
                   " (combined_equipment_id, offline_meter_id, is_output ) "
                   " VALUES (%s, %s, %s) ")
        cursor.execute(add_row, (id_, offline_meter_id, is_output))
        new_id = cursor.lastrowid  # NOTE(review): unused; Location uses offline_meter_id
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(id_) + '/offlinemeters/' + str(offline_meter_id)
class CombinedEquipmentOfflineMeterItem:
    """Falcon resource for one offline-meter relation of a combined equipment.

    Routed as /combinedequipments/{id_}/offlinemeters/{mid}.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentOfflineMeterItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_, mid):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, mid):
        """Remove the relation between combined equipment id_ and offline meter mid."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        if not mid.isdigit() or int(mid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_OFFLINE_METER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        # Each existence probe pairs a query and its parameters with the 404
        # description to use when the probe comes back empty.
        probes = (
            ((" SELECT name "
              " FROM tbl_combined_equipments "
              " WHERE id = %s "), (id_,),
             'API.COMBINED_EQUIPMENT_NOT_FOUND'),
            ((" SELECT name "
              " FROM tbl_offline_meters "
              " WHERE id = %s "), (mid,),
             'API.OFFLINE_METER_NOT_FOUND'),
            ((" SELECT id "
              " FROM tbl_combined_equipments_offline_meters "
              " WHERE combined_equipment_id = %s AND offline_meter_id = %s "), (id_, mid),
             'API.COMBINED_EQUIPMENT_OFFLINE_METER_RELATION_NOT_FOUND'),
        )
        for sql, params, description in probes:
            cursor.execute(sql, params)
            if cursor.fetchone() is None:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                       description=description)
        cursor.execute(" DELETE FROM tbl_combined_equipments_offline_meters "
                       " WHERE combined_equipment_id = %s AND offline_meter_id = %s ", (id_, mid))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204
class CombinedEquipmentVirtualMeterCollection:
    """Falcon resource for the virtual meters linked to a combined equipment.

    Routed as /combinedequipments/{id_}/virtualmeters.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentVirtualMeterCollection"""
        pass

    @staticmethod
    def on_options(req, resp, id_):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp, id_):
        """List all virtual meters of combined equipment id_ with energy category."""
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor(dictionary=True)
        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        # Preload all energy categories so each meter row can be joined to
        # its category in memory.
        query = (" SELECT id, name, uuid "
                 " FROM tbl_energy_categories ")
        cursor.execute(query)
        rows_energy_categories = cursor.fetchall()
        energy_category_dict = dict()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row in rows_energy_categories:
                energy_category_dict[row['id']] = {"id": row['id'],
                                                   "name": row['name'],
                                                   "uuid": row['uuid']}
        query = (" SELECT m.id, m.name, m.uuid, m.energy_category_id, em.is_output "
                 " FROM tbl_combined_equipments e, tbl_combined_equipments_virtual_meters em, tbl_virtual_meters m "
                 " WHERE em.combined_equipment_id = e.id AND m.id = em.virtual_meter_id AND e.id = %s "
                 " ORDER BY m.id ")
        cursor.execute(query, (id_,))
        rows = cursor.fetchall()
        result = list()
        if rows is not None and len(rows) > 0:
            for row in rows:
                energy_category = energy_category_dict.get(row['energy_category_id'], None)
                meta_result = {"id": row['id'], "name": row['name'], "uuid": row['uuid'],
                               "energy_category": energy_category,
                               "is_output": bool(row['is_output'])}
                result.append(meta_result)
        resp.text = json.dumps(result)

    @staticmethod
    @user_logger
    def on_post(req, resp, id_):
        """Handles POST requests: link a virtual meter to combined equipment id_."""
        access_control(req)
        try:
            raw_json = req.stream.read().decode('utf-8')
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=ex)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        new_values = json.loads(raw_json)
        # virtual_meter_id: required positive integer.
        if 'virtual_meter_id' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['virtual_meter_id'], int) or \
                new_values['data']['virtual_meter_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_VIRTUAL_METER_ID')
        virtual_meter_id = new_values['data']['virtual_meter_id']
        # is_output: required boolean flag stored on the relation.
        if 'is_output' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['is_output'], bool):
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_IS_OUTPUT_VALUE')
        is_output = new_values['data']['is_output']
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()
        cursor.execute(" SELECT name "
                       " from tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_virtual_meters "
                       " WHERE id = %s ", (virtual_meter_id,))
        if cursor.fetchone() is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.VIRTUAL_METER_NOT_FOUND')
        # Reject duplicate relations.
        query = (" SELECT id "
                 " FROM tbl_combined_equipments_virtual_meters "
                 " WHERE combined_equipment_id = %s AND virtual_meter_id = %s")
        cursor.execute(query, (id_, virtual_meter_id,))
        if cursor.fetchone() is not None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR',
                                   description='API.COMBINED_EQUIPMENT_VIRTUAL_METER_RELATION_EXISTS')
        add_row = (" INSERT INTO tbl_combined_equipments_virtual_meters "
                   " (combined_equipment_id, virtual_meter_id, is_output ) "
                   " VALUES (%s, %s, %s) ")
        cursor.execute(add_row, (id_, virtual_meter_id, is_output))
        new_id = cursor.lastrowid  # NOTE(review): unused; Location uses virtual_meter_id
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_201
        resp.location = '/combinedequipments/' + str(id_) + '/virtualmeters/' + str(virtual_meter_id)
class CombinedEquipmentVirtualMeterItem:
    """Falcon resource for one virtual-meter relation of a combined equipment.

    Routed as /combinedequipments/{id_}/virtualmeters/{mid}.
    """

    @staticmethod
    def __init__():
        """Initializes CombinedEquipmentVirtualMeterItem"""
        pass

    @staticmethod
    def on_options(req, resp, id_, mid):
        # CORS pre-flight: no body, just 200 OK.
        resp.status = falcon.HTTP_200

    @staticmethod
    @user_logger
    def on_delete(req, resp, id_, mid):
        """Remove the relation between combined equipment id_ and virtual meter mid."""
        access_control(req)
        if not id_.isdigit() or int(id_) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_COMBINED_EQUIPMENT_ID')
        if not mid.isdigit() or int(mid) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_VIRTUAL_METER_ID')
        cnx = mysql.connector.connect(**config.myems_system_db)
        cursor = cnx.cursor()

        def _require_row(description):
            # Consume one row from the last executed query; clean up the
            # connection and raise a 404 when nothing came back.
            if cursor.fetchone() is None:
                cursor.close()
                cnx.disconnect()
                raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                                       description=description)

        cursor.execute(" SELECT name "
                       " FROM tbl_combined_equipments "
                       " WHERE id = %s ", (id_,))
        _require_row('API.COMBINED_EQUIPMENT_NOT_FOUND')
        cursor.execute(" SELECT name "
                       " FROM tbl_virtual_meters "
                       " WHERE id = %s ", (mid,))
        _require_row('API.VIRTUAL_METER_NOT_FOUND')
        cursor.execute(" SELECT id "
                       " FROM tbl_combined_equipments_virtual_meters "
                       " WHERE combined_equipment_id = %s AND virtual_meter_id = %s ", (id_, mid))
        _require_row('API.COMBINED_EQUIPMENT_VIRTUAL_METER_RELATION_NOT_FOUND')
        cursor.execute(" DELETE FROM tbl_combined_equipments_virtual_meters "
                       " WHERE combined_equipment_id = %s AND virtual_meter_id = %s ", (id_, mid))
        cnx.commit()
        cursor.close()
        cnx.disconnect()
        resp.status = falcon.HTTP_204
| 44.163193
| 120
| 0.534517
| 8,968
| 86,869
| 4.914251
| 0.022302
| 0.033582
| 0.054911
| 0.071385
| 0.94552
| 0.939189
| 0.927753
| 0.918722
| 0.911869
| 0.906968
| 0
| 0.009838
| 0.36227
| 86,869
| 1,966
| 121
| 44.185656
| 0.785682
| 0.017509
| 0
| 0.897214
| 0
| 0.003096
| 0.22128
| 0.086871
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029102
| false
| 0.00743
| 0.003715
| 0
| 0.040248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b27c4501c5cbb513b2e6305015a23c28e275a1c5
| 2,579
|
py
|
Python
|
src/nncomp_molecule/criterions/focal_loss.py
|
k-fujikawa/Kaggle-BMS-Molecular-Translation
|
5503572686ed6c4082e276d9e17078185249be9e
|
[
"MIT"
] | 3
|
2021-08-29T21:07:37.000Z
|
2022-03-30T07:46:57.000Z
|
src/nncomp_molecule/criterions/focal_loss.py
|
k-fujikawa/Kaggle-BMS-Molecular-Translation-
|
5503572686ed6c4082e276d9e17078185249be9e
|
[
"MIT"
] | null | null | null |
src/nncomp_molecule/criterions/focal_loss.py
|
k-fujikawa/Kaggle-BMS-Molecular-Translation-
|
5503572686ed6c4082e276d9e17078185249be9e
|
[
"MIT"
] | 1
|
2022-03-30T10:20:25.000Z
|
2022-03-30T10:20:25.000Z
|
import torch
import nncomp.registry as R
@R.CriterionRegistry.add
class FocalLoss(torch.nn.Module):
    """Multi-class focal loss computed on one-hot targets.

    Positions where ``target == ignore_index`` contribute zero loss; the
    per-sample losses are summed over the last two dimensions and averaged
    over the batch.
    """

    def __init__(self, alpha=0.25, gamma=2.0, ignore_index=0):
        super().__init__()
        # alpha weights positive vs negative one-hot entries;
        # gamma is the focusing exponent.
        self.alpha = alpha
        self.gamma = gamma
        self.ignore_index = ignore_index
        self.lossfunc = torch.nn.BCEWithLogitsLoss(reduction="none")

    def forward(self, input: torch.Tensor, target: torch.Tensor):
        # One-hot encode targets on the same device as the logits.
        num_classes = input.shape[-1]
        one_hot = torch.eye(num_classes, device=input.device)[target]
        per_element_bce = self.lossfunc(input, one_hot)
        probabilities = torch.sigmoid(input)
        # Probability the model assigns to the ground-truth label state
        # (p for positives, 1 - p for negatives).
        gt_probabilities = torch.where(
            one_hot == 1,
            probabilities,
            1 - probabilities,
        )
        focal_weight = torch.pow(1 - gt_probabilities, self.gamma)
        loss = torch.where(
            one_hot == 1,
            self.alpha * focal_weight * per_element_bce,
            (1 - self.alpha) * focal_weight * per_element_bce
        )
        # Zero out positions carrying the ignore index before reducing.
        keep = (target != self.ignore_index)[:, :, None].expand(input.shape)
        loss = torch.where(keep, loss, torch.zeros_like(loss))
        loss = loss.sum(dim=(1, 2))
        loss = loss.mean()
        return loss
@R.CriterionRegistry.add
class FocalLossEx(torch.nn.Module):
    """Focal loss variant (same computation as FocalLoss in this module).

    Fix: removed a stray ``breakpoint()`` left inside ``forward`` which
    dropped every call into the debugger.
    """

    def __init__(self, alpha=0.25, gamma=2.0, ignore_index=0):
        super().__init__()
        # alpha weights positive vs negative one-hot entries;
        # gamma is the focusing exponent.
        self.alpha = alpha
        self.gamma = gamma
        self.ignore_index = ignore_index
        self.lossfunc = torch.nn.BCEWithLogitsLoss(reduction="none")

    def forward(self, input: torch.Tensor, target: torch.Tensor):
        """Return the batch-mean of per-sample summed focal losses."""
        n_output = input.shape[-1]
        # One-hot encode targets on the same device as the logits.
        one_hot_target = torch.eye(n_output, device=input.device)[target]
        bceloss = self.lossfunc(input, one_hot_target)
        probs = torch.sigmoid(input)
        # Probability assigned to the ground-truth label state.
        probs_gt = torch.where(
            one_hot_target == 1,
            probs,
            1 - probs,
        )
        modulator = torch.pow(1 - probs_gt, self.gamma)
        weighted_loss = torch.where(
            one_hot_target == 1,
            self.alpha * modulator * bceloss,
            (1 - self.alpha) * modulator * bceloss
        )
        # Zero out positions carrying the ignore index before reducing.
        weighted_loss = torch.where(
            (target != self.ignore_index)[:, :, None].expand(input.shape),
            weighted_loss,
            torch.zeros_like(weighted_loss),
        )
        weighted_loss = weighted_loss.sum(dim=(1, 2))
        weighted_loss = weighted_loss.mean()
        return weighted_loss
| 32.64557
| 74
| 0.59558
| 302
| 2,579
| 4.860927
| 0.188742
| 0.147139
| 0.065395
| 0.098093
| 0.920981
| 0.920981
| 0.920981
| 0.920981
| 0.920981
| 0.920981
| 0
| 0.015359
| 0.293137
| 2,579
| 78
| 75
| 33.064103
| 0.789907
| 0
| 0
| 0.835821
| 0
| 0
| 0.003102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0.029851
| 0
| 0.149254
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b28b3b4ae7d4ae1d7b2be6fc19c6a4dddb34dfdc
| 4,167
|
py
|
Python
|
tests/test_optimize.py
|
ad-lib27/cs207-FinalProject
|
9d7026c3579a7ab397b428343d8426a7c15b9f22
|
[
"MIT"
] | null | null | null |
tests/test_optimize.py
|
ad-lib27/cs207-FinalProject
|
9d7026c3579a7ab397b428343d8426a7c15b9f22
|
[
"MIT"
] | 9
|
2019-10-30T00:56:55.000Z
|
2019-12-10T16:47:10.000Z
|
tests/test_optimize.py
|
ad-lib27/cs207-FinalProject
|
9d7026c3579a7ab397b428343d8426a7c15b9f22
|
[
"MIT"
] | 1
|
2019-12-27T03:56:49.000Z
|
2019-12-27T03:56:49.000Z
|
import numpy as np
from itertools import product as prod
import math
import pytest
from adlib27.autodiff import AutoDiff as AD
from adlib27.elem_function import sin
from adlib27.optimize import optimize
#test optimize
def test_optimize():
    """End-to-end checks of optimize() against exact expected results.

    The expected values are literal snapshots of the optimizer's output
    (global max/min plus every critical point it reports), so any change
    in its numerics or result schema fails these asserts.
    """
    # 1-D linear function on [-1, 1]: extrema can only sit at the endpoints.
    assert optimize([-1.0, 0.0, 1.0], ['x'], '3*x') == {'global maximum': {'input range': ([1.0], [1.0]), 'value range': (3.0, 3.0), 'inflection type': 'endpoint'}, 'global minimum': {'input range': ([-1.0], [-1.0]), 'value range': (-3.0, -3.0), 'inflection type': 'endpoint'}, 'all critical points': [{'variables': ['x'], 'input range': ([-1.0], [-1.0]), 'value range': (-3.0, -3.0), 'inflection type': 'endpoint'}, {'input range': ([1.0], [1.0]), 'value range': (3.0, 3.0), 'inflection type': 'endpoint'}]}
    # 2-D sin(a)*cos(b) over a coarse 10-point grid on [-2, 2]: exercises
    # endpoint handling plus interior critical points in both variables.
    assert optimize(list(np.linspace(-2, 2, 10)), ['a','b'], 'sin(a)*cos(b)') == {'global maximum': {'input range': ([1.5555555555555554, -0.22222222222222232], [1.9999999999999998, 0.2222222222222221]), 'value range': (0.9752968029145749, 0.8869378807444431), 'inflection type': 'critical point'}, 'global minimum': {'input range': ([-2.0, -0.22222222222222232], [-1.5555555555555556, 0.2222222222222221]), 'value range': (-0.8869378807444431, -0.9752968029145749), 'inflection type': 'critical point'}, 'all critical points': [{'variables': ['a', 'b'], 'input range': ([-2.0, -2.0], [-2.0, -2.0]), 'value range': (0.37840124765396416, 0.37840124765396416), 'inflection type': 'endpoint'}, {'input range': ([2.0, 2.0], [2.0, 2.0]), 'value range': (-0.37840124765396416, -0.37840124765396416), 'inflection type': 'endpoint'}, {'variables': ['a', 'b'], 'input range': ([-2.0, -0.22222222222222232], [-1.5555555555555556, 0.2222222222222221]), 'value range': (-0.8869378807444431, -0.9752968029145749), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-2.0, 2.0], [-1.5555555555555556, 2.4444444444444446]), 'value range': (0.37840124765396416, 0.41609850595854114), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-0.22222222222222232, 1.5555555555555554], [0.2222222222222221, 1.9999999999999998]), 'value range': (-0.003358901551262173, -0.09171782372139398), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-0.22222222222222232, 1.5555555555555554], [0.2222222222222221, 1.9999999999999998]), 'value range': (-0.003358901551262173, -0.09171782372139398), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-0.22222222222222232, -2.0], [0.2222222222222221, -1.5555555555555556]), 'value range': (0.09171782372139398, 0.003358901551262124), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-0.22222222222222232, 1.5555555555555554], 
[0.2222222222222221, 1.9999999999999998]), 'value range': (-0.003358901551262173, -0.09171782372139398), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([1.5555555555555554, -0.22222222222222232], [1.9999999999999998, 0.2222222222222221]), 'value range': (0.9752968029145749, 0.8869378807444431), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([1.5555555555555554, -0.22222222222222232], [1.9999999999999998, 0.2222222222222221]), 'value range': (0.9752968029145749, 0.8869378807444431), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([1.5555555555555554, 2.0], [1.9999999999999998, 2.4444444444444446]), 'value range': (-0.41609850595854114, -0.37840124765396416), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([2.0, -2.0], [2.4444444444444446, -1.5555555555555556]), 'value range': (-0.37840124765396416, -0.013857857569814412), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([-2.0, -0.22222222222222232], [-1.5555555555555556, 0.2222222222222221]), 'value range': (-0.8869378807444431, -0.9752968029145749), 'inflection type': 'critical point'}, {'variables': ['a', 'b'], 'input range': ([2.0, 1.5555555555555554], [2.4444444444444446, 1.9999999999999998]), 'value range': (0.013857857569814613, 0.37840124765396416), 'inflection type': 'critical point'}]}
| 320.538462
| 3,417
| 0.682505
| 495
| 4,167
| 5.741414
| 0.121212
| 0.070373
| 0.061928
| 0.133005
| 0.808586
| 0.737509
| 0.715341
| 0.715341
| 0.715341
| 0.715341
| 0
| 0.381693
| 0.090233
| 4,167
| 13
| 3,417
| 320.538462
| 0.367977
| 0.00312
| 0
| 0
| 0
| 0
| 0.300915
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.1
| true
| 0
| 0.7
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 13
|
b28bbc5d1f8a36a1d72f8fde37db650fc449e909
| 7,501
|
py
|
Python
|
zerver/webhooks/splunk/tests.py
|
DD2480-group7-2020/zulip
|
9a1e18bcf383c38c35da168563a7345768c6d784
|
[
"Apache-2.0"
] | 1
|
2020-03-17T14:58:50.000Z
|
2020-03-17T14:58:50.000Z
|
zerver/webhooks/splunk/tests.py
|
DD2480-group7-2020/zulip
|
9a1e18bcf383c38c35da168563a7345768c6d784
|
[
"Apache-2.0"
] | 2
|
2020-09-07T22:32:24.000Z
|
2021-05-08T18:17:53.000Z
|
zerver/webhooks/splunk/tests.py
|
DD2480-group7-2020/zulip
|
9a1e18bcf383c38c35da168563a7345768c6d784
|
[
"Apache-2.0"
] | 1
|
2020-07-16T06:00:10.000Z
|
2020-07-16T06:00:10.000Z
|
# -*- coding: utf-8 -*-
from zerver.lib.test_classes import WebhookTestCase
class SplunkHookTests(WebhookTestCase):
    """Tests for the Splunk saved-search alert webhook.

    Each test posts a recorded Splunk payload fixture and checks the
    topic and Markdown message produced for it.  All payloads arrive
    form-urlencoded, so the shared ``_check`` helper fixes that content
    type once instead of repeating it in every test (the original
    repeated the identical 4-argument call eight times).
    """

    STREAM_NAME = 'splunk'
    URL_TEMPLATE = "/api/v1/external/splunk?api_key={api_key}&stream={stream}"
    FIXTURE_DIR_NAME = 'splunk'

    def _check(self, fixture_name: str, expected_topic: str,
               expected_message: str) -> None:
        # Splunk sends its alerts as application/x-www-form-urlencoded;
        # centralize the send-and-verify boilerplate here.
        self.send_and_test_stream_message(fixture_name,
                                          expected_topic,
                                          expected_message,
                                          content_type="application/x-www-form-urlencoded")

    def test_splunk_search_one_result(self) -> None:
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        # define the expected message contents
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        # using fixture named splunk_search_one_result, execute this test
        self._check('search_one_result', expected_topic, expected_message)

    def test_splunk_short_search_name(self) -> None:
        # don't provide a topic so the search name is used instead
        expected_topic = u"This search's name isn't that long"
        expected_message = """
Splunk alert from saved search:
* **Search**: [This search's name isn't that long](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('short_search_name', expected_topic, expected_message)

    def test_splunk_long_search_name(self) -> None:
        # don't provide a topic so the search name is used instead;
        # overly long names should be truncated with an ellipsis.
        expected_topic = u"this-search's-got-47-words-37-sentences-58-words-we-wanna..."
        expected_message = """
Splunk alert from saved search:
* **Search**: [this-search's-got-47-words-37-sentences-58-words-we-wanna-know-details-of-the-search-time-of-the-search-and-any-other-kind-of-thing-you-gotta-say-pertaining-to-and-about-the-search-I-want-to-know-authenticated-user's-name-and-any-other-kind-of-thing-you-gotta-say](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('long_search_name', expected_topic, expected_message)

    def test_splunk_missing_results_link(self) -> None:
        # A payload without results_link should render a placeholder.
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](Missing results_link)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('missing_results_link', expected_topic, expected_message)

    def test_splunk_missing_search_name(self) -> None:
        # A payload without search_name should render a placeholder.
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [Missing search_name](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('missing_search_name', expected_topic, expected_message)

    def test_splunk_missing_host(self) -> None:
        # A payload without a host field should render a placeholder.
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: Missing host
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('missing_host', expected_topic, expected_message)

    def test_splunk_missing_source(self) -> None:
        # A payload without a source field should render a placeholder.
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `Missing source`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
        self._check('missing_source', expected_topic, expected_message)

    def test_splunk_missing_raw(self) -> None:
        # A payload without _raw should render a placeholder.
        self.url = self.build_webhook_url(topic=u"New Search Alert")
        expected_topic = u"New Search Alert"
        expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Missing _raw`
""".strip()
        self._check('missing_raw', expected_topic, expected_message)

    def get_body(self, fixture_name: str) -> str:
        # Fixtures for this integration are stored as JSON files.
        return self.webhook_fixture_data("splunk", fixture_name, file_type="json")
| 48.083333
| 445
| 0.628849
| 967
| 7,501
| 4.648397
| 0.149948
| 0.05673
| 0.024027
| 0.040044
| 0.858287
| 0.858287
| 0.852058
| 0.852058
| 0.83604
| 0.821357
| 0
| 0.055516
| 0.243568
| 7,501
| 155
| 446
| 48.393548
| 0.736694
| 0.031462
| 0
| 0.726496
| 0
| 0.128205
| 0.526381
| 0.074666
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.008547
| 0.008547
| 0.128205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b28c64c03c9363dc7727e32c59eb2d4c26c928a1
| 3,924
|
py
|
Python
|
tests/modules/epp/test_epp_login.py
|
bladeroot/heppy
|
b597916ff80890ca057b17cdd156e90bbbd9a87a
|
[
"BSD-3-Clause"
] | 20
|
2016-06-02T20:29:29.000Z
|
2022-01-31T07:47:02.000Z
|
tests/modules/epp/test_epp_login.py
|
bladeroot/heppy
|
b597916ff80890ca057b17cdd156e90bbbd9a87a
|
[
"BSD-3-Clause"
] | 1
|
2018-10-09T16:09:24.000Z
|
2018-10-10T08:17:42.000Z
|
tests/modules/epp/test_epp_login.py
|
bladeroot/heppy
|
b597916ff80890ca057b17cdd156e90bbbd9a87a
|
[
"BSD-3-Clause"
] | 7
|
2018-04-11T16:05:06.000Z
|
2020-01-28T16:30:40.000Z
|
#!/usr/bin/env python
import unittest
from ..TestCase import TestCase
class TestEppLogin(TestCase):
    """Rendering tests for the EPP ``<login>`` command.

    Each test passes a request dict to ``assertRequest`` and compares the
    rendered output against an expected EPP XML document.
    NOTE(review): leading whitespace inside the XML literals may have been
    lost in transcription -- confirm against the renderer's actual output.
    """

    def test_render_epp_login_request_min(self):
        # Minimal request: only clID, pw and objURIs are supplied; the
        # rendered XML shows version 1.0 / lang "en", presumably defaults
        # applied by the renderer -- verify in the template.
        self.assertRequest('''<?xml version="1.0" ?>
<epp xmlns="urn:ietf:params:xml:ns:epp-1.0">
<command>
<login>
<clID>ClientX</clID>
<pw>2fooBar</pw>
<options>
<version>1.0</version>
<lang>en</lang>
</options>
<svcs>
<objURI>urn:ietf:params:xml:ns:obj1</objURI>
<objURI>urn:ietf:params:xml:ns:obj2</objURI>
<objURI>urn:ietf:params:xml:ns:obj3</objURI>
</svcs>
</login>
<clTRID>AA-00</clTRID>
</command>
</epp>''', {
            'command': 'epp:login',
            'clID': 'ClientX',
            'pw': '2fooBar',
            'objURIs': [
                'urn:ietf:params:xml:ns:obj1',
                'urn:ietf:params:xml:ns:obj2',
                'urn:ietf:params:xml:ns:obj3'
            ],
        })

    def test_render_epp_login_request(self):
        # Full request: adds newPW, explicit version/lang, and extension
        # URIs rendered inside <svcExtension>.
        self.assertRequest('''<?xml version="1.0" ?>
<epp xmlns="urn:ietf:params:xml:ns:epp-1.0">
<command>
<login>
<clID>ClientX</clID>
<pw>2fooBar</pw>
<newPW>bar-FOO2</newPW>
<options>
<version>4.2</version>
<lang>ua</lang>
</options>
<svcs>
<objURI>urn:ietf:params:xml:ns:obj1</objURI>
<objURI>urn:ietf:params:xml:ns:obj2</objURI>
<objURI>urn:ietf:params:xml:ns:obj3</objURI>
<svcExtension>
<extURI>http://custom/obj1ext-1.0</extURI>
<extURI>http://custom/obj1ext-2.0</extURI>
</svcExtension>
</svcs>
</login>
<clTRID>AA-00</clTRID>
</command>
</epp>''', {
            'command': 'epp:login',
            'clID': 'ClientX',
            'pw': '2fooBar',
            'newPW': 'bar-FOO2',
            'version': 4.2,
            'lang': 'ua',
            'objURIs': [
                'urn:ietf:params:xml:ns:obj1',
                'urn:ietf:params:xml:ns:obj2',
                'urn:ietf:params:xml:ns:obj3'
            ],
            'extURIs': [
                'http://custom/obj1ext-1.0',
                'http://custom/obj1ext-2.0'
            ]
        })

    def test_render_epp_login_request_alt(self):
        # Same expected XML as above but using the alternate input keys
        # login/password/newPassword instead of clID/pw/newPW, so both
        # spellings must render identically.
        self.assertRequest('''<?xml version="1.0" ?>
<epp xmlns="urn:ietf:params:xml:ns:epp-1.0">
<command>
<login>
<clID>ClientX</clID>
<pw>2fooBar</pw>
<newPW>bar-FOO2</newPW>
<options>
<version>4.2</version>
<lang>ua</lang>
</options>
<svcs>
<objURI>urn:ietf:params:xml:ns:obj1</objURI>
<objURI>urn:ietf:params:xml:ns:obj2</objURI>
<objURI>urn:ietf:params:xml:ns:obj3</objURI>
<svcExtension>
<extURI>http://custom/obj1ext-1.0</extURI>
<extURI>http://custom/obj1ext-2.0</extURI>
</svcExtension>
</svcs>
</login>
<clTRID>AA-00</clTRID>
</command>
</epp>''', {
            'command': 'epp:login',
            'login': 'ClientX',
            'password': '2fooBar',
            'newPassword': 'bar-FOO2',
            'version': 4.2,
            'lang': 'ua',
            'objURIs': [
                'urn:ietf:params:xml:ns:obj1',
                'urn:ietf:params:xml:ns:obj2',
                'urn:ietf:params:xml:ns:obj3'
            ],
            'extURIs': [
                'http://custom/obj1ext-1.0',
                'http://custom/obj1ext-2.0'
            ]
        })
if __name__ == '__main__':
    # Allow running this test module directly via the unittest CLI.
    unittest.main()
| 31.142857
| 62
| 0.445973
| 395
| 3,924
| 4.374684
| 0.159494
| 0.085069
| 0.157986
| 0.194444
| 0.895833
| 0.895833
| 0.847222
| 0.847222
| 0.847222
| 0.847222
| 0
| 0.033292
| 0.387615
| 3,924
| 125
| 63
| 31.392
| 0.685809
| 0.005097
| 0
| 0.862069
| 0
| 0
| 0.694338
| 0.239303
| 0
| 0
| 0
| 0
| 0.025862
| 1
| 0.025862
| false
| 0.017241
| 0.017241
| 0
| 0.051724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b28dd03cf88c86defe6afc263d1a5848ef142769
| 436,491
|
py
|
Python
|
pirates/inventory/ItemData.py
|
ksmit799/POTCO-PS
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 8
|
2017-01-24T04:33:29.000Z
|
2020-11-01T08:36:24.000Z
|
pirates/inventory/ItemData.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 1
|
2017-03-02T18:05:17.000Z
|
2017-03-14T06:47:10.000Z
|
pirates/inventory/ItemData.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 11
|
2017-03-02T18:46:07.000Z
|
2020-11-01T08:36:26.000Z
|
from panda3d.core import LVector3f
itemInfo = {1: [51, 2, 0, 1, u'Rusty Cutlass', u'RUSTY_CUTLASS', 1, 1, 1, 1, 1, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', u"It's a bit crude, but it still has an edge.", 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_swd_cutlass_a', 1, 0, 6, 0, 0, 0, 1, 0, 0, 3],
2: [51, 2, 200, 2, u'Iron Cutlass', u'IRON_CUTLASS', 1, 1, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', u'A well crafted iron blade. A good weapon!', 0, 0, 0, '', 1, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 1, 5, 9, 0, 0, 0, 2, 0, 0, 5.5],
3: [51, 2, 1000, 3, u'Steel Cutlass', u'STEEL_CUTLASS', 1, 1, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', u'An ornate steel cutlass. Well balanced and sharp!', 5, 0, 0, '', 2, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 10, 12, 0, 0, 0, 3, 0, 0, 8],
4: [51, 2, 5000, 4, u'Fine Cutlass', u'FINE_CUTLASS', 1, 1, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', u"A Fine Cutlass. It is crafted with pride by the Caribbean's best blacksmiths.", 10, 0, 0, '', 3, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 15, 16, 0, 0, 0, 4, 0, 0, 11],
5: [51, 2, 10000, 5, u'Pirate Blade', u'PIRATE_BLADE', 1, 1, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', u'A Pirate Blade, a clear warning sign to any EITC or Navy blokes.', 15, 0, 0, '', 4, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 1, 20, 22, 0, 0, 0, 5, 0, 0, 15],
7: [51, 2, 30, 7, u'Worn Cutlass', u'WORN_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_swd_cutlass_a', 1, 0, 7, 0, 0, 0, 1, 0, 0, 3.5],
8: [51, 2, 60, 8, u"Swabbie's Cutlass", u'SWABBIE_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12100, 0, 0, 0, 0, 0, u'pir_m_hnd_swd_cutlass_b', 1, 2, 11, 0, 0, 0, 1, 0, 0, 6.5],
9: [51, 2, 150, 9, u"Deck-hand's Cutlass", u'DECKHAND_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 4, 15, 0, 1, 3509, 2, 0, 0, 11.5],
10: [51, 2, 150, 10, u"Cabin Boy's Cutlass", u'CABIN_BOY_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_a', 1, 3, 13, 0, 1, 3505, 1, 0, 0, 11.5],
11: [51, 2, 50, 11, u'Light Cutlass', u'LIGHT_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 2, 11, 0, 0, 0, 3, 0, 0, 5.5],
12: [51, 2, 90, 12, u'Heavy Cutlass', u'HEAVY_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 3, 13, 0, 0, 0, 2, 0, 0, 8.5],
13: [51, 2, 60, 13, u"Sailor's Cutlass", u'SAILOR_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_a', 1, 1, 9, 0, 0, 0, 2, 0, 0, 6.5],
14: [51, 2, 760, 14, u'Boarding Cutlass', u'BOARDING_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 6, 22, 0, 2, 3509, 3, 0, 0, 22],
15: [51, 2, 2560, 15, u'War Cutlass', u'WAR_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 12, 39, 0, 3, 3509, 4, 0, 0, 35.5],
16: [51, 2, 210, 16, u'Sharp Cutlass', u'SHARP_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 0, 0, 0, '', 1, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 3, 16, 0, 0, 0, 3, 0, 0, 11],
17: [51, 2, 1050, 17, u'Tempered Cutlass', u'TEMPERED_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 2, 0, 0, '', 2, 100, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 7, 29, 0, 0, 0, 3, 0, 0, 22.5],
18: [51, 2, 2020, 18, u'Engraved Cutlass', u'ENGRAVED_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 6, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12107, 1, 12102, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 11, 37, 0, 0, 0, 3, 0, 0, 31.5],
19: [51, 2, 5870, 19, u'Bejeweled Cutlass', u'BEJEWELED_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 10, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 12107, 2, 12102, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 15, 50, 0, 0, 0, 3, 0, 0, 44],
20: [51, 2, 8490, 20, u'Masterwork Cutlass', u'MASTERWORK_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 14, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12107, 3, 12102, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 19, 58, 0, 0, 0, 3, 0, 0, 53],
21: [51, 2, 570, 21, u"Navy Sergeant's Cutlass", u'NAVY_SERGEANT_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 9, 28, 0, 1, 2239, 3, 0, 0, 19],
22: [51, 2, 2680, 22, u"EITC Mercenary's Cutlass", u'EITC_MERVENARY_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12105, 1, 12102, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 20, 50, 0, 2, 2239, 1, 0, 0, 42],
23: [51, 2, 100, 23, u"Bandit's Cutlass", u'BANDIT_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_a', 1, 2, 14, 0, 0, 0, 1, 0, 0, 7],
24: [51, 2, 410, 24, u"Pirate's Cutlass", u'PIRATE_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 4, 18, 0, 1, 3509, 1, 0, 0, 16],
25: [51, 2, 2080, 25, u"Slasher's Cutlass", u'SLASHER_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12102, 1, 12101, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 14, 38, 0, 2, 2239, 2, 0, 0, 37],
26: [51, 2, 570, 26, u'Poisoned Cutlass', u'POISONED_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 1, 0, 0, '', 1, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 6, 22, 0, 1, 3505, 1, 0, 0, 19],
27: [51, 2, 2850, 27, u'Venomed Cutlass', u'VENOMED_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 7, 0, 0, '', 2, 101, 1, 200, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 12, 39, 0, 2, 3505, 1, 0, 0, 37.5],
28: [51, 2, 10150, 28, u"Assassin's Cutlass", u'ASSASSIN_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 13, 0, 0, '', 3, 101, 1, 200, 1, 201, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 18, 56, 0, 3, 3505, 1, 0, 0, 58],
29: [51, 2, 520, 29, u'Monkey Cutlass', u'MONKEY_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 1, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 5, 20, 0, 1, 3508, 2, 0, 0, 18],
30: [51, 2, 3160, 30, u'Baboon Cutlass', u'BABOON_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 7, 0, 0, '', 2, 121, 0, 0, 0, 0, 1, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 12, 39, 0, 2, 3508, 2, 0, 0, 39.5],
31: [51, 2, 6420, 31, u'Orangutan Cutlass', u'ORANGUTAN_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 14, 0, 0, '', 3, 121, 0, 0, 0, 0, 2, 12108, 1, 12109, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 19, 53, 0, 2, 3508, 2, 0, 0, 56.5],
32: [51, 2, 16490, 32, u'Gorilla Cutlass', u'GORILLA_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 21, 0, 0, '', 3, 121, 0, 0, 0, 0, 2, 12108, 2, 12109, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 26, 72, 0, 3, 3508, 2, 0, 0, 74],
33: [51, 2, 760, 33, u'Voodoo Hunter Cutlass', u'VOODOO_HUNTER_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 5, 0, 0, '', 1, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 10, 30, 0, 1, 3509, 5, 0, 0, 22],
34: [51, 2, 3000, 34, u'Witch Hunter Cutlass', u'WITCH_HUNTER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 12, 0, 0, '', 2, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 17, 49, 0, 2, 3509, 5, 0, 0, 38.5],
35: [51, 2, 9140, 35, u'Cutlass of the Inquisition', u'CUTLASS_OF_THE_INQUISITION', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 19, 0, 0, '', 3, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 24, 68, 0, 3, 3509, 5, 0, 0, 55],
36: [51, 2, 520, 36, u'Silver Cutlass', u'SILVER_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 7, 24, 0, 1, 3500, 5, 0, 0, 18],
37: [51, 2, 2700, 37, u'Holy Cutlass', u'HOLY_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 8, 0, 0, '', 1, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 13, 41, 0, 2, 3500, 5, 0, 0, 36.5],
38: [51, 2, 9140, 38, u'Sacred Cutlass', u'SACRED_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 14, 0, 0, '', 1, 209, 0, 0, 0, 0, 1, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 19, 58, 0, 3, 3500, 5, 0, 0, 55],
39: [51, 2, 12740, 39, u'Divine Cutlass', u'DIVINE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', 0, 20, 0, 0, '', 1, 209, 0, 0, 0, 0, 2, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 25, 70, 0, 3, 3500, 5, 0, 0, 65],
40: [51, 2, 760, 40, u"Brute's Cutlass", u'BRUTE_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 8, 26, 0, 1, 2239, 1, 0, 0, 22],
41: [51, 2, 4360, 41, u"Brawler's Cutlass", u'BRAWLER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 11, 0, 0, '', 1, 205, 0, 0, 0, 0, 2, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 16, 47, 0, 2, 2239, 1, 0, 0, 46.5],
42: [51, 2, 15180, 42, u"Bruiser's Cutlass", u'BRUISER_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 19, 0, 0, '', 2, 205, 0, 0, 0, 0, 3, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 24, 68, 0, 3, 2239, 1, 0, 0, 71],
43: [51, 1, 2420, 43, u'Dark Cutlass', u'DARK_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 10, 0, 0, '', 1, 204, 1, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 15, 45, 0, 1, 3501, 6, 0, 0, 34.5],
44: [51, 1, 10150, 44, u'Shadow Cutlass', u'SHADOW_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 19, 0, 0, '', 2, 204, 2, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 24, 68, 0, 2, 3501, 6, 0, 0, 58],
45: [51, 1, 16490, 45, u'Forbidden Cutlass', u'FORBIDDEN_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 23, 0, 0, '', 3, 204, 3, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 28, 76, 0, 3, 3501, 6, 0, 0, 74],
46: [51, 2, 90, 46, u"Mariner's Cutlass", u'MARINER_CUTLASS', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 5, 17, 0, 0, 0, 2, 0, 0, 8.5],
47: [51, 2, 820, 47, u"Quarter Master's Cutlass", u'QUARTER_MASTER_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 9, 28, 0, 1, 3504, 2, 0, 0, 23],
48: [51, 2, 1210, 48, u"First Mate's Cutlass", u'FIRST_MATE_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 12, 34, 0, 1, 3504, 4, 0, 0, 28],
49: [51, 2, 3820, 49, u"Lieutenant's Cutlass", u'LIEUTENANT_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 15, 45, 0, 2, 3504, 4, 0, 0, 43.5],
50: [51, 2, 5550, 50, u"Commander's Cutlass", u'COMMANDER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12102, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 18, 51, 0, 2, 3504, 4, 0, 0, 52.5],
51: [51, 2, 7850, 51, u"Captain's Cutlass", u'CAPTAIN_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12103, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 21, 57, 0, 3, 3504, 4, 0, 0, 62.5],
52: [51, 2, 16940, 52, u"Commodore's Cutlass", u'COMMODORE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12103, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 24, 68, 0, 3, 3504, 4, 0, 0, 75],
53: [51, 2, 23820, 53, u"Vice Admiral's Cutlass", u'VICE_ADMIRAL_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12104, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 27, 74, 0, 4, 3504, 4, 0, 0, 89],
54: [51, 2, 30060, 54, u"Admiral's Cutlass", u'ADMIRAL_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12104, 3, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 30, 80, 0, 4, 3504, 4, 0, 0, 100],
55: [51, 2, 2280, 55, u'Shark Blade', u'SHARK_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 7, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 12, 39, 0, 1, 2244, 6, 0, 0, 33.5],
56: [51, 2, 10500, 56, u'Tiger Shark Blade', u'TIGER_SHARK_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 16, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 21, 62, 0, 2, 2244, 6, 0, 0, 59],
57: [51, 2, 20230, 57, u'Black Shark Blade', u'BLACK_SHARK_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 25, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 30, 80, 0, 3, 2244, 6, 0, 0, 82],
58: [51, 1, 3160, 58, u'Bane Blade Cutlass', u'BANE_BLADE_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 13, 0, 0, '', 1, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 18, 51, 0, 1, 2333, 6, 0, 0, 39.5],
59: [51, 1, 11590, 59, u'Bane Fire Cutlass', u'BANE_FIRE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 19, 0, 0, '', 2, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 24, 68, 0, 2, 2333, 6, 0, 0, 62],
60: [51, 1, 19740, 60, u'Bane Curse Cutlass', u'BANE_CURSE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 24, 0, 0, '', 3, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 29, 78, 0, 3, 2333, 6, 0, 0, 81],
61: [51, 2, 820, 61, u"Sea Dog's Cutlass", u'SEA_DOG_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 10, 30, 0, 1, 2244, 4, 0, 0, 23],
62: [51, 2, 2080, 62, u"Swashbuckler's Cutlass", u'SWASHBUCKLER_CUTLASS', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 14, 38, 0, 2, 2244, 4, 0, 0, 37],
63: [51, 2, 4550, 63, u"Buccaneer's Cutlass", u'BUCCANEER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12105, 1, 12108, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 18, 51, 0, 2, 2244, 4, 0, 0, 47.5],
64: [51, 2, 8620, 64, u"Privateer's Cutlass", u'PRIVATEER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12105, 2, 12108, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 22, 59, 0, 3, 2244, 4, 0, 0, 65.5],
65: [51, 2, 17850, 65, u"Corsair's Cutlass", u'CORSAIR_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12105, 2, 12108, 1, 12106, 1, u'pir_m_hnd_swd_cutlass_i', 1, 26, 72, 0, 3, 2244, 4, 0, 0, 77],
66: [51, 2, 23290, 66, u'Seven Seas Cutlass', u'SEVEN_SEAS_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12105, 3, 12108, 2, 12106, 1, u'pir_m_hnd_swd_cutlass_i', 1, 30, 80, 0, 3, 2244, 4, 0, 0, 88],
67: [51, 2, 5870, 67, u'Bloodfire Cutlass', u'BLOODFIRE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_h', 0, 17, 0, 0, '', 1, 122, 1, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_h', 1, 22, 64, 0, 0, 0, 1, 0, 0, 44],
68: [51, 1, 10860, 68, u'Crimsonfire Cutlass', u'CRIMSONFIRE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_h', 0, 21, 0, 0, '', 2, 122, 2, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_h', 1, 26, 72, 0, 0, 0, 1, 0, 0, 60],
69: [51, 1, 17390, 69, u'Emberfire Cutlass', u'EMBERFIRE_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_h', 0, 25, 0, 0, '', 3, 122, 3, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_h', 1, 30, 80, 0, 0, 0, 1, 0, 0, 76],
70: [51, 1, 1550, 70, u'Life Stealer Cutlass', u'LIFE_STEALER_CUTLASS', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 5, 0, 0, '', 1, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 10, 35, 0, 1, 2239, 1, 0, 0, 27.5],
71: [51, 1, 6410, 71, u'Spirit Stealer Cutlass', u'SPIRIT_STEALER_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 11, 0, 0, '', 2, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 16, 52, 0, 2, 2239, 1, 0, 0, 46],
72: [51, 1, 13130, 72, u'Soul Stealer Cutlass', u'SOUL_STEALER_CUTLASS', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 21, 0, 0, '', 3, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 26, 72, 0, 3, 2239, 1, 0, 0, 66],
77: [51, 0, 112600, 77, u'Heart of Padres del Fuego', u'HEART_OF_PADRES_DEL_FUEGO', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_h', 0, 25, 0, 0, '', 3, 122, 0, 0, 0, 0, 3, 12110, 3, 12104, 0, 0, 1, u'pir_m_hnd_swd_cutlass_h', 1, 30, 90, 0, 5, 2333, 4, 0, 0, 150],
78: [51, 0, 66230, 78, u'Lost Sword of El Dorado', u'LOST_SWORD_OF_EL_DORADO', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_k', 0, 25, 0, 0, '', 3, 122, 0, 0, 0, 0, 3, 12107, 2, 12110, 0, 0, 1, u'pir_m_hnd_swd_cutlass_k', 1, 30, 90, 0, 3, 2244, 4, 0, 0, 115],
79: [51, 0, 41510, 79, u'Lost Blade of Calypso', u'LOST_BLADE_OF_CALYPSO', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 15, 0, 0, '', 3, 160, 3, 161, 0, 0, 2, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 20, 70, 0, 3, 3501, 6, 0, 0, 91],
80: [51, 0, 49110, 80, u"Jack Sparrow's Blade", u'JACK_SPARROW_BLADE', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_h', 0, 25, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12108, 3, 12106, 0, 0, 1, u'pir_m_hnd_swd_cutlass_h', 1, 30, 90, 0, 3, 2244, 4, 0, 0, 99],
81: [51, 0, 23910, 81, u'Sword of Quetzalcoatl', u'SWORD_OF_QUETZALCOATL', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_j', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_j', 1, 30, 90, 0, 3, 2244, 4, 0, 0, 69],
82: [51, 0, 20580, 82, u"Montezuma's Blade", u'MONTEZUMA_BLADE', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 25, 80, 0, 3, 2244, 4, 0, 0, 64],
83: [51, 1, 690, 83, u'Spectral Cutlass', u'SPECTRAL_CUTLASS', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_cutlass_g', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_g', 1, 0, 24, 0, 1, 3501, 6, 0, 0, 18],
84: [51, 2, 840, 84, u'Darkfire Cutlass', u'DARKFIRE_CUTLASS', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 0, 22, 0, 1, 2333, 4, 0, 0, 20],
85: [51, 2, 690, 85, u"Conquistador's Cutlass", u'CONQUISTADOR_CUTLASS', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_cutlass_i', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_i', 1, 0, 20, 0, 1, 2244, 4, 0, 0, 18],
86: [51, 2, 43350, 86, u'Lost Sword of El Patron', u'LOST_SWORD_OF_EL_PATRON', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_k', 0, 22, 0, 0, '', 3, 161, 0, 0, 0, 0, 3, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_k', 1, 27, 84, 0, 3, 3510, 4, 0, 0, 93],
87: [51, 2, 220, 87, u'Short Cutlass', u'SHORT_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', u'A shorter cutlass meant for close quarters.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_a', 1, 3, 16, 0, 0, 0, 2, 0, 0, 8],
88: [51, 2, 480, 88, u'Voodoo Cutlass', u'VOODOO_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_b', u'Enchanted with a voodoo hex that heals the user.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_b', 1, 5, 20, 0, 1, 3509, 2, 0, 0, 14],
89: [51, 2, 430, 89, u'Battle Cutlass', u'BATTLE_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_c', u'A powerful fighting weapon.', 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_c', 1, 8, 26, 0, 0, 0, 2, 0, 0, 13],
90: [51, 2, 680, 90, u'Ornate Cutlass', u'ORNATE_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_d', u'A strong cutlass with a golden hilt.', 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_d', 1, 13, 36, 0, 0, 0, 2, 0, 0, 18],
91: [51, 2, 990, 91, u'Grand Cutlass', u'GRAND_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_e', u'A long cutlass with a mighty swing.', 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_e', 1, 18, 46, 0, 0, 0, 2, 0, 0, 23],
92: [51, 2, 1340, 92, u'Royal Cutlass', u'ROYAL_CUTLASS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_f', u'A mighty cutlass used by the Royal Guard.', 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_cutlass_f', 1, 23, 56, 0, 0, 0, 2, 0, 0, 28],
93: [51, 2, 0, 93, u'Dual Rusty Cutlasses', u'DUAL_RUSTY_CUTLASS', 1, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', u"It's a bit crude, but it still has an edge.", 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_swd_cutlass_a', 32, 0, 6, 0, 0, 0, 1, 0, 0, 3],
24700: [52, 0, 1500, 24700, u'Capris_French_Assassin', u'FRENCH_ASSASSIN_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_f_short_pant', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, -1, 0, -1, 17, 0],
24701: [52, 0, 1500, 24701, u'Capris_Baroness', u'BARONESS_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 45, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, -1, 0, -1, 18, 0],
24702: [52, 0, 1500, 24702, u'Capris_China_Warrior', u'CHINA_WARRIOR_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 53, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, -1, 0, -1, 19, 0],
24703: [52, 0, 700, 24703, u'Capris_Diplomat', u'DIPLOMAT_CAPRIS', 2, 4, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_short_pant', 62, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, -1, 0, -1, 20, 0],
24704: [52, 0, 1500, 24704, u'Capris_Rogue_Privateer', u'ROGUE_PRIVATEER_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, -1, 0, -1, 21, 0],
24705: [52, 0, 1500, 24705, u'Capris_Scourge', u'SCOURGE_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 51, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, -1, 0, -1, 22, 0],
24706: [52, 0, 1500, 24706, u'Capris_Sea_Serpent', u'SEA_SERPENT_CAPRIS', 3, 4, 0, 0, 0, 0, 0, 0, 16, u'pir_t_ico_pnt_f_short_pant', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, -1, 0, -1, 23, 0],
24707: [52, 0, 700, 24707, u'Capris_Zombie_Pirate', u'ZOMBIE_PIRATE_CAPRIS', 2, 4, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_f_short_pant', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, -1, 0, -1, 24, 0],
24708: [52, 0, 700, 24708, u'Capris_Zombies_Pirate', u'ZOMBIES_PIRATE_CAPRIS', 2, 4, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_f_short_pant', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, -1, 0, -1, 24, 0],
24601: [52, 2, 700, 24601, u'Skirt_Tan', u'LINEN_SKIRT', 1, 4, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pnt_f_skirt', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 1],
24602: [52, 2, 300, 24602, u'Skirt_Patchwork', u'HAND_ME_DOWN_SKIRT', 1, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_skirt', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 1, 0],
24603: [52, 2, 700, 24603, u'Skirt_Layered', u'LAYERED_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_f_skirt', 2, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 2, 0],
24604: [52, 2, 300, 24604, u'Skirt_LeatherTrim', u'POTATO_SACK_SKIRT', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_f_skirt', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 3, 1],
24605: [52, 2, 700, 24605, u'Skirt_Slip', u'SILK_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_f_skirt', 3, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 4, 1],
24606: [52, 2, 300, 24606, u'Skirt_Plain', u'DENIM_SKIRT', 1, 4, 1, 1, 0, 0, 0, 0, 24, u'pir_t_ico_pnt_f_skirt', 2, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 5, 0],
24607: [52, 2, 700, 24607, u'Skirt_Print', u'WOODLAND_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 17, u'pir_t_ico_pnt_f_skirt', 9, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 6, 0],
24608: [52, 2, 1500, 24608, u'Skirt_Red', u'CANDYBOX_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_pnt_f_skirt', 35, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 7, 0],
24609: [52, 1, 700, 24609, u'Skirt_Brown', u'OBLIQUE_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_skirt', 2, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 8, 0],
24610: [52, 0, 700, 24610, u'Skirt_Green', u'PEA_SOUP_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 41, u'pir_t_ico_pnt_f_skirt', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 9, 0],
24611: [52, 0, 700, 24611, u'Skirt_LightBlue', u'TROPIC_SKIRT', 2, 4, 1, 1, 0, 0, 0, 0, 33, u'pir_t_ico_pnt_f_skirt', 35, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 10, 0],
24612: [52, 0, 1500, 24612, u'Skirt_Pink', u'PRINCESS_SKIRT', 3, 4, 1, 1, 0, 0, 0, 0, 34, u'pir_t_ico_pnt_f_skirt', 3, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 11, 0],
24613: [52, 1, 700, 24613, u'Skirt_Red_WhiteBelt', u'DECK_SKIRT', 2, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_skirt', 1, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, -1, 2, -1, 12, 0],
24801: [52, 2, 400, 24801, u'Skirt_Gypsy', u'GYPSY_SKIRT', 3, 4, 0, 0, 0, 0, 0, 1, 7, u'pir_t_ico_pnt_f_skirt', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
24802: [52, 2, 400, 24802, u'Skirt_Gypsy_Grunge', u'GYPSY_GRUNGE_SKIRT', 3, 4, 0, 0, 0, 0, 0, 1, 7, u'pir_t_ico_pnt_f_skirt', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 1, 0],
24614: [52, 1, 1500, 24614, u'Skirt_Yellow', u'JEWELED_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 19, u'pir_t_ico_pnt_f_skirt', 26, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, -1, 2, -1, 13, 0],
24615: [52, 1, 1500, 24615, u'Green_Purple_Skirt', u'GREEN_PURPLE_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_pnt_f_skirt', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, -1, 2, -1, 14, 0],
24812: [52, 2, 400, 24812, u'Skirt_Shopkeeper', u'SHOPKEEPER_SKIRT', 3, 4, 0, 0, 0, 0, 0, 1, 7, u'pir_t_ico_pnt_f_skirt', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 1, 0],
24616: [52, 1, 1500, 24616, u'Green_Embroidery_Skirt', u'GREEN_EMBROIDERY_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_pnt_f_skirt', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, -1, 2, -1, 15, 0],
24617: [52, 1, 1500, 24617, u'Saint_Patricks_Skirt', u'SAINT_PATRICKS_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 41, u'pir_t_ico_pnt_f_skirt', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 2, -1, 16, 0],
24618: [52, 1, 1500, 24618, u'Xmas_Skirt', u'XMAS_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_pnt_f_skirt', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, -1, 2, -1, 17, 0],
24619: [52, 1, 1500, 24619, u'Valentines_Skirt', u'VALENTINES_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_f_skirt', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, -1, 2, -1, 18, 0],
24620: [52, 0, 1500, 24620, u'Peacock_Skirt', u'PEACOCK_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 5, u'pir_t_ico_pnt_f_skirt', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 19, 0],
24621: [52, 0, 1500, 24621, u'Prince_Skirt', u'PRINCE_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_skirt', 63, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 2, -1, 20, 0],
24622: [52, 0, 1500, 24622, u'Wildfire_Skirt', u'WILDFIRE_SKIRT', 3, 4, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_f_skirt', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 2, -1, 21, 0],
8501: [51, 2, 0, 8501, u'Monster Weapon', u'MONSTER_WEAPON', 1, 13, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_cutlass_a', u"It's a bit crude, but it still has an edge.", u'#N/A', 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_swd_cutlass_a', 33, u'#N/A', 6, 0, 0, 0, 1, 0, 0, 3],
24901: [52, 2, 400, 24901, u'Pants_Navy', u'NAVY_PANTS', 2, 4, 0, 0, 0, 0, 0, 1, 23, u'pir_t_ico_pnt_m_long_tucked', 42, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5, 0, 0, 0],
24902: [52, 2, 400, 24902, u'Pants_EITC', u'EITC_PANTS', 2, 4, 0, 0, 0, 0, 0, 1, 32, u'pir_t_ico_pnt_m_long_tucked', 42, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 0, -1, 0],
1425: [51, 2, 8010, 1425, u'Blightfang Edge', u'BLIGHTFANG_EDGE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_f', 0, 31, 0, 0, u'rc.le.10lootTreasure', 3, 101, 2, 100, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_f', 38, 30, 95, 0, 3, 2333, 6, 0, 0, 89.5],
1426: [51, 2, 8560, 1426, u'Behemoth Blade', u'BEHEMOTH_BLADE', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_h', 0, 29, 0, 0, u'rc.le.10lootTreasure', 3, 121, 0, 0, 0, 0, 5, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_h', 38, 28, 101, 0, 3, 3501, 6, 0, 0, 92.5],
1427: [51, 2, 10300, 1427, u'Thunderspine Sword', u'THUNDERSPINE_SWORD', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_j', 0, 31, 0, 0, u'rc.le.10lootTreasure', 3, 100, 0, 0, 0, 0, 3, 12104, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_j', 5, 30, 95, 0, 3, 3703, 6, 0, 0, 101.5],
1428: [51, 2, 12660, 1428, u'Blade of the Abyss', u'BLADE_OF_THE_ABYSS', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 30, 0, 0, u'rc.le.10lootTreasure', 3, 122, 0, 0, 0, 0, 3, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 5, 29, 93, 0, 3, 3701, 20, 0, 0, 112.5],
26005: [52, 1, 1800, 26005, u'Shoe_Tall_Boots_Fur_Top', u'PURSUIT_BOOTS', 3, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 8, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 4, -1, 0],
26006: [52, 1, 1800, 26006, u'Shoe_Tall_Boots_Blue_Straps', u'INDOOR_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_sho_m_tall', 3, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, 1, -1, 5, -1, 0],
26007: [52, 0, 600, 26007, u'Shoe_Tall_Boots_Brown_Fur', u'MOUNTAIN_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 5, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 6, -1, 0],
26008: [52, 0, 600, 26008, u'Shoe_Tall_Boots_Brown_Laces', u'SERIOUS_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_m_tall', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 7, -1, 0],
26009: [52, 2, 600, 26009, u'Shoe_Tall_Boots_Advanced_Outfit', u'ADVENTURE_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 8, -1, 0],
8601: [51, 0, 1, 8601, u'Basic Fishing Rod', u'FISHING_ROD_1', 1, 14, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_bom_pouch', u'dum', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_tol_fishingPole', 34, u'#N/A', 1, 0, 0, 0, 0, 0, 0, 0.5],
8602: [51, 0, 1, 8602, u'Better Fishing Rod', u'FISHING_ROD_2', 1, 14, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_bom_pouch', u'doo', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_tol_fishingPoleMed', 34, u'#N/A', 1, 0, 0, 0, 0, 0, 0, 0.5],
8603: [51, 0, 1, 8603, u'Bestest Fishing Rod', u'FISHING_ROD_3', 1, 14, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_bom_pouch', u'daw', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_tol_fishingPoleLarge', 34, u'#N/A', 1, 0, 0, 0, 0, 0, 0, 0.5],
26010: [52, 2, 600, 26010, u'Shoe_Tall_Boots_Intermediate_Outfit', u'TRAVELERS_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 9, -1, 0],
26011: [52, 1, 1800, 26011, u'Shoe_Tall_Boots_Royal', u'ROYAL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sho_m_tall', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 1, -1, 10, -1, 0],
25001: [52, 2, 150, 25001, u'Sash_Basic_Outfit', u'RECRUIT_SASH', 2, 5, 0, 0, 1, 0, 0, 0, 3, u'pir_t_ico_blt_m_sash', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 15, 0, 0, 1],
26013: [52, 1, 1800, 26013, u'Shoe_Tall_Boots_Blue_Fur_Top', u'BLUE_FUR_TOP_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_sho_m_tall', 5, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, 1, -1, 12, -1, 0],
26014: [52, 1, 1800, 26014, u'Shoe_Tall_Boots_Spurs', u'SPUR_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, 1, -1, 13, -1, 0],
26015: [52, 1, 1800, 26015, u'Shoe_Tall_Boots_Saint_Patricks', u'SAINT_PATRICKS_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 38, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, -1, 14, -1, 0],
26016: [52, 1, 1800, 26016, u'Shoe_Tall_Boots_Valentines', u'VALENTINES_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 1, -1, 15, -1, 0],
26017: [52, 1, 1200, 26017, u'Shoe_Tall_Boots_Raven', u'RAVEN_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, 1, -1, 8, -1, 0],
26018: [52, 0, 2500, 26018, u'Shoe_Tall_Boots_China_Warrior', u'CHINA_WARRIOR_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_tall', 53, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, 1, -1, 16, -1, 0],
26019: [52, 0, 2500, 26019, u'Shoe_Tall_Boots_Peacock', u'PEACOCK_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_sho_m_tall', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 17, -1, 0],
26020: [52, 0, 2500, 26020, u'Shoe_Tall_Boots_Sea_Serpent', u'SEA_SERPENT_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 16, u'pir_t_ico_sho_m_tall', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 1, -1, 18, -1, 0],
25050: [52, 2, 150, 25050, u'Sash_Basic', u'BASIC_SASH', 1, 5, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_blt_m_sash', 31, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 1],
25051: [52, 2, 300, 25051, u'Sash_Buckle', u'BUCKLED_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_blt_m_sash', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
25052: [52, 1, 300, 25052, u'Sash_Gold_Tassle', u'FIERCE_SASH', 2, 5, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_blt_m_sash', 3, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 10, -1, 0, -1, 0],
25053: [52, 0, 300, 25053, u'Sash_Blue_Gold', u'TRIMMED_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_blt_m_sash', 1, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 0, -1, 0],
25054: [52, 0, 300, 25054, u'Sash_Pink', u'PINK_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 34, u'pir_t_ico_blt_m_sash', 3, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 0, -1, 0],
25055: [52, 0, 300, 25055, u'Sash_Red', u'BLOOD_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_blt_m_sash', 1, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, -1, 0, -1, 0],
25056: [52, 2, 300, 25056, u'Belt_Oval_Buckle_Oval', u'GOLD_SKULL_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_oval', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 0, -1, 0],
25057: [52, 2, 300, 25057, u'Belt_Oval_Buckle_Skull', u'JOLLY_BONES_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_oval', 1, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 0, -1, 0],
25058: [52, 2, 150, 25058, u'Belt_Oval_Buckle_Square', u'BOX_BELT', 1, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_square', 31, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 0, -1, 0],
25059: [52, 0, 300, 25059, u'Belt_Oval_Buckle_GoldSkull', u'TOP_SKULL_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_oval', 8, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 0, -1, 0],
25060: [52, 0, 300, 25060, u'Belt_Oval_Buckle_OvalGold', u'PROUD_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_blt_m_oval', 8, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 0, -1, 0],
25061: [52, 0, 300, 25061, u'Belt_Oval_Buckle_OvalGold2', u'ROUND_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_oval', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, -1, 0, -1, 0],
25062: [52, 0, 300, 25062, u'Belt_Square_Buckle_OvalGoldBrown', u'KNOCKOFF_PROUD__BELT', 2, 5, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_blt_m_oval', 5, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, -1, 0, -1, 0],
25063: [52, 0, 300, 25063, u'Belt_Square_Buckle_OvalGoldBlack', u'TOP_SKULL_KNOCKOFF_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_m_oval', 5, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, -1, 0, -1, 0],
25064: [52, 1, 300, 25064, u'Belt_Square_Black_Button', u'ENGRAVED_BOX_BELT', 2, 5, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_blt_m_square', 1, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, 15, -1, 0, -1, 0],
25065: [52, 1, 700, 25065, u'Belt_Square_Blue_Leather', u'INSPECTOR_BELT', 3, 5, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_blt_m_oval', 6, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, 16, -1, 0, -1, 0],
501: [51, 2, 70, 501, u'Rusty Sabre', u'RUSTY_SABRE', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_a', u'A rusty sabre, able to make fast attacks!', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_a', 2, 3, 7, 0, 0, 0, 1, 0, 0, 3.5],
502: [51, 2, 90, 502, u'Light Sabre', u'LIGHT_SABRE', 1, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_a', u'Sabres are quick swords able to deal rapid fast attacks.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_a', 2, 5, 10, 0, 0, 0, 1, 0, 0, 5],
503: [51, 2, 240, 503, u'Sharp Sabre', u'SHARP_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 1, 0, 0, '', 1, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 6, 14, 0, 0, 0, 2, 0, 0, 10],
504: [51, 2, 960, 504, u'Tempered Sabre', u'TEMPERED_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 5, 0, 0, '', 2, 100, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 10, 25, 0, 0, 0, 2, 0, 0, 20.5],
505: [51, 2, 1740, 505, u'Engraved Sabre', u'ENGRAVED_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 9, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12107, 1, 12102, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 14, 31, 0, 0, 0, 2, 0, 0, 28.5],
506: [51, 2, 4980, 506, u'Bejeweled Sabre', u'BEJEWELED_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 13, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 12107, 2, 12102, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 18, 42, 0, 0, 0, 2, 0, 0, 40],
507: [51, 2, 7090, 507, u'Masterwork Sabre', u'MASTERWORK_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 17, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12107, 3, 12102, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 22, 48, 0, 0, 0, 2, 0, 0, 48],
508: [51, 2, 580, 508, u"Navy Marine's Sabre", u'NAVY_MARINE_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12101, 2, 12100, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 7, 16, 0, 1, 3509, 3, 0, 0, 18],
509: [51, 2, 2090, 509, u"Navy Officer's Sabre", u'NAVY_OFFICER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12102, 1, 12109, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 17, 31, 0, 2, 2238, 3, 0, 0, 36.5],
510: [51, 2, 750, 510, u'Boarding Sabre', u'BOARDING_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 10, 20, 0, 2, 3509, 3, 0, 0, 21],
511: [51, 2, 260, 511, u'Ornate Sabre', u'ORNATE_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 5, 13, 0, 0, 0, 3, 0, 0, 10.5],
512: [51, 2, 2860, 512, u'Royal Sabre', u'ROYAL_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12104, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 20, 40, 0, 3, 3509, 3, 0, 0, 37],
513: [51, 2, 820, 513, u"Fencer's Sabre", u'FENCER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 15, 28, 0, 1, 2238, 3, 0, 0, 22],
514: [51, 2, 3160, 514, u"Duelist's Sabre", u'DUELIST_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12105, 1, 12404, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 20, 40, 0, 2, 2238, 3, 0, 0, 39],
515: [51, 2, 4540, 515, u"Musketeer's Sabre", u'MUSKETEER_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12105, 2, 12404, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 24, 46, 0, 2, 2238, 3, 0, 0, 47],
516: [51, 2, 11530, 516, u"Master Fencer's Sabre", u'MASTER_FENCER_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12105, 3, 12404, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 28, 57, 0, 3, 2238, 3, 0, 0, 61.5],
517: [51, 2, 520, 517, u"Swordsman's Sabre", u'SWORDSMAN_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 10, 20, 0, 1, 2331, 2, 0, 0, 17],
518: [51, 2, 2640, 518, u"Sword Fighter's Sabre", u'SWORD_FIGHTER_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12105, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 18, 37, 0, 2, 2331, 2, 0, 0, 35.5],
519: [51, 2, 8770, 519, u"Sword Master's Sabre", u'SWORD_MASTER_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12105, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 25, 53, 0, 3, 2331, 2, 0, 0, 53.5],
520: [51, 2, 660, 520, u'Voodoo Hunter Sabre', u'VOODOO_HUNTER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 8, 0, 0, '', 1, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 13, 25, 0, 1, 3509, 3, 0, 0, 19.5],
521: [51, 2, 2040, 521, u'Witch Hunter Sabre', u'WITCH_HUNTER_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 11, 0, 0, '', 2, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 16, 34, 0, 2, 3509, 3, 0, 0, 31],
522: [51, 2, 7090, 522, u'Sabre of the Inquisition', u'SABRE_OF_THE_INQUISITION', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 21, 0, 0, '', 3, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 26, 54, 0, 3, 3509, 3, 0, 0, 48],
523: [51, 2, 470, 523, u'Silver Sabre', u'SILVER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 10, 20, 0, 1, 3500, 2, 0, 0, 16],
524: [51, 2, 2430, 524, u'Holy Sabre', u'HOLY_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 12, 0, 0, '', 1, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 17, 36, 0, 2, 3500, 2, 0, 0, 34],
525: [51, 2, 6260, 525, u'Sacred Sabre', u'SACRED_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 18, 0, 0, '', 1, 209, 0, 0, 0, 0, 1, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 23, 50, 0, 2, 3500, 2, 0, 0, 45],
526: [51, 2, 10800, 526, u'Divine Sabre', u'DIVINE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 24, 0, 0, '', 1, 209, 0, 0, 0, 0, 2, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 29, 59, 0, 3, 3500, 2, 0, 0, 59.5],
527: [51, 2, 660, 527, u'Hawk Sabre', u'HAWK_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 4, 0, 0, '', 1, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 9, 19, 0, 1, 3502, 3, 0, 0, 19.5],
528: [51, 2, 2640, 528, u'Falcon Sabre', u'FALCON_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 10, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 15, 33, 0, 2, 3502, 3, 0, 0, 35.5],
529: [51, 2, 4730, 529, u'Eagle Sabre', u'EAGLE_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 15, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12105, 1, 12103, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 20, 40, 0, 2, 3502, 3, 0, 0, 48],
530: [51, 2, 13050, 530, u'Great Hawk Sabre', u'GREAT_HAWK_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 20, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12105, 2, 12103, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 25, 53, 0, 3, 3502, 3, 0, 0, 65.5],
531: [51, 2, 16610, 531, u'Kingfisher Sabre', u'KINGFISHER_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', 0, 25, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12105, 2, 12103, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 30, 60, 0, 3, 3502, 3, 0, 0, 74],
532: [51, 2, 130, 532, u"Mariner's Saber", u'MARINER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 3, 10, 0, 0, 0, 3, 0, 0, 5],
533: [51, 2, 720, 533, u"Quarter Master's Saber", u'QUARTER_MASTER_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 12, 23, 0, 1, 3504, 3, 0, 0, 20.5],
534: [51, 2, 950, 534, u"First Mate's Saber", u'FIRST_MATE_SABRE', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 14, 26, 0, 1, 3504, 3, 0, 0, 24],
535: [51, 2, 3010, 535, u"Lieutenant's Saber", u'LIEUTENANT_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 16, 34, 0, 2, 3504, 3, 0, 0, 38],
536: [51, 2, 4440, 536, u"Commander's Saber", u'COMMANDER_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12102, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 19, 39, 0, 2, 3504, 3, 0, 0, 46.5],
537: [51, 2, 6170, 537, u"Captain's Saber", u'CAPTAIN_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12103, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 21, 42, 0, 3, 3504, 3, 0, 0, 55],
538: [51, 2, 13450, 538, u"Commodore's Saber", u'COMMODORE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12103, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 24, 51, 0, 3, 3504, 3, 0, 0, 66.5],
539: [51, 2, 19380, 539, u"Vice Admiral's Saber", u'VICE_ADMIRAL_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12104, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 27, 56, 0, 4, 3504, 3, 0, 0, 80],
540: [51, 2, 24480, 540, u"Admiral's Saber", u'ADMIRAL_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12104, 3, 12106, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 30, 60, 0, 4, 3504, 3, 0, 0, 90],
541: [51, 2, 4630, 541, u'Bloodfire Sabre', u'BLOODFIRE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 20, 0, 0, '', 1, 122, 1, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 25, 53, 0, 0, 0, 3, 0, 0, 38.5],
542: [51, 1, 8450, 542, u'Crimsonfire Sabre', u'CRIMSONFIRE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 23, 0, 0, '', 2, 122, 2, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 28, 57, 0, 0, 0, 3, 0, 0, 52.5],
543: [51, 1, 13250, 543, u'Emberfire Sabre', u'EMBERFIRE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', 0, 25, 0, 0, '', 3, 122, 3, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 30, 60, 0, 0, 0, 3, 0, 0, 66],
544: [51, 1, 2570, 544, u'Bane Blade Sabre', u'BANE_BLADE_SABRE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 16, 0, 0, '', 1, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 21, 42, 0, 1, 2333, 6, 0, 0, 35],
545: [51, 1, 9260, 545, u'Bane Fire Sabre', u'BANE_FIRE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 21, 0, 0, '', 2, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 26, 54, 0, 2, 2333, 6, 0, 0, 55],
546: [51, 1, 15730, 546, u'Bane Curse Sabre', u'BANE_CURSE_SABRE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', 0, 25, 0, 0, '', 3, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 30, 60, 0, 3, 2333, 6, 0, 0, 72],
547: [51, 1, 10800, 547, u"LePorc's Sabre", u'LE_PORC_SABRE', 4, 1, 0, 0, 0, 0, 1, 0, 1, u'pir_t_ico_swd_sabre_e', 0, 12, 0, 0, '', 2, 100, 0, 0, 0, 0, 3, 12109, 3, 12105, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 17, 41, 0, 3, 2238, 3, 0, 0, 59.5],
551: [51, 2, 430, 551, u'Iron Sabre', u'IRON_SABRE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_b', u'A plain sabre, able to deal fast rapid strikes!', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_b', 2, 5, 13, 0, 0, 0, 2, 0, 0, 6.5],
552: [51, 2, 700, 552, u'Steel Sabre', u'STEEL_SABRE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_c', u'A plain steel sabre. Strong and fast.', 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_c', 2, 10, 20, 0, 0, 0, 2, 0, 0, 10],
553: [51, 2, 1040, 553, u'Fine Sabre', u'FINE_SABRE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_d', u'A well-balanced and swift blade.', 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_d', 2, 15, 28, 0, 0, 0, 2, 0, 0, 14],
554: [51, 2, 1360, 554, u'War Sabre', u'WAR_SABRE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', u'A powerful military sabre used in wars.', 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 20, 35, 0, 0, 0, 2, 0, 0, 17.5],
555: [51, 2, 1750, 555, u'Master Sabre', u'MASTER_SABRE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_sabre_e', u'A masterful sabre used by the best fighters.', 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_sabre_e', 2, 25, 43, 0, 0, 0, 2, 0, 0, 21.5],
25200: [52, 0, 300, 25200, u'Sash_Prince', u'PRINCE_SASH', 3, 5, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_blt_f_sash', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 24, -1, 0, -1, 0],
25201: [52, 0, 300, 25201, u'Sash_Peacock_Male', u'PEACOCK_SASH_MALE', 3, 5, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_blt_f_sash', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, -1, 0, -1, 0],
10202: [56, 1, 1200, 10202, u'Golden Spyglass', u'GOLDEN_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12656, 1, 12653, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 24, 0, 0, 0, 0, 20],
801: [51, 2, 360, 801, u'Worn Broadsword', u'WORN_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', u'Broadswords can hit multiple enemies in one slash.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 5, 29, 0, 0, 0, 5, 0, 0, 14.5],
802: [51, 2, 530, 802, u'Iron Broadsword', u'IRON_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', u'Broadswords are heavy swords that can cut through many enemies at once.', 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 7, 32, 0, 0, 0, 5, 0, 0, 18],
803: [51, 2, 430, 803, u'Light Broadsword', u'LIGHT_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', u'Broadswords can hit multiple enemies in one slash.', 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 7, 32, 0, 0, 0, 5, 0, 0, 16],
804: [51, 2, 530, 804, u'Heavy Broadsword', u'HEAVY_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 7, 32, 0, 0, 0, 5, 0, 0, 18],
805: [51, 2, 3500, 805, u'War Broadsword', u'WAR_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 15, 51, 0, 3, 3509, 5, 0, 0, 41.5],
806: [51, 2, 4860, 806, u'Royal Broadsword', u'ROYAL_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12104, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 22, 64, 0, 3, 3509, 5, 0, 0, 49],
807: [51, 2, 710, 807, u"Brute's Broadsword", u'BRUTE_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 3, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12100, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 8, 34, 0, 0, 0, 5, 0, 0, 21],
808: [51, 2, 1630, 808, u"EITC Grunt's Broadsword", u'EITC_GRUNT_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12101, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 13, 43, 0, 1, 2336, 5, 0, 0, 32.5],
9001: [51, 0, 1, 9001, u'Torch', u'PROP_TORCH', 1, 15, 0, 0, 0, 0, 0, 0, 1, u'torch', u'dum', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'torch', 35, u'#N/A', 1, 0, 0, 0, 0, 0, 0, 0.5],
9002: [51, 0, 1, 9002, u'Powder Keg', u'PROP_POWDER_KEG', 1, 15, 0, 0, 0, 0, 0, 0, 1, u'powder_keg', u'dum', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_bom_barrelDynamite', 36, u'#N/A', 1, 0, 0, 0, 0, 0, 0, 0.5],
811: [51, 2, 2110, 811, u'Tempered Broadsword', u'TEMPERED_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', 0, 8, 0, 0, '', 2, 100, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 13, 48, 0, 0, 0, 5, 0, 0, 32],
812: [51, 2, 3340, 812, u'Engraved Broadsword', u'ENGRAVED_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 12, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12107, 1, 12102, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 17, 55, 0, 0, 0, 5, 0, 0, 40.5],
813: [51, 2, 8360, 813, u'Bejeweled Broadsword', u'BEJEWELED_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 16, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 12107, 2, 12102, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 21, 67, 0, 0, 0, 5, 0, 0, 52.5],
814: [51, 2, 11250, 814, u'Masterwork Broadsword', u'MASTERWORK_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 20, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12107, 3, 12102, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 25, 74, 0, 0, 0, 5, 0, 0, 61],
815: [51, 2, 1140, 815, u'Monkey Broadsword', u'MONKEY_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 5, 0, 0, '', 1, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 10, 38, 0, 1, 3508, 5, 0, 0, 27],
816: [51, 2, 4570, 816, u'Baboon Broadsword', u'BABOON_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 12, 0, 0, '', 2, 121, 0, 0, 0, 0, 1, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 17, 55, 0, 2, 3508, 5, 0, 0, 47.5],
817: [51, 2, 14790, 817, u'Gorilla Broadsword', u'GORILLA_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 18, 0, 0, '', 3, 121, 0, 0, 0, 0, 2, 12108, 1, 12109, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 23, 70, 0, 3, 3508, 5, 0, 0, 70],
818: [51, 2, 1400, 818, u'Voodoo Hunter Broadsword', u'VOODOO_HUNTER_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 10, 0, 0, '', 1, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 15, 46, 0, 1, 3509, 5, 0, 0, 30],
819: [51, 2, 4110, 819, u'Witch Hunter Broadsword', u'WITCH_HUNTER_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 16, 0, 0, '', 2, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 21, 62, 0, 2, 3509, 5, 0, 0, 45],
820: [51, 2, 10530, 820, u'Broadsword of the Inquisition', u'BROADSWORD_OF_THE_INQUISITION', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 21, 0, 0, '', 3, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 26, 76, 0, 3, 3509, 5, 0, 0, 59],
821: [51, 1, 4380, 821, u'Bane Blade Broadsword', u'BANE_BLADE_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 18, 0, 0, '', 1, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 23, 65, 0, 1, 2333, 6, 0, 0, 46.5],
822: [51, 1, 13360, 822, u'Bane Fire Broadsword', u'BANE_FIRE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 22, 0, 0, '', 2, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 27, 77, 0, 2, 2333, 6, 0, 0, 66.5],
823: [51, 1, 21010, 823, u'Bane Curse Broadsword', u'BANE_CURSE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 25, 0, 0, '', 3, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 30, 83, 0, 3, 2333, 6, 0, 0, 83.5],
824: [51, 2, 1260, 824, u'Military Broadsword', u'MILITARY_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 13, 43, 0, 1, 2336, 5, 0, 0, 28.5],
825: [51, 2, 1990, 825, u"Soldier's Broadsword", u'SOLDIER_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 17, 50, 0, 1, 2336, 5, 0, 0, 36],
826: [51, 2, 5260, 826, u'Cavalry Broadsword', u'CAVALRY_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12108, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 20, 60, 0, 2, 2336, 5, 0, 0, 51],
827: [51, 2, 7380, 827, u"Dragoon's Broadsword", u'DRAGOON_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12108, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 23, 65, 0, 2, 2336, 5, 0, 0, 60.5],
828: [51, 2, 17880, 828, u"Brigadier's Broadsword", u'BRIGADIER_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12108, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 26, 76, 0, 3, 2336, 5, 0, 0, 77],
829: [51, 2, 22540, 829, u"General's Broadsword", u'GENERAL_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12108, 3, 12106, 1, 12103, 1, u'pir_m_hnd_swd_broadsword_e', 3, 29, 81, 0, 3, 2336, 5, 0, 0, 86.5],
830: [51, 1, 3590, 830, u'Dark Broadsword', u'DARK_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 15, 0, 0, '', 1, 204, 1, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 20, 60, 0, 1, 3501, 6, 0, 0, 42],
831: [51, 1, 11250, 831, u'Shadow Broadsword', u'SHADOW_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 20, 0, 0, '', 2, 204, 2, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 25, 74, 0, 2, 3501, 6, 0, 0, 61],
832: [51, 1, 17650, 832, u'Forbidden Broadsword', u'FORBIDDEN_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 24, 0, 0, '', 3, 204, 3, 206, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 29, 81, 0, 3, 3501, 6, 0, 0, 76.5],
833: [51, 1, 1780, 833, u"Fighter's Broadsword", u'FIGHTER_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 18, 52, 0, 1, 2337, 5, 0, 0, 34],
834: [51, 1, 5060, 834, u'Savage Broadsword', u'SAVAGE_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 22, 64, 0, 2, 2337, 5, 0, 0, 50],
835: [51, 1, 6900, 835, u"Warmonger's Broadsword", u'WARMONGER_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12107, 1, 12103, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 25, 69, 0, 2, 2337, 5, 0, 0, 58.5],
836: [51, 1, 17190, 836, u"Warlord's Broadsword", u'WARLORD_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12107, 2, 12103, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 28, 79, 0, 3, 2337, 5, 0, 0, 75.5],
837: [51, 1, 21010, 837, u"War Master's Broadsword", u'WAR_MASTER_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12107, 3, 12103, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 30, 83, 0, 3, 2337, 5, 0, 0, 83.5],
838: [51, 1, 4290, 838, u"Lieutenant's Broadsword", u'LIEUTENANT_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12102, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 14, 50, 0, 2, 3504, 5, 0, 0, 46],
839: [51, 1, 6330, 839, u"Commander's Broadsword", u'COMMANDER_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12102, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 19, 58, 0, 2, 3504, 5, 0, 0, 56],
840: [51, 1, 8900, 840, u"Captain's Broadsword", u'CAPTAIN_BROADSWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12103, 1, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 23, 65, 0, 3, 3504, 5, 0, 0, 66.5],
841: [51, 1, 19050, 841, u"Vice Admiral's Broadsword", u'COMMODORE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12103, 2, 12106, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 27, 77, 0, 3, 3504, 5, 0, 0, 79.5],
842: [51, 2, 7590, 842, u'Bloodfire Broadsword', u'BLOODFIRE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 21, 0, 0, '', 1, 122, 1, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 26, 76, 0, 0, 0, 5, 0, 0, 50],
843: [51, 1, 11810, 843, u'Crimsonfire Broadsword', u'CRIMSONFIRE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 22, 0, 0, '', 2, 122, 2, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 27, 77, 0, 0, 0, 5, 0, 0, 62.5],
844: [51, 1, 18110, 844, u'Emberfire Broadsword', u'EMBERFIRE_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 25, 0, 0, '', 3, 122, 3, 203, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 30, 83, 0, 0, 0, 5, 0, 0, 77.5],
845: [51, 0, 2690, 845, u'Black Raven Broadsword', u'BLACK_RAVEN_BROADSWORD', 2, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 18, 52, 0, 2, 2337, 6, 0, 0, 42],
846: [51, 0, 1940, 846, u'Vulture Claw Broadsword', u'VULTURE_CLAW_BROADSWORD', 2, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 13, 43, 0, 2, 2336, 5, 0, 0, 35.5],
847: [51, 1, 60100, 847, u'Ignis Maximus', u'IGNIS_MAXIMUS', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', 0, 25, 0, 0, '', 3, 122, 0, 0, 0, 0, 3, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 30, 93, 0, 3, 2333, 5, 0, 0, 109.5],
848: [51, 1, 2750, 848, u"Avaricia's Broadsword", u'AVARICIA_BROADSWORD', 2, 1, 0, 0, 0, 0, 1, 1, 1, u'pir_t_ico_swd_broadsword_c', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 13, 43, 0, 3, 2336, 5, 0, 0, 42.5],
849: [51, 0, 3800, 849, u'Broadsword A', u'BROADSWORD_A', 2, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 18, 52, 0, 3, 2337, 5, 0, 0, 50],
850: [51, 0, 3800, 850, u'Broadsword B', u'BROADSWORD_B', 2, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 18, 52, 0, 3, 2337, 6, 0, 0, 50],
851: [51, 0, 3800, 851, u"Barbossa's Edge", u'BARBOSSA_EDGE', 2, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_f', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_f', 3, 18, 52, 0, 3, 2337, 6, 0, 0, 50],
852: [51, 1, 1310, 852, u'Nemesis Blade', u'NEMESIS_BLADE', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 0, 36, 0, 1, 2336, 5, 0, 0, 25],
853: [51, 2, 740, 853, u'Small Broadsword', u'SMALL_BROADSWORD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', u'Its powerful slashes can hit all nearby enemies!', 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 7, 32, 0, 0, 0, 5, 0, 0, 16],
854: [51, 2, 1040, 854, u'Steel Broadsword', u'STEEL_BROADSWORD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', u'A heavy steel sword used by the military.', 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 12, 41, 0, 0, 0, 5, 0, 0, 20.5],
855: [51, 2, 1380, 855, u'Mighty Broadsword', u'MIGHTY_BROADSWORD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_c', u'A heavy sword able to hit many foes at once.', 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_c', 3, 17, 50, 0, 0, 0, 5, 0, 0, 25],
856: [51, 2, 1760, 856, u'Ornate Broadsword', u'ORNATE_BROADSWORD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_d', u'A hefty sword forged by a master blacksmith.', 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_d', 3, 22, 59, 0, 0, 0, 5, 0, 0, 29.5],
857: [51, 2, 2130, 857, u'Great Broadsword', u'GREAT_BROADSWORD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_e', u'A powerful heavy blade.', 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_e', 3, 27, 67, 0, 0, 0, 5, 0, 0, 33.5],
858: [51, 0, 200, 858, u'Sword of Triton', u'SWORD_OF_TRITON', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', u'the broken hilt of a larger blade', 0, 0, 0, '', 1, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_triton', 3, 0, 36, 0, 1, 2336, 5, 0, 0, u'#REF!'],
25550: [52, 2, 150, 25550, u'Sash_Basic_Buckle', u'BUCKLE_SASH', 1, 5, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_blt_f_sash', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 1],
25551: [52, 2, 300, 25551, u'Sash_Pattern', u'PATTERNED_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_blt_f_sash', 3, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 0],
25552: [52, 2, 300, 25552, u'Sash_Tassles', u'TASSLES_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_blt_f_sash', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
25553: [52, 2, 300, 25553, u'Sash_Gold_Buckle', u'GOLD_BUCKLE_SASH', 2, 5, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_blt_f_sash', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
25554: [52, 1, 300, 25554, u'Sash_Blue', u'BLUE_SASH', 2, 5, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_blt_f_sash', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, -1, 11, -1, 0, 0],
25555: [52, 1, 300, 25555, u'Sash_Red_Fur', u'RED_FUR_SASH', 2, 5, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_blt_f_sash', 37, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, -1, 12, -1, 0, 0],
25556: [52, 0, 300, 25556, u'Belt_Black_Square', u'GOLD_SQUARE_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_blt_f_square', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 0, 0],
25557: [52, 0, 300, 25557, u'Belt_Rivet_Square', u'BRASS_SQUARE_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_square', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 0, 0],
25558: [52, 2, 300, 25558, u'Belt_Corner', u'WOODLAND_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_blt_f_square', 9, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 7, -1, 0, 0],
25559: [52, 2, 300, 25559, u'Belt_Studded_Circle', u'GOLD_BOX_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_blt_f_square', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 8, -1, 0, 0],
25560: [52, 2, 300, 25560, u'Belt_Tie_Pattern', u'LACED_BELT', 2, 5, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_blt_f_square', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 9, -1, 0, 0],
25561: [52, 2, 150, 25561, u'Belt_Double_Weave', u'SIDE_BELT', 1, 5, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_square', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 10, -1, 0, 0],
25562: [52, 2, 700, 25562, u'Advanced_Outfit_Belt', u'ADVENTURE_BELT', 3, 5, 0, 0, 1, 0, 0, 0, 31, u'pir_t_ico_blt_f_square', 8, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 16, -1, 0, 0],
25563: [52, 2, 300, 25563, u'Intermediate_Outfit_Belt', u'TRAVELERS_BELT', 2, 5, 0, 0, 1, 0, 0, 0, 7, u'pir_t_ico_blt_f_square', 1, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 17, -1, 0, 0],
25564: [52, 1, 300, 25564, u'Sash_MardiGras', u'MARDI_GRAS_SASH', 3, 5, 0, 1, 0, 0, 0, 0, 34, u'pir_t_ico_blt_f_sash', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, -1, 18, -1, 0, 0],
25565: [52, 0, 300, 25565, u'Sash_Peacock_Female', u'PEACOCK_SASH_FEMALE', 3, 5, 0, 0, 0, 0, 0, 0, 5, u'pir_t_ico_blt_f_sash', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 22, -1, 0, 0],
25566: [52, 0, 300, 25566, u'Sash_Wildfire_Female', u'WILDFIRE_SASH_FEMALE', 3, 5, 0, 1, 0, 0, 0, 0, 22, u'pir_t_ico_blt_f_sash', 54, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 26, -1, 0, 0],
25600: [52, 0, 300, 25600, u'Sash_French_Assassin', u'FRENCH_ASSASSIN_SASH', 3, 5, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_blt_f_sash', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 20, 19, 0, 0, 0],
25601: [52, 0, 150, 25601, u'Sash_Bountyhunter', u'BOUNTYHUNTER_SASH', 2, 5, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_sash', 55, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, 21, 20, 0, 0, 0],
25602: [52, 0, 150, 25602, u'Sash_Barbary_Corsair', u'BARBARY_CORSAIR_SASH', 2, 5, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_sash', 56, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, 22, 21, 0, 0, 0],
25700: [52, 0, 300, 25700, u'Belt_Rogue_Privateer', u'ROGUE_PRIVATEER_BELT', 3, 5, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_square', 47, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 25, 23, 0, 0, 0],
25701: [52, 0, 300, 25701, u'Belt_Scourge', u'SCOURGE_BELT', 3, 5, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_blt_f_square', 51, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 26, 24, 0, 0, 0],
25702: [52, 0, 300, 25702, u'Belt_Sea_Serpent', u'SEA_SERPENT_BELT', 3, 5, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_blt_f_square', 49, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 27, 25, 0, 0, 0],
25703: [52, 0, 150, 25703, u'Belt_Zombie_Pirate', u'ZOMBIE_PIRATE_BELT', 2, 5, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_blt_f_square', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 28, 27, 0, 0, 0],
25704: [52, 0, 150, 25704, u'Belt_Zombies_Pirate', u'ZOMBIES_PIRATE_BELT', 2, 5, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_blt_f_square', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 28, 27, 0, 0, 0],
1144: [51, 2, 16800, 1144, u'Scimitar', u'SCIMITAR_42', 3, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_scimitar_a', 0, 16, 0, 0, '', 4, 101, 0, 0, 0, 0, 5, 12105, 5, 12108, 5, 12103, 1, u'pir_m_hnd_swd_scimitar_a', 4, 17, 59, 0, 0, 0, 1, 0, 0, 91.5],
1148: [51, 2, 21260, 1148, u'Scimitar', u'SCIMITAR_46', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_scimitar_a', 0, 22, 0, 0, '', 2, 209, 0, 0, 0, 0, 2, 12102, 2, 12103, 0, 0, 1, u'pir_m_hnd_swd_scimitar_a', 4, 23, 76, 0, 3, 2244, 1, 0, 0, 84],
1149: [51, 2, 730, 1149, u'Scimitar', u'SCIMITAR_47', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_scimitar_a', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_scimitar_a', 4, 7, 54, 0, 0, 0, 1, 0, 0, 27],
1150: [51, 2, 730, 1150, u'Scimitar', u'SCIMITAR_48', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_scimitar_a', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_scimitar_a', 4, 7, 54, 0, 0, 0, 1, 0, 0, 27],
6118: [51, 1, 3310, 6118, u'Staff of the Sacred Sun', u'STAFF_OF_THE_SACRED_SUN', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 44, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12701, 2, 12704, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 29, 41, 0, 3, 2371, 13, 0, LVector3f(0, 1.6, 0), 57.5],
10210: [56, 1, 5890, 10210, u'Thieves Spyglass', u'THIEVES_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12652, 2, 12659, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 21, 0, 0, 3, 3404, 42],
2225: [51, 2, 4100, 2225, u"Privateer's Bayonet", u'PRIVATEER_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12200, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 10, 22, 64, 1, 2, 2316, 0, 0, 0, 64],
2226: [51, 2, 6080, 2226, u"Corsair's Bayonet", u'CORSAIR_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12200, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 10, 26, 72, 1, 3, 2316, 0, 0, 0, 78],
10410: [56, 2, 2550, 10410, u"Treasure Hunter's Sea Chart", u'TREASURE_HUNTER_SEA_CHART', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 20, 0, 0, '', 3, 1001, 0, 0, 0, 0, 3, 12657, 1, 12659, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 25, 0, 0, 0, 0, 29],
10411: [56, 2, 50, 10411, u'Old World Sea Chart', u'OLD_WORLD_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 6, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12658, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 11, 0, 0, 0, 0, 5],
10412: [56, 2, 260, 10412, u'New World Sea Chart', u'NEW_WORLD_SEA_CHART', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 11, 0, 0, '', 1, 1001, 0, 0, 0, 0, 2, 12658, 1, 12657, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 16, 0, 0, 0, 0, 11],
1401: [51, 2, 1850, 1401, u'Sword of Decay', u'SWORD_OF_DECAY', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_c', 0, 21, 0, 0, u'rc.le.10lootTreasure', 1, 122, 1, 120, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_c', 5, 20, 60, 0, 0, 0, 6, 0, 0, 43],
1402: [51, 2, 2120, 1402, u'Spineskull Blade', u'SPINESKULL_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_c', 0, 24, 0, 0, u'rc.le.10lootTreasure', 2, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_c', 5, 23, 66, 0, 1, 2336, 6, 0, 0, 46],
1403: [51, 2, 2500, 1403, u'Grim Hound Blade', u'GRIM_HOUND_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_c', 0, 26, 0, 0, u'rc.le.10lootTreasure', 2, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_c', 5, 25, 70, 0, 1, 2334, 6, 0, 0, 50],
1404: [51, 2, 2120, 1404, u'Sea Steel Sword', u'SEA_STEEL_SWORD', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_b', 0, 21, 0, 0, u'rc.le.10lootTreasure', 1, 203, 0, 0, 0, 0, 2, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_b', 38, 20, 70, 0, 1, 3508, 6, 0, 0, 46],
1405: [51, 2, 2700, 1405, u'Barnacle Breaker', u'BARNACLE_BREAKER', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_b', 0, 24, 0, 0, u'rc.le.10lootTreasure', 1, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_b', 38, 23, 76, 0, 1, 2335, 6, 0, 0, 52],
1406: [51, 2, 2700, 1406, u'Deepwater Blade', u'DEEPWATER_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_b', 0, 26, 0, 0, u'rc.le.10lootTreasure', 1, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_b', 38, 25, 80, 0, 1, 2333, 6, 0, 0, 52],
1407: [51, 2, 1980, 1407, u'Grave Reaper', u'GRAVE_REAPER', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_d', 0, 21, 0, 0, u'rc.le.10lootTreasure', 1, 200, 0, 0, 0, 0, 2, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_d', 37, 20, 55, 0, 1, 2334, 6, 0, 0, 44.5],
1408: [51, 2, 2160, 1408, u'Plaguefire Blade', u'PLAGUEFIRE_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_d', 0, 24, 0, 0, u'rc.le.10lootTreasure', 1, 122, 2, 101, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_d', 37, 23, 61, 0, 0, 0, 6, 0, 0, 46.5],
1409: [51, 2, 2970, 1409, u'Viper Blade', u'VIPER_BLADE', 3, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_d', 0, 27, 0, 0, u'rc.le.10lootTreasure', 2, 101, 0, 0, 0, 0, 2, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_d', 37, 26, 67, 0, 1, 2335, 6, 0, 0, 54.5],
1410: [51, 2, 4560, 1410, u'Nautilus Blade', u'NAUTILUS_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 24, 0, 0, u'rc.le.10lootTreasure', 2, 100, 0, 0, 0, 0, 2, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 5, 23, 71, 0, 2, 2335, 6, 0, 0, 67.5],
1411: [51, 2, 4970, 1411, u'Doom Rattler', u'DOOM_RATTLER', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 27, 0, 0, u'rc.le.10lootTreasure', 1, 207, 0, 0, 0, 0, 2, 12104, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 5, 26, 77, 0, 2, 2333, 6, 0, 0, 70.5],
1412: [51, 2, 6970, 1412, u'Whalebone Blade', u'WHALEBONE_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 3, 12105, 3, 12106, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 5, 29, 83, 0, 3, 2334, 6, 0, 0, 83.5],
1413: [51, 2, 6160, 1413, u'The Dark Mutineer', u'THE_DARK_MUTINEER', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 31, 0, 0, u'rc.le.10lootTreasure', 2, 120, 0, 0, 0, 0, 3, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 5, 30, 85, 0, 3, 3501, 6, 0, 0, 78.5],
1414: [51, 2, 4290, 1414, u'Spinecrest Sword', u'SPINECREST_SWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 22, 0, 0, u'rc.le.10lootTreasure', 1, 205, 0, 0, 0, 0, 2, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 21, 77, 0, 2, 2333, 6, 0, 0, 65.5],
1415: [51, 2, 4160, 1415, u'Razortooth Sword', u'RAZORTOOTH_SWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 24, 0, 0, u'rc.le.10lootTreasure', 1, 204, 0, 0, 0, 0, 3, 12107, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 23, 81, 0, 2, 2336, 6, 0, 0, 64.5],
1416: [51, 2, 7140, 1416, u'Ripsaw Blade', u'RIPSAW_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 27, 0, 0, u'rc.le.10lootTreasure', 2, 122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 26, 87, 0, 3, 2336, 6, 0, 0, 84.5],
1417: [51, 2, 6970, 1417, u'Sharkfang Blade', u'SHARKFANG_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 28, 0, 0, u'rc.le.10lootTreasure', 2, 100, 0, 0, 0, 0, 2, 12106, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 27, 89, 0, 3, 2334, 6, 0, 0, 83.5],
1418: [51, 2, 9120, 1418, u'Hull Ripper', u'HULL_RIPPER', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 2, 12104, 3, 12108, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 29, 93, 0, 3, 2333, 6, 0, 0, 95.5],
1419: [51, 2, 8740, 1419, u'Barracuda Blade', u'BARRACUDA_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 31, 0, 0, u'rc.le.10lootTreasure', 3, 100, 0, 0, 0, 0, 2, 12110, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 30, 95, 0, 3, 2335, 6, 0, 0, 93.5],
1420: [51, 2, 4360, 1420, u'Bitter End', u'BITTER_END', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_g', 0, 25, 0, 0, u'rc.le.10lootTreasure', 2, 161, 0, 0, 0, 0, 2, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_g', 37, 24, 68, 0, 2, 2334, 6, 0, 0, 66],
1421: [51, 2, 4760, 1421, u'Dread Spike', u'DREAD_SPIKE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_g', 0, 28, 0, 0, u'rc.le.10lootTreasure', 3, 100, 0, 0, 0, 0, 1, 12104, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_g', 37, 27, 74, 0, 2, 2335, 6, 0, 0, 69],
1422: [51, 2, 6560, 1422, u'Doom Stinger', u'DOOM_STINGER', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_g', 0, 30, 0, 0, u'rc.le.10lootTreasure', 3, 101, 0, 0, 0, 0, 3, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_g', 37, 29, 78, 0, 3, 2333, 6, 0, 0, 81],
1423: [51, 2, 4560, 1423, u"Treachery's Edge", u'TREACHERYS_EDGE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_f', 0, 27, 0, 0, u'rc.le.10lootTreasure', 2, 100, 2, 120, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_f', 38, 26, 87, 0, 2, 3501, 6, 0, 0, 67.5],
1424: [51, 2, 8190, 1424, u'Tyrant Blade', u'TYRANT_BLADE', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_f', 0, 29, 0, 0, u'rc.le.10lootTreasure', 2, 160, 0, 0, 0, 0, 2, 12108, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_f', 38, 28, 91, 0, 3, 2334, 6, 0, 0, 90.5],
26001: [52, 2, 150, 26001, u'Shoe_Tall_Boots_Tan_Flap', u'COMFY_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 0],
26002: [52, 2, 600, 26002, u'Shoe_Tall_Boots_Two_Buckle', u'WALLOP_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_m_tall', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 0],
26003: [52, 2, 150, 26003, u'Shoe_Tall_Boots_Lace', u'WORN_IN_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 2, -1, 0],
26004: [52, 2, 150, 26004, u'Shoe_Tall_Boots_Lower_Leather', u'HARD_LEATHER_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_m_tall', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3, -1, 0],
1429: [51, 2, 12660, 1429, u'World Eater Blade', u'WORLD_EATER_BLADE', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_h', 0, 31, 0, 0, u'rc.le.10lootTreasure', 3, 160, 3, 161, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_h', 38, 30, 105, 0, 3, 3702, 6, 0, 0, 112.5],
1430: [51, 2, 8460, 1430, u'The Emerald Curse', u'THE_EMERALD_CURSE', 5, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_i', 0, 31, 0, 0, u'rc.le.10lootTreasure', 3, 120, 0, 0, 0, 0, 4, 12105, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_i', 37, 30, 90, 0, 3, 3703, 19, 0, 0, 92],
1431: [51, 1, 4360, 1431, u'Cursed Blade', u'CURSED_BLADE_31', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 29, 78, 0, 3, 2334, 6, 0, 0, 66],
1432: [51, 1, 4360, 1432, u'Cursed Blade', u'CURSED_BLADE_32', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 29, 78, 0, 3, 2334, 6, 0, 0, 66],
1433: [51, 1, 4360, 1433, u'Cursed Blade', u'CURSED_BLADE_33', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 29, 78, 0, 3, 2334, 6, 0, 0, 66],
1434: [51, 1, 4360, 1434, u'Cursed Blade', u'CURSED_BLADE_34', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 29, 78, 0, 3, 2334, 6, 0, 0, 66],
1435: [51, 1, 4360, 1435, u'Cursed Blade', u'CURSED_BLADE_35', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 29, 78, 0, 3, 2334, 6, 0, 0, 66],
1436: [51, 1, 4620, 1436, u'Cursed Blade', u'CURSED_BLADE_36', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 31, 82, 0, 3, 2335, 6, 0, 0, 68],
1437: [51, 1, 4620, 1437, u'Cursed Blade', u'CURSED_BLADE_37', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_e', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_e', 37, 31, 82, 0, 3, 2335, 6, 0, 0, 68],
1438: [51, 1, 4620, 1438, u'Cursed Blade', u'CURSED_BLADE_38', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 37, 31, 82, 0, 3, 2335, 6, 0, 0, 68],
1439: [51, 1, 5700, 1439, u'Puffer Blade', u'CURSED_BLADE_39', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 31, 97, 0, 3, 2335, 6, 0, 0, 75.5],
1440: [51, 1, 5700, 1440, u'Kelpblade', u'CURSED_BLADE_40', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 31, 97, 0, 3, 2335, 6, 0, 0, 75.5],
1441: [51, 1, 5700, 1441, u'Cursed Blade of Yorr', u'CURSED_BLADE_41', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 31, 97, 0, 3, 2335, 6, 0, 0, 75.5],
1442: [51, 1, 5700, 1442, u'Nightbringer Sword', u'CURSED_BLADE_42', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 31, 97, 0, 3, 2335, 6, 0, 0, 75.5],
1443: [51, 1, 5700, 1443, u'Krakenbane Sword', u'CURSED_BLADE_43', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 32, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 31, 97, 0, 3, 2335, 6, 0, 0, 75.5],
1444: [51, 1, 3540, 1444, u'Cursed Blade', u'CURSED_BLADE_44', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 25, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 24, 83, 0, 3, 3501, 6, 0, 0, 59.5],
1445: [51, 1, 3540, 1445, u'Cursed Blade', u'CURSED_BLADE_45', 4, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 25, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 24, 83, 0, 3, 3501, 6, 0, 0, 59.5],
1446: [51, 1, 4160, 1446, u'Krakenslayer', u'CURSED_BLADE_46', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 25, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 24, 93, 0, 3, 3501, 6, 0, 0, 64.5],
1447: [51, 1, 4160, 1447, u'Lost Blade of Leviathan', u'CURSED_BLADE_47', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 25, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 24, 93, 0, 3, 3501, 6, 0, 0, 64.5],
1448: [51, 1, 4160, 1448, u'Hammerhead Blade', u'CURSED_BLADE_48', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 25, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 24, 93, 0, 3, 3501, 6, 0, 0, 64.5],
1449: [51, 1, 3660, 1449, u'Cursed Blade', u'CURSED_BLADE_49', 5, 1, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_davyJones_a', 0, 30, 0, 0, u'rc.le.10lootTreasure', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_a', 38, 29, 103, 0, 1, 2334, 6, 0, 0, 60.5],
1450: [51, 2, 2210, 1450, u'Cursed Seafang Blade', u'SEAFANG_BLADE', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_davyJones_b', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12103, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_b', 5, 20, 60, 0, 1, 2335, 6, 0, 0, 47],
1451: [51, 1, 700, 1451, u'Darkfrost Blade', u'DARKFROST_BLADE', 3, 1, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_swd_davyJones_d', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_davyJones_d', 37, 10, 35, 0, 1, 2334, 6, 0, 0, 26.5],
26051: [52, 2, 150, 26051, u'Shoe_Med_Boots_Laced', u'BUCKET_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_medium', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
26052: [52, 2, 150, 26052, u'Shoe_Med_Boots_Buckle', u'BUCKLE_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_medium', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 0],
26053: [52, 2, 600, 26053, u'Shoe_Med_Boots_Lace_Front', u'FISHING_TROPHY_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_medium', 5, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 0],
26054: [52, 2, 150, 26054, u'Shoe_Med_Boots_Lace_Plain', u'OLD_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_m_medium', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 0],
26055: [52, 0, 1800, 26055, u'Shoe_Med_Boots_Brown', u'SPIFFY_BOOTS', 3, 7, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_m_medium', 3, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 4, -1, 0],
26056: [52, 0, 600, 26056, u'Shoe_Med_Boots_Green_Top', u'HIKING_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_m_medium', 2, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 5, -1, 0],
26057: [52, 0, 600, 26057, u'Shoe_Med_Boots_Light_Brown', u'RAWHIDE_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sho_m_medium', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, 2, -1, 6, -1, 0],
26058: [52, 2, 600, 26058, u'Shoe_Med_Boots_Basic_Outfit', u'RECRUIT_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_sho_m_medium', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 7, -1, 0],
26059: [52, 1, 1800, 26059, u'Shoe_Med_Boots_Xmas', u'XMAS_MED_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 2, -1, 8, -1, 0],
26060: [52, 1, 1800, 26060, u'Shoe_Med_Boots_Mardi_Gras', u'MARDI_GRAS_MED_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 26, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 2, -1, 9, -1, 0],
26061: [52, 1, 1800, 26061, u'Shoe_Cuff_Boots_GoldTrim', u'GOLD_TRIM_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 5, -1, 0, -1, 0],
26062: [52, 1, 1800, 26062, u'Shoe_Cuff_Boots_RedTrim', u'RED_TRIM_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, 5, -1, 1, -1, 0],
26063: [52, 1, 1800, 26063, u'Shoe_Med_Boots_Blue', u'BLUE_MED_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 4, u'pir_t_ico_sho_m_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 10, -1, 0],
26064: [52, 0, 1800, 26064, u'Shoe_Med_Boots_Bountyhunter', u'BOUNTYHUNTER_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_medium', 55, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, 2, -1, 11, -1, 0],
26065: [52, 0, 600, 26065, u'Shoe_Med_Boots_Voyager', u'VOYAGER_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sho_m_medium', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, 2, -1, 6, -1, 0],
26066: [52, 0, 600, 26066, u'Shoe_Med_Boots_Mayor', u'MAYOR_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sho_m_medium', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, 2, -1, 6, -1, 0],
26100: [52, 0, 600, 26100, u'Shoe_Cuff_Boots_Barbary_Corsair', u'BARBARY_CORSAIR_BOOTS', 2, 7, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_tall', 56, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, 5, -1, 2, -1, 0],
26101: [52, 0, 1800, 26101, u'Shoe_Cuff_Boots_Rogue_Privateer', u'ROGUE_PRIVATEER_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 31, u'pir_t_ico_sho_m_tall', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 5, -1, 3, -1, 0],
26102: [52, 0, 600, 26102, u'Shoe_Cuff_Boots_Scourge', u'SCOURGE_BOOTS', 2, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_tall', 51, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 5, -1, 4, -1, 0],
26103: [52, 0, 1800, 26103, u'Shoe_Cuff_Boots_Wildfire', u'WILDFIRE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 5, -1, 5, -1, 0],
26104: [52, 0, 1800, 26104, u'Shoe_Cuff_Boots_Zombie_Pirate', u'ZOMBIE_PIRATE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 52, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 5, -1, 6, -1, 0],
26105: [52, 0, 1800, 26105, u'Shoe_Cuff_Boots_Zombies_Pirate', u'ZOMBIES_PIRATE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_m_tall', 52, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 5, -1, 6, -1, 0],
26200: [52, 0, 2500, 26200, u'Shoe_EITC_FrenchAssassin', u'FRENCH_ASSASSIN_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_tall', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 4, -1, 1, -1, 0],
26201: [52, 0, 2500, 26201, u'Shoe_EITC_Baron', u'BARON_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_m_tall', 44, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, 4, -1, 2, -1, 0],
26300: [52, 0, 1800, 26300, u'Shoe_Diplomat', u'DIPLOMAT_SHOES', 3, 7, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_m_medium', 64, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, 3, -1, 5, -1, 0],
26301: [52, 0, 1800, 26301, u'Shoe_Prince', u'PRINCE_SHOES', 3, 7, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_m_medium', 46, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 3, -1, 6, -1, 0],
31700: [54, 1, 55, 31700, u'Fake Moustache Lip', u'FAKE_MOUSTACHE_LIP', 3, 3, 0, 0, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 0, 0],
10001: [56, 2, 50, 10001, u'Old Cannon Ram', u'OLD_CANNON_RAM', 1, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12901, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 3, 0, 0, 0, 0, 6],
10002: [56, 2, 110, 10002, u'Hasty Cannon Ram', u'HASTY_CANNON_RAM', 2, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12909, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 7, 0, 0, 0, 0, 8],
10003: [56, 2, 990, 10003, u"Gunner's Cannon Ram", u'GUNNERS_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12909, 1, 12901, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 12, 0, 0, 0, 0, 22],
10004: [56, 2, 3920, 10004, u"Master Gunner's Cannon Ram", u'MASTER_GUNNERS_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12909, 2, 12901, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 18, 0, 0, 0, 0, 36],
10005: [56, 2, 150, 10005, u'Greyhound Cannon Ram', u'GREYHOUND_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 2, 0, 0, '', 1, 150, 0, 0, 0, 0, 1, 12902, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 7, 0, 0, 0, 0, 8],
10006: [56, 2, 800, 10006, u'Bloodhound Cannon Ram', u'BLOODHOUND_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 10, 0, 0, '', 1, 150, 0, 0, 0, 0, 2, 12902, 1, 12904, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 15, 0, 0, 0, 0, 16],
10007: [56, 2, 140, 10007, u'Makeshift Cannon Ram', u'MAKESHIFT_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 7, 0, 0, '', 1, 152, 0, 0, 0, 0, 1, 12903, 1, 12902, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 12, 0, 0, 0, 0, 6],
10008: [56, 1, 1590, 10008, u'Man-o-War Cannon Ram', u'MANOWAR_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 12, 0, 0, '', 1, 151, 0, 0, 0, 0, 1, 12906, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 17, 0, 0, 0, 0, 28],
10009: [56, 1, 5840, 10009, u'Juggernaut Cannon Ram', u'JUGGERNAUT_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 18, 0, 0, '', 1, 151, 0, 0, 0, 0, 2, 12906, 1, 12904, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 23, 0, 0, 0, 0, 44],
10010: [56, 2, 1170, 10010, u'Marauder Cannon Ram', u'MARAUDER_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 9, 0, 0, '', 1, 153, 0, 0, 0, 0, 1, 12904, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 14, 0, 0, 0, 0, 24],
10011: [56, 2, 5320, 10011, u'Cerberus Cannon Ram', u'CERBERUS_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 14, 0, 0, '', 1, 153, 0, 0, 0, 0, 2, 12904, 1, 12905, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 19, 0, 0, 0, 0, 42],
10012: [56, 2, 1820, 10012, u'Phantom Cannon Ram', u'PHANTOM_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 16, 0, 0, '', 1, 154, 0, 0, 0, 0, 1, 12905, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 21, 0, 0, 0, 0, 30],
10013: [56, 1, 6380, 10013, u'Storm Reaper Cannon Ram', u'STORMREAPER_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 23, 0, 0, '', 1, 154, 0, 0, 0, 0, 2, 12905, 1, 12901, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 28, 0, 0, 0, 0, 46],
10014: [56, 2, 1820, 10014, u'Revenant Cannon Ram', u'REVENANT_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 19, 0, 0, '', 1, 155, 0, 0, 0, 0, 1, 12907, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 24, 0, 0, 0, 0, 30],
10015: [56, 1, 4830, 10015, u'Shadow Crow Cannon Ram', u'SHADOW_CROW_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 24, 0, 0, '', 1, 155, 0, 0, 0, 0, 2, 12907, 1, 12902, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 29, 0, 0, 0, 0, 40],
10016: [56, 2, 230, 10016, u'Bronze Cannon Ram', u'BRONZE_CANNON_RAM', 2, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 0, 0, 0, '', 1, 170, 0, 0, 0, 0, 1, 12901, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 3, 0, 0, 0, 0, 12],
10017: [56, 2, 1170, 10017, u'Iron Cannon Ram', u'IRON_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 1, 0, 0, '', 2, 170, 0, 0, 0, 0, 2, 12901, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 6, 0, 0, 0, 0, 24],
10018: [56, 2, 3920, 10018, u'Steel Cannon Ram', u'STEEL_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 7, 0, 0, '', 3, 170, 0, 0, 0, 0, 3, 12901, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 12, 0, 0, 0, 0, 36],
10019: [56, 2, 50, 10019, u'Cotton Cannon Ram', u'COTTON_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 3, 0, 0, '', 1, 171, 0, 0, 0, 0, 1, 12902, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 8, 0, 0, 0, 0, 4],
10020: [56, 2, 220, 10020, u'Wool Cannon Ram', u'WOOL_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 9, 0, 0, '', 2, 171, 0, 0, 0, 0, 2, 12902, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 14, 0, 0, 0, 0, 8],
10021: [56, 2, 460, 10021, u'Fleece Cannon Ram', u'FLEECE_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 13, 0, 0, '', 3, 171, 0, 0, 0, 0, 3, 12902, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 18, 0, 0, 0, 0, 12],
10022: [56, 1, 1000, 10022, u'Sponge Cannon Ram', u'SPONGE_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 17, 0, 0, '', 1, 172, 0, 0, 0, 0, 1, 12906, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 22, 0, 0, 0, 0, 18],
10025: [56, 2, 530, 10025, u'Fiery Cannon Ram', u'FIERY_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 11, 0, 0, '', 1, 174, 0, 0, 0, 0, 1, 12904, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 16, 0, 0, 0, 0, 16],
10026: [56, 2, 3100, 10026, u'Searing Cannon Ram', u'SEARING_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 15, 0, 0, '', 2, 174, 0, 0, 0, 0, 2, 12904, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 20, 0, 0, 0, 0, 32],
10027: [56, 2, 6940, 10027, u'Cajun Cannon Ram', u'CAJUN_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 19, 0, 0, '', 3, 174, 0, 0, 0, 0, 3, 12904, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 24, 0, 0, 0, 0, 48],
10028: [56, 2, 670, 10028, u'Haunted Cannon Ram', u'HAUNTED_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 19, 0, 0, '', 1, 175, 0, 0, 0, 0, 1, 12907, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 24, 0, 0, 0, 0, 18],
10029: [56, 1, 3920, 10029, u'Spectral Cannon Ram', u'SPECTRAL_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 22, 0, 0, '', 2, 175, 0, 0, 0, 0, 2, 12907, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 27, 0, 0, 0, 0, 36],
10030: [56, 1, 8780, 10030, u'Possessed Cannon Ram', u'POSSESSED_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 25, 0, 0, '', 3, 175, 0, 0, 0, 0, 3, 12907, 0, 0, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 30, 0, 0, 0, 0, 54],
10031: [56, 1, 310, 10031, u'Ladle Cannon Ram', u'LADLE_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 0, 0, 0, '', 1, 180, 0, 0, 0, 0, 1, 12901, 1, 12902, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 5, 0, 0, 0, 0, 12],
10032: [56, 1, 3100, 10032, u'Copper Ladle Cannon Ram', u'COPPER_LADLE_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 5, 0, 0, '', 2, 180, 0, 0, 0, 0, 2, 12901, 1, 12904, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 10, 0, 0, 0, 0, 32],
10033: [56, 2, 410, 10033, u'Cloth Cannon Ram', u'CLOTH_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 4, 0, 0, '', 1, 181, 0, 0, 0, 0, 1, 12902, 1, 12904, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 9, 0, 0, 0, 0, 14],
10034: [56, 2, 1230, 10034, u'Padded Cannon Ram', u'PADDED_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_a', 0, 13, 0, 0, '', 2, 181, 0, 0, 0, 0, 2, 12902, 1, 12906, 0, 0, 1, u'pir_m_inv_can_ram_a', 22, 18, 0, 0, 0, 0, 20],
10035: [56, 2, 990, 10035, u'Priming Ram', u'PRIMING_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 12, 0, 0, '', 1, 182, 0, 0, 0, 0, 1, 12906, 1, 12901, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 17, 0, 0, 0, 0, 22],
10036: [56, 2, 5320, 10036, u'Iron Priming Ram', u'IRON_PRIMING_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_d', 0, 19, 0, 0, '', 2, 182, 0, 0, 0, 0, 2, 12906, 1, 12905, 0, 0, 1, u'pir_m_inv_can_ram_d', 22, 24, 0, 0, 0, 0, 42],
10037: [56, 1, 1370, 10037, u'Burnt Cannon Ram', u'BURNT_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 7, 0, 0, '', 1, 184, 0, 0, 0, 0, 1, 12904, 1, 12905, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 12, 0, 0, 0, 0, 26],
10038: [56, 1, 5320, 10038, u'Charred Cannon Ram', u'CHARRED_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_b', 0, 15, 0, 0, '', 2, 184, 0, 0, 0, 0, 2, 12904, 1, 12907, 0, 0, 1, u'pir_m_inv_can_ram_b', 22, 20, 0, 0, 0, 0, 42],
10039: [56, 1, 1170, 10039, u'Rotten Cannon Ram', u'ROTTEN_CANNON_RAM', 3, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 15, 0, 0, '', 1, 185, 0, 0, 0, 0, 1, 12907, 1, 12901, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 20, 0, 0, 0, 0, 24],
10040: [56, 1, 3920, 10040, u'Wicked Cannon Ram', u'WICKED_CANNON_RAM', 4, 7, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_ram_c', 0, 19, 0, 0, '', 2, 185, 0, 0, 0, 0, 2, 12907, 1, 12902, 0, 0, 1, u'pir_m_inv_can_ram_c', 22, 24, 0, 0, 0, 0, 36],
26501: [52, 2, 400, 26501, u'Shoe_Short_Boots_Celtic', u'DECK_SLAPPER_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_f_short', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 0],
26502: [52, 2, 150, 26502, u'Shoe_Short_Boots_Three_Buckle', u'DEER_SKIN_ANKLE_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_short', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 0],
26503: [52, 2, 150, 26503, u'Shoe_Short_Boots_Plain', u'LEATHER_ANKLE_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_f_short', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 0],
26504: [52, 2, 150, 26504, u'Shoe_Short_Boots_Weave', u'CLOPPER_ANKLE_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_short', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
26505: [52, 0, 400, 26505, u'Shoe_Short_Boots_Black_Torn_Top', u'BOARDWALK_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_sho_f_short', 5, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
26506: [52, 0, 400, 26506, u'Shoe_Short_Boots_Brown_Side_Buttons', u'CORINTHIAN_ANKLE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_short', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
26507: [52, 1, 400, 26507, u'Shoe_Short_Boots_Brown_Side_Laces', u'SHOP_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_f_short', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, -1, 1, -1, 6, 0],
26508: [52, 0, 400, 26508, u'Shoe_Short_Boots_Brown_Stitching', u'PEASANT_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sho_f_short', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 7, 0],
26509: [52, 2, 400, 26509, u'Shoe_Short_Boots_Basic_Outfit', u'RECRUIT_SHORT_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 31, u'pir_t_ico_sho_f_short', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 8, 0],
26510: [52, 2, 400, 26510, u'Shoe_Short_Boots_Advanced_Outfit', u'ADVENTURE_SHORT_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 49, u'pir_t_ico_sho_f_short', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 9, 0],
26511: [52, 1, 400, 26511, u'Shoe_Short_Boots_Round_Buckle', u'ROUND_BUCKLE_SHORT_BOOTS', 3, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_short', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 10, 0],
26512: [52, 1, 400, 26512, u'Shoe_Short_Boots_Xmas', u'XMAS_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_short', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, -1, 1, -1, 11, 0],
26513: [52, 1, 400, 26513, u'Shoe_Short_Boots_Valentines', u'VALENTINES_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 46, u'pir_t_ico_sho_f_short', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, -1, 1, -1, 12, 0],
26514: [52, 1, 400, 26514, u'Shoe_Short_Boots_Mardi_Gras', u'MARDI_GRAS_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 26, u'pir_t_ico_sho_f_short', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, -1, 1, -1, 13, 0],
26515: [52, 0, 400, 26515, u'Shoe_Short_Boots_Diplomat', u'DIPLOMAT_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_short', 65, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, -1, 1, -1, 14, 0],
26516: [52, 0, 400, 26516, u'Shoe_Short_Boots_The_Prince', u'PRINCE_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_short', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 1, -1, 15, 0],
26517: [52, 1, 400, 26517, u'Shoe_Short_Boots_Voyager', u'VOYAGER_LADY_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_f_short', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, -1, 1, -1, 6, 0],
26518: [52, 1, 400, 26518, u'Shoe_Short_Boots_Mayor', u'MAYOR_LADY_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_f_short', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, -1, 1, -1, 6, 0],
26551: [52, 2, 150, 26551, u'Shoe_Mid_Boots_Skull_Buckle', u'BUCKLE_SHORT_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_med', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 0],
26552: [52, 2, 150, 26552, u'Shoe_Mid_Boots_Studs', u'RAMPART_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_sho_f_med', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 1, 0],
26553: [52, 2, 600, 26553, u'Shoe_Mid_Boots_Studs2', u'GANGPLANK_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_med', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 2, 0],
26554: [52, 2, 200, 26554, u'Shoe_Mid_Boots_Weave_Buckle', u'SIDEWALK_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_med', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 3, 0],
26555: [52, 0, 200, 26555, u'Shoe_Mid_Boots_Black_Topstitch', u'STITCH_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_med', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 4, 0],
26556: [52, 0, 200, 26556, u'Shoe_Mid_Boots_Brown_Side_Stitch', u'FIELD_BOOTS', 1, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_f_med', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 5, 0],
26557: [52, 0, 600, 26557, u'Shoe_Mid_Boots_Orange', u'PLANTATION_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sho_f_med', 5, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 6, 0],
26558: [52, 0, 1800, 26558, u'Shoe_Mid_Boots_Purple', u'TRYST_BOOTS', 3, 7, 1, 1, 0, 0, 0, 0, 46, u'pir_t_ico_sho_f_med', 3, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 7, 0],
26559: [52, 1, 1800, 26559, u'Shoe_Mid_Boots_Green_Purple', u'GREEN_PURPLE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_sho_f_med', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, -1, 2, -1, 8, 0],
26560: [52, 0, 1800, 26560, u'Shoe_Mid_Boots_Baroness', u'BARONESS_SHORT_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_short', 45, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, -1, 2, -1, 9, 0],
26561: [52, 0, 1800, 26561, u'Shoe_Mid_Boots_Rogue_Privateer', u'ROGUE_PRIVATEER_SHORT_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_short', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, -1, 2, -1, 10, 0],
26562: [52, 2, 600, 26562, u'Shoe_Mid_Boots_Raven', u'RAVEN_SHORT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_med', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, -1, 2, -1, 2, 0],
2001: [51, 2, 0, 2001, u'Flintlock Pistol', u'FLINTLOCK_PISTOL', 1, 2, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', u'A standard flintlock pistol. Fires one shot before it needs to be reloaded.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_gun_pistol_a', 6, 0, 2, 1, 0, 0, 0, 0, 0, 11],
2002: [51, 2, 300, 2002, u'Double-Barrel', u'DOUBLE_BARREL', 1, 2, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_a', u'A flintlock pistol with two barrels. Each barrel can be fired separately before reloading.', 2, 0, 0, '', 1, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_a', 7, 5, 3, 2, 0, 0, 0, 0, 0, 22.5],
2003: [51, 2, 2000, 2003, u'Tri-Barrel', u'TRI_BARREL', 1, 2, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', u'A multi-barreled pistol! This rare device can fire three times before reloading.', 7, 0, 0, '', 2, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 10, 5, 3, 0, 0, 0, 0, 0, 34.5],
2004: [51, 2, 7500, 2004, u'Heavy Tri-Barrel', u'HEAVY_TRI_BARREL', 1, 2, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', u'A Heavy Tri-Barrel Pistol, a fine piece of weaponry. ', 12, 0, 0, '', 3, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 15, 8, 3, 0, 0, 0, 0, 0, 37],
2005: [51, 2, 15000, 2005, u'Grand Pistol', u'GRAND_PISTOL', 1, 2, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', u'A Grand Pistol, equipped with three deadly barrels.', 17, 0, 0, '', 4, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 20, 12, 3, 0, 0, 0, 0, 0, 40],
2006: [51, 2, 140, 2006, u'Wheellock Pistol', u'WHEELLOCK_PISTOL', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 1, 4, 1, 0, 0, 0, 0, 0, 12],
2007: [51, 2, 170, 2007, u'Snaplock Pistol', u'SNAPLOCK_PISTOL', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 2, 6, 1, 0, 0, 0, 0, 0, 13],
2008: [51, 2, 230, 2008, u'Sea Dog Pistol', u'SEA_DOG_PISTOL', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 3, 8, 1, 0, 0, 0, 0, 0, 15],
10201: [56, 1, 400, 10201, u'Oriental Spyglass', u'ORIENTAL_SPYGLASS', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12656, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 20, 0, 0, 0, 0, 8],
2010: [51, 2, 550, 2010, u"Buccaneer's Pistol", u'BUCCANEER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12201, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 8, 21, 1, 0, 0, 0, 0, 0, 23.5],
10203: [56, 1, 2450, 10203, u'Singaporean Spyglass', u'SINGAPOREAN_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12656, 2, 12653, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 28, 0, 0, 0, 0, 32],
10204: [56, 1, 400, 10204, u"Sea Dog's Spyglass", u'SEADOG_SPYGLASS', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12654, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 10, 0, 0, 0, 0, 8],
10205: [56, 1, 1200, 10205, u"Pirate's Spyglass", u'PIRATE_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 1, 12652, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 16, 0, 0, 0, 0, 20],
10206: [56, 1, 2450, 10206, u"Raider's Spyglass", u'RAIDER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12654, 2, 12652, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 22, 0, 0, 0, 0, 32],
10207: [56, 1, 470, 10207, u"Mechant's Spyglass", u'MERCHANT_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 8, 0, 0, 1, 3404, 6],
10208: [56, 1, 910, 10208, u"Smuggler's Spyglass", u'SMUGGLER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12652, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 13, 0, 0, 2, 3404, 16],
10209: [56, 1, 2950, 10209, u"Rumrunner's Spyglass", u'RUMRUNNER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12652, 1, 12659, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 17, 0, 0, 2, 3404, 28],
2018: [51, 2, 1940, 2018, u'Shadow Stalker Pistol', u'SHADOW_STALKER_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', 0, 12, 0, 0, '', 1, 133, 0, 0, 0, 0, 1, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 17, 44, 1, 2, 3503, 0, 0, 0, 44],
10211: [56, 1, 530, 10211, u"Quarter Master's Spyglass", u'QUARTER_MASTER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 6, 0, 0, 1, 3403, 8],
10212: [56, 1, 1050, 10212, u"First Mate's Spyglass", u'FIRST_MATE_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12658, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 12, 0, 0, 2, 3403, 18],
10213: [56, 1, 3300, 10213, u"Captain's Spyglass", u'CAPTAIN_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12658, 1, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 20, 0, 0, 2, 3403, 30],
10214: [56, 1, 8100, 10214, u"Commodore's Spyglass", u'COMMODORE_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12658, 2, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 27, 0, 0, 3, 3403, 50],
10215: [56, 1, 690, 10215, u"Mariner's Spyglass", u'MARINER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12657, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 8, 0, 0, 1, 3403, 12],
10216: [56, 1, 2450, 10216, u"Lieutenant's Spyglass", u'LIEUTENANT_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12657, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 15, 0, 0, 2, 3403, 32],
2025: [51, 2, 1560, 2025, u'Silver Pistol', u'SILVER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 17, 39, 1, 1, 3500, 0, 0, 0, 39.5],
2026: [51, 2, 3600, 2026, u'Holy Pistol', u'HOLY_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 16, 0, 0, '', 1, 209, 0, 0, 0, 0, 2, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 21, 52, 1, 2, 3500, 0, 0, 0, 60],
2027: [51, 2, 5850, 2027, u'Sacred Pistol', u'SACRED_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 20, 0, 0, '', 1, 209, 0, 0, 0, 0, 3, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 25, 65, 1, 3, 3500, 0, 0, 0, 76.5],
26604: [52, 2, 600, 26604, u'Knee_Boots_Plain', u'STROLL_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_knee', 1, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 3, 0],
2029: [51, 1, 4030, 2029, u'Warding Pistol', u'WARDING_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12204, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 21, 57, 1, 2, 3506, 0, 0, 0, 63.5],
10222: [56, 1, 9680, 10222, u"Corsair's Spyglass", u'CORSAIR_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 2, 12655, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 30, 0, 0, 3, 3402, 55],
10223: [56, 1, 560, 10223, u"Conquistador's Spyglass", u'CONQUISTADOR_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 14, 0, 0, 1, 3400, 9],
10224: [56, 1, 1750, 10224, u"Mercenary's Spyglass", u'MERCENARY_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12654, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 20, 0, 0, 2, 3400, 26],
10225: [56, 1, 6410, 10225, u"Bounty Hunter's Spyglass", u'BOUNTY_HUNTER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 1, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 26, 0, 0, 2, 3400, 44],
10226: [56, 1, 12510, 10226, u"Warmonger's Spyglass", u'WARMONGER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 2, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 30, 0, 0, 3, 3400, 63],
2035: [51, 1, 10100, 2035, u"Mercer's Pistol", u'MERCER_PISTOL', 5, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', 0, 25, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12209, 3, 12210, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 30, 85, 1, 3, 3502, 0, 0, 0, 100.5],
26612: [52, 0, 1800, 26612, u'Knee_Boots_French_Assassin', u'FRENCH_ASSASSIN_KNEE_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_knee', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, -1, 3, -1, 11, 0],
2037: [51, 2, 270, 2037, u'Iron Pistol', u'IRON_PISTOL', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', u'Fires one shot before it needs to be reloaded.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 4, 13, 1, 0, 0, 0, 0, 0, 16.5],
10230: [56, 1, 7800, 10230, u"Black Guard's Spyglass", u'BLACK_GUARD_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12653, 2, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 28, 0, 0, 3, 3401, 49],
2039: [51, 2, 1330, 2039, u'Ornate Pistol', u'ORNATE_PISTOL', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', u'A fancy pistol created by a master Gunsmith.', 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 24, 53, 1, 0, 0, 0, 0, 0, 36.5],
2040: [51, 1, 700, 2040, u'Haymaker Pistol', u'HAYMAKER_PISTOL', 2, 2, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 1, 21, 1, 1, 2346, 0, 0, 0, 26.5],
10233: [56, 1, 24810, 10233, u"Davy Jones's Spyglass", u'DAVY_JONES_SPYGLASS', 5, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12660, 3, 12652, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 30, 0, 0, 3, 3400, 69],
10234: [56, 1, 13500, 10234, u"Beckette's Spyglass", u'BECKETTE_SPYGLASS', 5, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_b', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12659, 1, 12656, 0, 0, 1, u'pir_m_inv_can_spyglass_b', 29, 30, 0, 0, 3, 3404, 50],
10235: [56, 1, 15050, 10235, u"Norrington's Spyglass", u'NORRINGTON_SPYGLASS', 5, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 2, 12656, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 30, 0, 0, 3, 3401, 53],
10236: [56, 1, 24810, 10236, u"Barbossa's Spyglass", u'BARBOSSA_SPYGLASS', 5, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12655, 2, 12660, 1, 12653, 1, u'pir_m_inv_can_spyglass_d', 29, 30, 0, 0, 3, 3402, 69],
10237: [56, 1, 21480, 10237, u"Jack Sparrow's Spyglass", u'JACK_SPARROW_SPYGLASS', 5, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12659, 3, 12657, 1, 12653, 1, u'pir_m_inv_can_spyglass_c', 29, 30, 0, 0, 3, 3403, 64],
26651: [52, 2, 800, 26651, u'Tall_Boots_Celtic', u'CELTIC_TALL_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
26652: [52, 2, 800, 26652, u'Tall_Boots_Buckle', u'FENCER_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_f_tall', 1, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 1, 0],
26653: [52, 2, 800, 26653, u'Tall_Boots_Plain', u'DEER_SKIN_TALL_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_sho_f_tall', 5, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 2, 0],
26654: [52, 2, 800, 26654, u'Tall_Boots_Weave', u'SUEDE_TALL_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_f_tall', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 3, 0],
26655: [52, 1, 2500, 26655, u'Tall_Boots_Blue_Stitches', u'BLUE_SILVER_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sho_f_tall', 3, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, -1, 4, -1, 4, 0],
26656: [52, 1, 2500, 26656, u'Tall_Boots_Red_Anklebelts', u'SUN_RIDER_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_sho_f_tall', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, -1, 4, -1, 5, 0],
26657: [52, 1, 2500, 26657, u'Tall_Boots_Teal_Stitches', u'TALL_COURT_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_sho_f_tall', 3, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, -1, 4, -1, 6, 0],
26658: [52, 2, 800, 26658, u'Tall_Boots_Intermediate_Outfit', u'TRAVELERS_TALL_BOOTS', 2, 7, 0, 0, 1, 0, 0, 0, 29, u'pir_t_ico_sho_f_tall', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 7, 0],
26659: [52, 1, 2500, 26659, u'Tall_Boots_Violet_Stitches', u'VIOLET_SILVER_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sho_f_tall', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, -1, 4, -1, 8, 0],
26660: [52, 1, 2500, 26660, u'Tall_Boots_Silver', u'SILVER_TALL_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_sho_f_tall', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, -1, 4, -1, 9, 0],
26661: [52, 0, 2500, 26661, u'Tall_Boots_Bountyhunter', u'BOUNTYHUNTER_LADY_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_knee', 55, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, -1, 4, -1, 10, 0],
26662: [52, 0, 2500, 26662, u'Tall_Boots_China_Warrior', u'CHINA_WARRIOR__LADY_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_knee', 53, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, -1, 4, -1, 11, 0],
26663: [52, 0, 2500, 26663, u'Tall_Boots_Barbary_Corsair', u'BARBARY_CORSAIR_LADY_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_knee', 56, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, -1, 4, -1, 12, 0],
26664: [52, 0, 2500, 26664, u'Tall_Boots_Sea_Serpent', u'SEA_SERPENT_LADY_TALL_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 16, u'pir_t_ico_sho_f_knee', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, -1, 4, -1, 13, 0],
26803: [52, 2, 0, 26803, u'Shoe_Navy_Buckle', u'NAVY_BUCKLE_SHOES', 2, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 1, 0, 0],
2201: [51, 2, 320, 2201, u'Rusty Bayonet', u'RUSTY_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 3, 16, 1, 0, 0, 0, 0, 0, 18],
2202: [51, 2, 400, 2202, u'Flintlock Bayonet', u'FLINTLOCK_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 5, 20, 1, 0, 0, 0, 0, 0, 20],
2203: [51, 2, 400, 2203, u'Wheellock Bayonet', u'WHEELLOCK_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 5, 20, 1, 0, 0, 0, 0, 0, 20],
2204: [51, 2, 580, 2204, u'Snaplock Bayonet', u'SNAPLOCK_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12200, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 6, 22, 1, 0, 0, 0, 0, 0, 24],
2205: [51, 2, 730, 2205, u'Combat Bayonet', u'COMBAT_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 1, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 6, 22, 1, 0, 0, 0, 0, 0, 27],
2206: [51, 2, 2350, 2206, u'Battle Bayonet', u'BATTLE_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 9, 0, 0, '', 2, 100, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 14, 43, 1, 1, 2315, 0, 0, 0, 48.5],
2207: [51, 2, 5180, 2207, u'War Bayonet', u'WAR_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 16, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 21, 62, 1, 2, 2315, 0, 0, 0, 72],
2208: [51, 2, 530, 2208, u"Navy Cadet's Bayonet", u'NAVY_CADET_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 5, 20, 1, 0, 0, 0, 0, 0, 23],
2209: [51, 2, 1370, 2209, u"Navy Guard's Bayonet", u'NAVY_GUARD_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 11, 32, 1, 1, 2315, 0, 0, 0, 37],
2210: [51, 2, 3420, 2210, u"Navy Musketeer's Bayonet", u'NAVY_MUSKETEER_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 2310, 1, 12208, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 17, 49, 1, 2, 2315, 0, 0, 0, 58.5],
2211: [51, 2, 4490, 2211, u"Navy Veteran's Bayonet", u'NAVY_VETERAN_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 2310, 2, 12208, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 21, 62, 1, 2, 2315, 0, 0, 0, 67],
2212: [51, 2, 6720, 2212, u"Navy Dragoon's Bayonet", u'NAVY_DRAGOON_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 2310, 2, 12208, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 25, 70, 1, 3, 2315, 0, 0, 0, 82],
2213: [51, 2, 840, 2213, u'Crab Sticker Bayonet', u'CRAB_STICKER_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 7, 24, 1, 1, 3509, 0, 0, 0, 29],
2214: [51, 2, 1720, 2214, u'Pig Sticker Bayonet', u'PIG_STICKER_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 13, 41, 1, 2, 3509, 0, 0, 0, 41.5],
2215: [51, 2, 2450, 2215, u'Gator Sticker Bayonet', u'GATOR_STICKER_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 18, 51, 1, 2, 3509, 0, 0, 0, 49.5],
10408: [56, 2, 90, 10408, u"Voyager's Sea Chart", u'VOYAGER_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 4, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12657, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 9, 0, 0, 0, 0, 7],
10409: [56, 2, 990, 10409, u"Explorer's Sea Chart", u'EXPLORER_SEA_CHART', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 12, 0, 0, '', 2, 1001, 0, 0, 0, 0, 2, 12657, 1, 12659, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 17, 0, 0, 0, 0, 22],
2218: [51, 2, 1520, 2218, u'Military Bayonet', u'MILITARY_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 15, 40, 1, 1, 2314, 0, 0, 0, 39],
2219: [51, 2, 2860, 2219, u"Soldier's Bayonet", u'SOLDIER_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 1, 12209, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 20, 55, 1, 2, 2314, 0, 0, 0, 53.5],
2220: [51, 2, 4360, 2220, u"Officer's Bayonet", u'OFFICER_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12210, 2, 12209, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 24, 68, 1, 2, 2314, 0, 0, 0, 66],
2221: [51, 2, 6240, 2221, u"Brigadier's Bayonet", u'BRIGADIER_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12210, 2, 12209, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 28, 76, 1, 3, 2314, 0, 0, 0, 79],
2222: [51, 2, 960, 2222, u"Sea Dog's Bayonet", u'SEA_DOG_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 10, 9, 28, 1, 1, 2316, 0, 0, 0, 31],
10415: [56, 2, 470, 10415, u"Bandit's Sea Globe", u'BANDIT_SEA_GLOBE', 3, 8, 1, 0, 1, 0, 0, 0, 1, u'pir_t_ico_sal_globe_b', 0, 17, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12659, 1, 12652, 0, 0, 1, u'pir_m_inv_sal_globe_b', 28, 22, 0, 0, 0, 0, 15],
2224: [51, 2, 2860, 2224, u"Buccaneer's Bayonet", u'BUCCANEER_BAYONET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12200, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 10, 18, 51, 1, 2, 2316, 0, 0, 0, 53.5],
10417: [56, 2, 140, 10417, u"Privateer's Sea Globe", u'PRIVATEER_SEA_GLOBE', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_globe_a', 0, 10, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12655, 0, 0, 0, 0, 1, u'pir_m_inv_sal_globe_a', 28, 15, 0, 0, 0, 0, 9],
10418: [56, 2, 740, 10418, u"Corsair's Sea Globe", u'CORSAIR_SEA_GLOBE', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_globe_a', 0, 13, 0, 0, '', 1, 1001, 0, 0, 0, 0, 2, 12655, 1, 12657, 0, 0, 1, u'pir_m_inv_sal_globe_a', 28, 18, 0, 0, 0, 0, 19],
10419: [56, 2, 2550, 10419, u'Seven Seas Sea Globe', u'SEVEN_SEAS_SEA_GLOBE', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_globe_a', 0, 16, 0, 0, '', 1, 1001, 0, 0, 0, 0, 3, 12655, 2, 12657, 0, 0, 1, u'pir_m_inv_sal_globe_a', 28, 21, 0, 0, 0, 0, 29],
26804: [52, 2, 0, 26804, u'Shoe_Navy_Flap', u'NAVY_FLAP_SHOES', 2, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 2, 0, 0],
26805: [52, 2, 0, 26805, u'Shoe_Navy_Lace', u'NAVY_LACE_SHOES', 2, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 3, 0, 0],
26806: [52, 2, 0, 26806, u'Shoe_Navy_Singlestrap', u'NAVY_SINGLESTRAP_SHOES', 2, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 4, 0, 0],
2231: [51, 2, 800, 2231, u'Iron Bayonet', u'IRON_BAYONET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', u'Looks like it once belonged to a Navy Cadet.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 5, 20, 1, 0, 0, 0, 0, 0, 20],
2232: [51, 2, 1800, 2232, u'Steel Bayonet', u'STEEL_BAYONET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', u'A powerful rifle that can fight up-close or from far away.', 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 15, 40, 1, 0, 0, 0, 0, 0, 30],
2233: [51, 2, 3200, 2233, u'Master Bayonet', u'MASTER_BAYONET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', u'A master-crafted weapon, made by the best rifle makers.', 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 25, 60, 1, 0, 0, 0, 0, 0, 40],
2234: [51, 1, 960, 2234, u'Zombie Kabab Bayonet', u'ZOMBIE_KABAB_BAYONET', 2, 2, 0, 0, 0, 1, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 0, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 2310, 1, 12205, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 10, 1, 22, 1, 0, 0, 0, 0, 0, 31],
35003: [53, 2, 800, 35003, u'TattooChestLockKey', u'TATTOO_CHEST_LOCK_KEY', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_key_lock', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 13, 2, 103, 0, 0],
35004: [53, 2, 800, 35004, u'TattooChestSkullDagger', u'TATTOO_CHEST_SKULL_DAGGER', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_skull_dagger', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 14, 2, 103, 0, 0],
35005: [53, 2, 800, 35005, u'TattooChestSmallSkullCross', u'TATTOO_CHEST_SMALL_SKULL_CROSS', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_skullcrossbones', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 15, 2, 102, 0, 0],
35006: [53, 2, 800, 35006, u'TattooChestAnchor', u'TATTOO_CHEST_ANCHOR', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_anchor', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 16, 2, 103, 0, 0],
35007: [53, 2, 800, 35007, u'TattooChestCompass', u'TATTOO_CHEST_COMPASS', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_compass', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17, 2, 103, 0, 0],
35008: [53, 2, 800, 35008, u'TattooChestDaggerScroll', u'TATTOO_CHEST_DAGGER_SCROLL', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_dagger', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 18, 2, 103, 0, 0],
35009: [53, 2, 800, 35009, u'TattooChestShipAnchor', u'TATTOO_CHEST_SHIP_ANCHOR', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_ship_anchor', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 19, 2, 103, 0, 0],
35010: [53, 2, 800, 35010, u'TattooChestSmallSkullCrossbones', u'TATTOO_CHEST_SMALL_SKULL_CROSSBONES', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_skullcrossbones', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 20, 2, 103, 0, 0],
35011: [53, 2, 800, 35011, u'TattooChestSquidandShip', u'TATTOO_CHEST_SQUID_AND_SHIP', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_squid_ship', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 26, 1, 103, 0, 0],
35012: [53, 1, 800, 35012, u'TattooChestSaintPatricksDay', u'TATTOO_CHEST_SAINT_PATRICKS_DAY', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_saint_patricks', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 27, 27, 2, 103, 0, 0],
35013: [53, 2, 800, 35013, u'TattooChestCelticLeaf', u'TATTOO_CHEST_CELTIC_LEAF', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_celtic4leaf', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 42, 42, 2, 103, 0, 0],
35014: [53, 2, 800, 35014, u'TattooChestSmallEthnicEagle', u'TATTOO_CHEST_SMALL_ETHNIC_EAGLE', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_ethniceagle', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 43, 1, 103, 0, 0],
35015: [53, 2, 800, 35015, u'TattooChestSmallCrossedFlintlocks', u'TATTOO_CHEST_SMALL_CROSSED_FLINTLOCKS', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_flintlocks', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 44, 2, 102, 0, 0],
35016: [53, 2, 800, 35016, u'TattooChestShamrock', u'TATTOO_CHEST_SHAMROCK', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_shamrock', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 45, 45, 2, 103, 0, 0],
35017: [53, 2, 800, 35017, u'TattooChestSmallThaiMonkey', u'TATTOO_CHEST_SMALL_THAI_MONKEY', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_thaimonkeyface', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 46, 2, 103, 0, 0],
35018: [53, 1, 800, 35018, u'TattooChestHawaiianpectoral', u'TATTOO_CHEST_HAWAIIAN_PECTORAL', 2, 0, 0, 0, 1, 0, 0, 0, 1, u'tattoo_chest_color_hawaiian_pectoral', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 62, 3, 104, 0, 0],
35019: [53, 2, 800, 35019, u'TattooChestSmallTribalyakuza', u'TATTOO_CHEST_SMALL_TRIBALYAKUZA', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_tribal_yakuza', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 63, 2, 103, 0, 0],
35020: [53, 2, 800, 35020, u'TattooChestSpanishShipPVP', u'TATTOO_CHEST_SPANISH_SHIP_PVP', 2, 0, 0, 0, 1, 0, 0, 0, 1, u'tattoo_pvp_icon_spanish', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 104, 2, 103, 0, 0],
35021: [53, 2, 800, 35021, u'TattooChestFrenchShipPVP', u'TATTOO_CHEST_FRENCH_SHIP_PVP', 2, 0, 0, 0, 1, 0, 0, 0, 1, u'tattoo_pvp_icon_french', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 105, 2, 103, 0, 0],
35022: [53, 2, 800, 35022, u'TattooChestClassicMothersDay', u'TATTOO_CHEST_CLASSIC_MOTHERS_DAY', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_mothersday_classic', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 96, 96, 4, 103, 0, 0],
35023: [53, 1, 500, 35023, u'TattooChestHealedBulletHoles', u'TATTOO_CHEST_HEALED_BULLET_HOLES', 2, 0, 0, 1, 0, 0, 1, 0, 1, u'tattoo_scars_bulletholes_healed', 4, 0, 2, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 106, 7, 105, 0, 0],
35024: [53, 1, 500, 35024, u'TattooChestPirateBrand', u'TATTOO_CHEST_PIRATE_BRAND', 2, 0, 0, 1, 0, 0, 1, 0, 1, u'tattoo_scars_piratebrand', 8, 0, 1, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 107, 5, 106, 0, 0],
35025: [53, 1, 500, 35025, u'TattooChestLargeStitchedScar', u'TATTOO_CHEST_LARGE_STITCHED_SCAR', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_scars_traintrack01', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 108, 6, 107, 0, 0],
35026: [53, 1, 500, 35026, u'TattooChestStitchedBulletHoles', u'TATTOO_CHEST_STITCHED_BULLET_HOLES', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_bulletholes', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 129, 7, 105, 0, 0],
35027: [53, 1, 500, 35027, u'TattooChestLargeXStitch', u'TATTOO_CHEST_LARGE_X_STITCH', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_x', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 130, 8, 108, 0, 0],
35028: [53, 1, 500, 35028, u'TattooChestLargeYStitch', u'TATTOO_CHEST_LARGE_Y_STITCH', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_y', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131, 131, 8, 109, 0, 0],
35029: [53, 1, 1400, 35029, u'TattooChestFullCrossedFlintlocks', u'TATTOO_CHEST_FULL_CROSSED_FLINTLOCKS', 3, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_flintlocks', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 44, 1, 102, 0, 0],
35030: [53, 1, 1400, 35030, u'TattooChestFullTribalYakuza', u'TATTOO_CHEST_FULL_TRIBAL_YAKUZA', 3, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_tribal_yakuza', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 63, 1, 101, 0, 0],
35031: [53, 1, 1400, 35031, u'TattooChestFullSkullCrossbones', u'TATTOO_CHEST_FULL_SKULL_CROSSBONES', 3, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_mono_skullcrossbones', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 20, 1, 102, 0, 0],
35032: [53, 1, 1400, 35032, u'TattooChestFullSkullCross', u'TATTOO_CHEST_FULL_SKULL_CROSS', 3, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_skullcrossbones', 11, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 15, 1, 102, 0, 0],
35033: [53, 1, 1400, 35033, u'TattooChestFullThaiMonkey', u'TATTOO_CHEST_FULL_THAI_MONKEY', 3, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_thaimonkeyface', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46, 46, 1, 102, 0, 0],
35034: [53, 1, 1400, 35034, u'TattooChestFullEthnicEagle', u'TATTOO_CHEST_FULL_ETHNIC_EAGLE', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_ethniceagle', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 43, 1, 101, 0, 0],
10601: [56, 2, 330, 10601, u'Wooden Charm', u'WOODEN_CHARM', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 14, 0, 0, 0, 0, 8],
10602: [56, 2, 1070, 10602, u'Lucky Charm', u'LUCKY_CHARM', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 14, 0, 0, 0, 0, 16],
10603: [56, 2, 2030, 10603, u'Golden Charm', u'GOLDEN_CHARM', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 14, 0, 0, 0, 0, 24],
10604: [56, 1, 3370, 10604, u'Fortune Charm', u'FORTUNE_CHARM', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 4, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 14, 0, 0, 0, 0, 32],
10605: [56, 1, 8500, 10605, u"El Dorodo's Charm", u'EL_DORODO_CHARM', 5, 8, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 5, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 14, 0, 0, 0, 0, 40],
10621: [56, 1, 1350, 10621, u'Muntineers Charm', u'MUTINEERS_CHARM', 3, 8, 0, 0, 0, 0, 0, 1, 1, u'pir_t_ico_sal_voodooCharm_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12659, 1, 12654, 0, 0, 1, u'pir_m_inv_sal_voodooCharm_b', 30, 0, 0, 0, 0, 0, 24],
2451: [51, 2, 440, 2451, u'Old Musket', u'OLD_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 6, 22, 1, 0, 0, 0, 0, 0, 21],
2452: [51, 2, 530, 2452, u'Flintlock Musket', u'FLINTLOCK_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 8, 26, 1, 0, 0, 0, 0, 0, 23],
2453: [51, 2, 530, 2453, u'Wheellock Musket', u'WHEELLOCK_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 8, 26, 1, 0, 0, 0, 0, 0, 23],
2454: [51, 2, 580, 2454, u'Sea Dog Musket', u'SEA_DOG_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 8, 8, 26, 1, 0, 0, 0, 0, 0, 24],
2455: [51, 1, 1230, 2455, u'Hotshot Musket', u'HOTSHOT_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 8, 10, 30, 1, 1, 2347, 0, 0, 0, 35],
2456: [51, 1, 2350, 2456, u'Burnshot Musket', u'BURNSHOT_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 8, 14, 43, 1, 2, 2347, 0, 0, 0, 48.5],
2457: [51, 1, 3970, 2457, u'Flameshot Musket', u'FLAMESHOT_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12210, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 8, 21, 62, 1, 2, 2347, 0, 0, 0, 63],
2458: [51, 1, 5930, 2458, u'Firebrand Musket', u'FIREBRAND_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_b', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12210, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_musket_b', 8, 25, 70, 1, 3, 2347, 0, 0, 0, 77],
2459: [51, 2, 1600, 2459, u"Scallywag's Musket", u'SCALLYWAG_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 13, 0, 0, '', 1, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 16, 42, 1, 1, 3505, 0, 0, 0, 40],
2460: [51, 2, 3660, 2460, u"Robber's Musket", u'ROBBER_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 20, 0, 0, '', 2, 207, 0, 0, 0, 0, 1, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 23, 61, 1, 2, 3505, 0, 0, 0, 60.5],
2461: [51, 2, 6080, 2461, u"Scoundrel's Musket", u'SCOUNDREL_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 24, 0, 0, '', 3, 207, 0, 0, 0, 0, 2, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 27, 74, 1, 3, 3505, 0, 0, 0, 78],
2462: [51, 1, 900, 2462, u"Gunner's Musket", u'GUNNER_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12200, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 8, 26, 1, 1, 2341, 0, 0, 0, 30],
2463: [51, 1, 2970, 2463, u"Rifleman's Musket", u'RIFLEMAN_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12200, 1, 12206, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 19, 53, 1, 2, 2341, 0, 0, 0, 54.5],
2464: [51, 1, 5630, 2464, u"Master Gunner's Musket", u'MASTER_GUNNER_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12200, 2, 12206, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 26, 72, 1, 3, 2341, 0, 0, 0, 75],
2465: [51, 1, 1560, 2465, u"Huntsman's Musket", u'HUNTSMAN_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 9, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 12, 39, 1, 1, 2341, 0, 0, 0, 39.5],
2466: [51, 1, 3720, 2466, u"Marksman's Musket", u'MARKSMAN_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 18, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 21, 62, 1, 2, 2341, 0, 0, 0, 61],
2467: [51, 1, 5930, 2467, u"Sniper's Musket", u'SNIPER_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 24, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 27, 74, 1, 3, 2341, 0, 0, 0, 77],
2468: [51, 2, 1600, 2468, u'Hex Guard Musket', u'HEX_GUARD_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 13, 0, 0, '', 1, 132, 0, 0, 0, 0, 1, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 16, 42, 1, 1, 3503, 0, 0, 0, 40],
2469: [51, 2, 3310, 2469, u'Hex Stopper Musket', u'HEX_STOPPER_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 19, 0, 0, '', 2, 132, 0, 0, 0, 0, 2, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 22, 59, 1, 2, 3503, 0, 0, 0, 57.5],
2470: [51, 2, 5630, 2470, u'Hex Breaker Musket', u'HEX_BREAKER_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 25, 0, 0, '', 3, 132, 0, 0, 0, 0, 3, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 28, 76, 1, 3, 3503, 0, 0, 0, 75],
2471: [51, 2, 2030, 2471, u'Silver Musket', u'SILVER_MUSKET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 20, 50, 1, 1, 3500, 0, 0, 0, 45],
2472: [51, 2, 4160, 2472, u'Holy Musket', u'HOLY_MUSKET', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 20, 0, 0, '', 1, 209, 0, 0, 0, 0, 2, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 23, 61, 1, 2, 3500, 0, 0, 0, 64.5],
2473: [51, 2, 6890, 2473, u'Sacred Musket', u'SACRED_MUSKET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 26, 0, 0, '', 1, 209, 0, 0, 0, 0, 3, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 29, 78, 1, 3, 3500, 0, 0, 0, 83],
2480: [51, 2, 1060, 2480, u"Sailor's Musket", u'SAILOR_MUSKET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', u'Has great range, but cannot fire if you are too close to the enemy.', 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 8, 8, 26, 1, 0, 0, 0, 0, 0, 23],
2481: [51, 2, 2180, 2481, u'Boarding Musket', u'BOARDING_MUSKET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', u'Often used by pirates when boarding an enemy ship.', 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 8, 18, 46, 1, 0, 0, 0, 0, 0, 33],
2482: [51, 2, 3700, 2482, u'Royal Musket', u'ROYAL_MUSKET', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', u'A gold musket used by the Royal Guard.', 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 8, 28, 66, 1, 0, 0, 0, 0, 0, 43],
2036: [51, 1, 8370, 2036, u"Jack Sparrow's Revenge", u'JACK_SPARROW_REVENGE', 5, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 25, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12207, 3, 12205, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 30, 85, 1, 3, 2346, 0, 0, 0, 91.5],
2701: [51, 2, 550, 2701, u'Cracked Blunderbuss', u'CRACKED_BLUNDERBUSS', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_a', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_a', 9, 5, 27, 1, 0, 0, 0, 0, 0, 23.5],
2702: [51, 2, 650, 2702, u'Matchlock Blunderbuss', u'MATCHLOCK_BLUNDERBUSS', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_a', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_a', 9, 7, 31, 1, 0, 0, 0, 0, 0, 25.5],
2703: [51, 2, 700, 2703, u'Flintlock Blunderbuss', u'FLINTLOCK_BLUNDERBUSS', 1, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_a', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_a', 9, 8, 33, 1, 0, 0, 0, 0, 0, 26.5],
2704: [51, 2, 960, 2704, u'Scattergun', u'SCATTERGUN', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 4, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 7, 34, 1, 0, 0, 0, 0, 0, 31],
2705: [51, 2, 1980, 2705, u'Heavy Scattergun', u'HEAVY_SCATTERGUN', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 11, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 14, 53, 1, 0, 0, 0, 0, 0, 44.5],
2706: [51, 2, 3360, 2706, u'War Scattergun', u'WAR_SCATTERGUN', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 18, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 21, 72, 1, 0, 0, 0, 0, 0, 58],
2707: [51, 2, 1680, 2707, u'Sea Dog Blunderbuss', u'SEA_DOG_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12200, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 13, 46, 1, 1, 3509, 0, 0, 0, 41],
2708: [51, 2, 3190, 2708, u"Swashbuckler's Blunderbuss", u'SWASHBUCKLER_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12200, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 18, 61, 1, 2, 3509, 0, 0, 0, 56.5],
2709: [51, 2, 5040, 2709, u"Buccaneer's Blunderbuss", u'BUCCANEER_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12200, 3, 12207, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 22, 74, 1, 3, 3509, 0, 0, 0, 71],
2710: [51, 2, 1440, 2710, u'Monkey Blunderbuss', u'MONKEY_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 7, 0, 0, '', 1, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 10, 40, 1, 1, 3508, 0, 0, 0, 38],
2711: [51, 2, 3310, 2711, u'Baboon Blunderbuss', u'BABOON_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 14, 0, 0, '', 2, 121, 0, 0, 0, 0, 1, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 17, 59, 1, 2, 3508, 0, 0, 0, 57.5],
2712: [51, 2, 6080, 2712, u'Gorilla Blunderbuss', u'GORILLA_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', 0, 22, 0, 0, '', 3, 121, 0, 0, 0, 0, 2, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 25, 80, 1, 3, 3508, 0, 0, 0, 78],
2713: [51, 2, 1020, 2713, u'Navy Blunderbuss', u'NAVY_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 5, 30, 1, 1, 2344, 0, 0, 0, 32],
2714: [51, 2, 2210, 2714, u'EITC Blunderbuss', u'EITC_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 9, 38, 1, 2, 2344, 0, 0, 0, 47],
2715: [51, 2, 4160, 2715, u'Pirate Blunderbuss', u'PIRATE_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12210, 1, 12202, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 18, 61, 1, 2, 2344, 0, 0, 0, 64.5],
2716: [51, 2, 7740, 2716, u'Grand Blunderbuss', u'GRAND_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_b', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12210, 2, 12202, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_b', 9, 26, 82, 1, 3, 2344, 0, 0, 0, 88],
2717: [51, 1, 1020, 2717, u"Hunter's Blunderbuss", u'HUNTER_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_d', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_d', 9, 5, 30, 1, 1, 2345, 0, 0, 0, 32],
2718: [51, 1, 2550, 2718, u"Hired-gun's Blunderbuss", u'HIRED_GUN_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_d', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_d', 9, 11, 47, 1, 2, 2345, 0, 0, 0, 50.5],
2719: [51, 1, 4420, 2719, u"Mercenary's Blunderbuss", u'MERCENARY_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_d', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12203, 1, 12206, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_d', 9, 20, 65, 1, 2, 2345, 0, 0, 0, 66.5],
2720: [51, 1, 8280, 2720, u"Bounty Hunter's Blunderbuss", u'BOUNTY_HUNTER_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_d', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12203, 2, 12206, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_d', 9, 28, 86, 1, 3, 2345, 0, 0, 0, 91],
2721: [51, 2, 1300, 2721, u'Night Hunter Blunderbuss', u'NIGHT_HUNTER_BLUNDERBUSS', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 13, 46, 1, 1, 3503, 0, 0, 0, 36],
2722: [51, 2, 2650, 2722, u'Shadow Stalker Blunderbuss', u'SHADOW_STALKER_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 17, 0, 0, '', 1, 133, 0, 0, 0, 0, 1, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 20, 65, 1, 1, 3503, 0, 0, 0, 51.5],
2723: [51, 2, 3780, 2723, u'Foul Bane Blunderbuss', u'FOUL_BANE_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 22, 0, 0, '', 1, 133, 0, 0, 0, 0, 2, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 25, 75, 1, 2, 3503, 0, 0, 0, 61.5],
2724: [51, 2, 5330, 2724, u'Fullmoon Special Blunderbuss', u'FULLMOON_SPECIAL_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 26, 0, 0, '', 1, 133, 0, 0, 0, 0, 3, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 29, 88, 1, 3, 3503, 0, 0, 0, 73],
2725: [51, 1, 2970, 2725, u'Runic Blunderbuss', u'RUNIC_BLUNDERBUSS', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 20, 65, 1, 1, 3506, 0, 0, 0, 54.5],
2726: [51, 1, 5780, 2726, u'Warding Blunderbuss', u'WARDING_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12204, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 26, 82, 1, 2, 3506, 0, 0, 0, 76],
2727: [51, 1, 8650, 2727, u'Arcane Blunderbuss', u'ARCANE_BLUNDERBUSS', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_e', 0, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12204, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_e', 9, 30, 90, 1, 3, 3506, 0, 0, 0, 93],
2736: [51, 2, 1350, 2736, u'Small Blunderbuss', u'SMALL_BLUNDERBUSS', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_a', u'Fires a powerful spread-shot Short range only.', 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_a', 9, 6, 32, 1, 0, 0, 0, 0, 0, 26],
2737: [51, 2, 2590, 2737, u'Fine Blunderbuss', u'FINE_BLUNDERBUSS', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_c', u'Short ranged attack, but can hit a group of enemies.', 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_c', 9, 16, 52, 1, 0, 0, 0, 0, 0, 36],
2738: [51, 2, 4230, 2738, u'Royal Blunderbuss', u'ROYAL_BLUNDERBUSS', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_blunderbuss_d', u'A mighty hand-cannon decorated with gold.', 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_blunderbuss_d', 9, 26, 72, 1, 0, 0, 0, 0, 0, 46],
26012: [52, 1, 1800, 26012, u'Shoe_Tall_Boots_Emerald', u'EMERALD_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_sho_m_tall', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, 1, -1, 11, -1, 0],
2951: [51, 2, 600, 2951, u"EITC Grunt's Repeater Pistol", u'EITC_GRUNT_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_a', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12202, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_a', 7, 12, 3, 2, 0, 0, 0, 0, 0, 24.5],
2952: [51, 1, 1330, 2952, u"EITC Hired-gun's Repeater Pistol", u'EITC_HIRED_GUN_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_a', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12202, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_a', 7, 14, 11, 2, 1, 2342, 0, 0, 0, 36.5],
2953: [51, 1, 3780, 2953, u"EITC Mercenary's Repeater Pistol", u'EITC_MERCENARY_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12202, 3, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 20, 25, 3, 2, 2342, 0, 0, 0, 61.5],
2954: [51, 2, 1020, 2954, u'Wicked Repeater', u'WICKED_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_b', 0, 5, 0, 0, '', 1, 130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_b', 7, 8, 2, 3, 0, 0, 0, 0, 0, 32],
2955: [51, 2, 1890, 2955, u'Dread Repeater', u'DREAD_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_b', 0, 12, 0, 0, '', 1, 130, 1, 100, 0, 0, 1, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_b', 7, 15, 13, 3, 0, 0, 0, 0, 0, 43.5],
2956: [51, 2, 2920, 2956, u'Baneblast Repeater', u'BANEBLAST_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_b', 0, 18, 0, 0, '', 1, 130, 2, 100, 0, 0, 2, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_b', 7, 21, 22, 3, 0, 0, 0, 0, 0, 54],
2957: [51, 2, 4490, 2957, u'Skullbone Repeater', u'SKULLBONE_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_b', 0, 24, 0, 0, '', 1, 130, 3, 100, 0, 0, 3, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_b', 7, 27, 36, 3, 0, 0, 0, 0, 0, 67],
2958: [51, 2, 1190, 2958, u'Night Hunter Repeater', u'NIGHT_HUNTER_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 12, 3, 3, 1, 3503, 0, 0, 0, 34.5],
2959: [51, 2, 2810, 2959, u'Shadow Stalker Repeater', u'SHADOW_STALKER_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 18, 0, 0, '', 1, 133, 0, 0, 0, 0, 1, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 21, 22, 3, 2, 3503, 0, 0, 0, 53],
2960: [51, 2, 3420, 2960, u'Foul Bane Repeater', u'FOUL_BANE_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 23, 0, 0, '', 1, 133, 0, 0, 0, 0, 2, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 26, 29, 3, 2, 3503, 0, 0, 0, 58.5],
2961: [51, 2, 4760, 2961, u'Fullmoon Special Repeater', u'FULLMOON_SPECIAL_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 27, 0, 0, '', 1, 133, 0, 0, 0, 0, 3, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 30, 40, 3, 3, 3503, 0, 0, 0, 69],
2962: [51, 2, 1560, 2962, u'Monkey Repeater', u'MONKEY_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', 0, 6, 0, 0, '', 1, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 9, 3, 3, 1, 3508, 0, 0, 0, 39.5],
2963: [51, 2, 2650, 2963, u'Baboon Repeater', u'BABOON_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', 0, 12, 0, 0, '', 1, 121, 0, 0, 0, 0, 1, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 15, 13, 3, 2, 3508, 0, 0, 0, 51.5],
2964: [51, 2, 3540, 2964, u'Orangutan Repeater', u'ORANGUTAN_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', 0, 17, 0, 0, '', 1, 121, 0, 0, 0, 0, 2, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 20, 25, 3, 2, 3508, 0, 0, 0, 59.5],
2965: [51, 2, 4970, 2965, u'Gorilla Repeater', u'GORILLA_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_d', 0, 22, 0, 0, '', 1, 121, 0, 0, 0, 0, 3, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_d', 7, 25, 33, 3, 3, 3508, 0, 0, 0, 70.5],
2966: [51, 1, 1410, 2966, u'Sea Dog Repeater', u'SEA_DOG_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 13, 5, 3, 1, 2342, 0, 0, 0, 37.5],
2967: [51, 1, 1600, 2967, u"Swashbuckler's Repeater", u'SWASHBUCKLER_REPEATER', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 15, 8, 3, 1, 2342, 0, 0, 0, 40],
2968: [51, 1, 2860, 2968, u"Buccaneer's Repeater", u'BUCCANEER_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12201, 1, 12200, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 19, 19, 3, 2, 2342, 0, 0, 0, 53.5],
2969: [51, 1, 3310, 2969, u"Privateer's Repeater Pistol", u'PRIVATEER_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12201, 1, 12200, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 23, 25, 3, 2, 2342, 0, 0, 0, 57.5],
2970: [51, 1, 4900, 2970, u"Corsair's Repeater Pistol", u'CORSAIR_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12201, 2, 12200, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 26, 34, 3, 3, 2342, 0, 0, 0, 70],
2971: [51, 1, 5850, 2971, u'Seven Seas Repeater', u'SEVEN_SEAS_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12201, 3, 12200, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 29, 39, 3, 3, 2342, 0, 0, 0, 76.5],
2972: [51, 1, 3780, 2972, u'Runic Repeater', u'RUNIC_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_f', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_f', 7, 19, 19, 4, 1, 3506, 0, 0, 0, 61.5],
2973: [51, 1, 6080, 2973, u'Warding Repeater', u'WARDING_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_f', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12204, 1, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_f', 7, 24, 26, 4, 2, 3506, 0, 0, 0, 78],
2974: [51, 1, 9220, 2974, u'Arcane Repeater', u'ARCANE_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_f', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12204, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_f', 7, 27, 36, 4, 3, 3506, 0, 0, 0, 96],
2975: [51, 1, 10400, 2975, u'Cabal Repeater', u'CABAL_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_f', 0, 27, 0, 0, '', 1, 132, 0, 0, 0, 0, 3, 12204, 3, 12207, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_f', 7, 30, 40, 4, 3, 3506, 0, 0, 0, 102],
2976: [51, 2, 1520, 2976, u'Enhanced Repeater Pistol', u'ENHANCED_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 5, 0, 0, '', 1, 130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 8, 2, 3, 1, 2343, 0, 0, 0, 39],
2977: [51, 2, 3190, 2977, u'Clockwork Repeater Pistol', u'CLOCKWORK_REPEATER', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 15, 0, 0, '', 1, 131, 0, 0, 0, 0, 1, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 18, 17, 3, 2, 2343, 0, 0, 0, 56.5],
2978: [51, 2, 4290, 2978, u'Gatling Repeater Pistol', u'GATLING_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 18, 0, 0, '', 1, 133, 0, 0, 0, 0, 2, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 21, 27, 3, 2, 2343, 0, 0, 0, 65.5],
2979: [51, 2, 5550, 2979, u'Master Crafted Repeater Pistol', u'MASTER_CRAFTED_REPEATER', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 21, 0, 0, '', 1, 134, 0, 0, 0, 0, 2, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 24, 31, 3, 3, 2343, 0, 0, 0, 74.5],
2980: [51, 1, 2550, 2980, u'Dark Repeater Pistol', u'DARK_REPEATER_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 16, 0, 0, '', 1, 131, 0, 0, 0, 0, 1, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 19, 19, 3, 1, 3501, 0, 0, 0, 50.5],
2981: [51, 1, 4290, 2981, u'Shadow Repeater Pistol', u'SHADOW_REPEATER_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 21, 0, 0, '', 1, 131, 0, 0, 0, 0, 2, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 24, 31, 3, 2, 3501, 0, 0, 0, 65.5],
2982: [51, 1, 6010, 2982, u'Forbidden Repeater Pistol', u'FORBIDDEN_REPEATER_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', 0, 25, 0, 0, '', 1, 131, 0, 0, 0, 0, 3, 12203, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 28, 37, 3, 3, 3501, 0, 0, 0, 77.5],
2983: [51, 2, 2120, 2983, u'Silver Repeater Pistol', u'SILVER_REPEATER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 18, 12, 3, 1, 3500, 0, 0, 0, 46],
2984: [51, 2, 4490, 2984, u'Holy Repeater Pistol', u'HOLY_REPEATER_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 21, 0, 0, '', 1, 209, 0, 0, 0, 0, 2, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 24, 26, 3, 2, 3500, 0, 0, 0, 67],
2985: [51, 2, 7060, 2985, u'Sacred Repeater Pistol', u'SACRED_REPEATER_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_g', 0, 27, 0, 0, '', 1, 209, 0, 0, 0, 0, 3, 12205, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_g', 7, 30, 40, 3, 3, 3500, 0, 0, 0, 84],
2990: [51, 2, 5930, 2990, u"Barbossa's Fury", u'BARBOSSA_FURY', 5, 2, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_f', 0, 11, 0, 0, '', 3, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_f', 7, 14, 26, 4, 3, 2342, 0, 0, 0, 77],
2991: [51, 2, 840, 2991, u'Twin Barrel Pistol', u'TWIN_BARREL_PISTOL', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_a', u'A pistol with two barrels. Each barrel can be fired separately before reloading.', 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_a', 7, 6, 1, 2, 0, 0, 0, 0, 0, 20.5],
2992: [51, 2, 2180, 2992, u'Steel Repeater', u'STEEL_REPEATER', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_b', u'A repeater pistol! This gun can fire three times before reloading.', 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_b', 7, 14, 6, 3, 0, 0, 0, 0, 0, 33],
2993: [51, 2, 2810, 2993, u'Ornate Repeater', u'ORNATE_REPEATER', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_c', u'An expert gun! This gun can fire three times before reloading.', 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_c', 7, 20, 15, 3, 0, 0, 0, 0, 0, 37.5],
2994: [51, 2, 3700, 2994, u'Volley Pistol', u'VOLLEY_PISTOL', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_multiBarrel_e', u'A three-barreled gun! This gun can fire three times before reloading.', 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_multiBarrel_e', 7, 27, 26, 3, 0, 0, 0, 0, 0, 43],
2011: [51, 2, 380, 2011, u'Rattler Pistol', u'RATTLER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_b', 0, 0, 0, 0, '', 1, 130, 0, 0, 0, 0, 1, 12202, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_b', 6, 4, 13, 1, 0, 0, 0, 0, 0, 19.5],
2012: [51, 2, 1160, 2012, u'Cobra Pistol', u'COBRA_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_b', 0, 7, 0, 0, '', 1, 130, 1, 200, 0, 0, 2, 12202, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_b', 6, 12, 34, 1, 0, 0, 0, 0, 0, 34],
25101: [52, 2, 700, 25101, u'Belt_Advanced_Outfit', u'SQUARE_ADVENTURE_BELT', 3, 5, 0, 0, 1, 0, 0, 0, 8, u'pir_t_ico_blt_m_square', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, -1, 0, -1, 0],
25102: [52, 2, 300, 25102, u'Belt_Intermediate_Outfit', u'SQUARE_TRAVELERS_BELT', 2, 5, 0, 0, 1, 0, 0, 0, 7, u'pir_t_ico_blt_m_square', 1, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, -1, 0, -1, 0],
36001: [53, 2, 300, 36001, u'TattooArmShark', u'TATTOO_ARM_SHARK', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_shark', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 210, 310, 410, 510],
36002: [53, 2, 300, 36002, u'TattooArmSkullPirate', u'TATTOO_ARM_SKULL_PIRATE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_skull_pirate', 2, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 210, 310, 410, 510],
36003: [53, 2, 300, 36003, u'TattooArmSkullShield', u'TATTOO_ARM_SKULL_SHIELD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_skull_shield', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 210, 310, 410, 510],
36004: [53, 1, 300, 36004, u'TattooArmSkullStab', u'TATTOO_ARM_SKULL_STAB', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_skull_stab', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 210, 310, 410, 510],
36005: [53, 2, 300, 36005, u'TattooArmSnakesDagger', u'TATTOO_ARM_SNAKES_DAGGER', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_snake', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 210, 310, 410, 510],
36006: [53, 2, 300, 36006, u'TattooArmFancyDagger', u'TATTOO_ARM_FANCY_DAGGER', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_dagger_fancy', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 210, 310, 410, 510],
36007: [53, 2, 300, 36007, u'TattooArmSkullFlag', u'TATTOO_ARM_SKULL_FLAG', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_flag_skull', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 211, 311, 411, 511],
36008: [53, 1, 300, 36008, u'TattooArmFancyKey', u'TATTOO_ARM_FANCY_KEY', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_key', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 210, 310, 410, 510],
36009: [53, 2, 300, 36009, u'TattooArmSkullIronCross', u'TATTOO_ARM_SKULL_IRON_CROSS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_skull_ironcross', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 9, 210, 310, 410, 510],
36010: [53, 2, 300, 36010, u'TattooArmSkullScroll', u'TATTOO_ARM_SKULL_SCROLL', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_sword_hook', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 210, 310, 410, 510],
36011: [53, 2, 300, 36011, u'TattooArmNauticalStar', u'TATTOO_ARM_NAUTICAL_STAR', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_nautical_star', 15, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 22, 211, 311, 411, 511],
36012: [53, 2, 300, 36012, u'TattooArmMayanFace', u'TATTOO_ARM_MAYAN_FACE', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_mayanface', 15, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 23, 210, 310, 410, 510],
36013: [53, 2, 300, 36013, u'TattooArmOctopus', u'TATTOO_ARM_OCTOPUS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_skull_octo', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 210, 310, 410, 510],
36014: [53, 2, 300, 36014, u'TattooArmTribalSkull', u'TATTOO_ARM_TRIBAL_SKULL', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_skull_tribal', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 25, 210, 310, 410, 510],
36015: [53, 2, 300, 36015, u'TattooArmSaintPatrick', u'TATTOO_ARM_SAINT_PATRICK', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_saint_patricks', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 27, 211, 311, 411, 511],
36016: [53, 2, 300, 36016, u'TattooArmNativeLizards', u'TATTOO_ARM_NATIVE_LIZARDS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_nativelizards', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 28, 210, 310, 410, 510],
36017: [53, 2, 300, 36017, u'TattooArmTribalSwirl', u'TATTOO_ARM_TRIBAL_SWIRL', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_tribal_01', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 29, 210, 310, 410, 510],
36018: [53, 2, 300, 36018, u'TattooArmTribalBird', u'TATTOO_ARM_TRIBAL_BIRD', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_tribal_bird', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 30, 210, 310, 410, 510],
36019: [53, 2, 300, 36019, u'TattooArmTribalJellyfish', u'TATTOO_ARM_TRIBAL_JELLYFISH', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_tribal_jellyfish_01', 17, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 31, 210, 310, 410, 510],
36020: [53, 2, 300, 36020, u'TattooArmTribalJellyfishes', u'TATTOO_ARM_TRIBAL_JELLYFISHES', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_tribal_jellyfish_02', 17, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 32, 210, 310, 410, 510],
36021: [53, 2, 300, 36021, u'TattooArmAsianLeaf', u'TATTOO_ARM_ASIAN_LEAF', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_asian_leaf', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 35, 210, 310, 410, 510],
36022: [53, 2, 300, 36022, u'TattooArmEthnic', u'TATTOO_ARM_ETHNIC', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_ethnic_02', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 36, 210, 310, 410, 510],
36023: [53, 2, 300, 36023, u'TattooArmMaoriMan', u'TATTOO_ARM_MAORI_MAN', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_maoriman', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, 37, 210, 310, 410, 510],
36024: [53, 2, 300, 36024, u'TattooArmNativeLeaf', u'TATTOO_ARM_NATIVE_LEAF', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_nativeleaf', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 38, 210, 310, 410, 510],
36025: [53, 2, 300, 36025, u'TattooArmThai', u'TATTOO_ARM_THAI', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_thai_01', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 39, 210, 310, 410, 510],
36026: [53, 2, 300, 36026, u'TattooArmCelticLeaf', u'TATTOO_ARM_CELTIC_LEAF', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_celtic4leaf', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 42, 42, 211, 311, 411, 511],
36027: [53, 2, 300, 36027, u'TattooArmShamrock', u'TATTOO_ARM_SHAMROCK', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_shamrock', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 45, 45, 211, 311, 411, 511],
36028: [53, 2, 300, 36028, u'TattooArmTiki', u'TATTOO_ARM_TIKI', 2, 1, 0, 0, 1, 0, 0, 0, 1, u'tattoo_arm_color_hawaiian_tiki', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 54, 54, 210, 310, 410, 510],
36029: [53, 1, 300, 36029, u'TattooArmCelticKnot', u'TATTOO_ARM_CELTIC_KNOT', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_celtic_knot', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 52, 52, 210, 310, 410, 510],
36030: [53, 2, 300, 36030, u'TattooArmTribalWaves', u'TATTOO_ARM_TRIBAL_WAVES', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_tribal_waves', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 56, 210, 310, 410, 510],
36031: [53, 2, 300, 36031, u'TattooArmSharks', u'TATTOO_ARM_SHARKS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_sharks', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 210, 310, 410, 510],
36032: [53, 1, 300, 36032, u'TattooArmChineseKnot', u'TATTOO_ARM_CHINESE_KNOT', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_chinese_knot', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 53, 210, 310, 410, 510],
36033: [53, 2, 300, 36033, u'TattooArmWaveFan', u'TATTOO_ARM_WAVE_FAN', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_wave_fan', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61, 61, 210, 310, 410, 510],
36034: [53, 2, 300, 36034, u'TattooArmCelticDeer', u'TATTOO_ARM_CELTIC_DEER', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_celtic_deer', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 57, 57, 210, 310, 410, 510],
36035: [53, 2, 300, 36035, u'TattooArmHawaiian', u'TATTOO_ARM_HAWAIIAN', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_hawaiian', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58, 58, 210, 310, 410, 510],
36036: [53, 1, 300, 36036, u'TattooArmPetroglyph', u'TATTOO_ARM_PETROGLYPH', 2, 1, 0, 0, 1, 0, 0, 0, 1, u'tattoo_arm_mono_petroglyph', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 59, 210, 310, 410, 510],
36037: [53, 2, 300, 36037, u'TattooArmRavens', u'TATTOO_ARM_RAVENS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_mono_ravens', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 60, 210, 310, 410, 510],
36038: [53, 2, 300, 36038, u'TattooArmSpanish', u'TATTOO_ARM_SPANISH', 2, 1, 0, 0, 1, 0, 0, 0, 1, u'tattoo_pvp_icon_spanish', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 104, 211, 311, 411, 511],
36039: [53, 2, 300, 36039, u'TattooArmFrench', u'TATTOO_ARM_FRENCH', 2, 1, 0, 0, 1, 0, 0, 0, 1, u'tattoo_pvp_icon_french', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 105, 211, 311, 411, 511],
36040: [53, 2, 300, 36040, u'TattooArmMothersDayFlowersColor', u'TATTOO_ARM_MOTHERS_DAY_FLOWERS_COLOR', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_mothersday_flowers', 12, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 92, 92, 210, 310, 410, 510],
36041: [53, 2, 300, 36041, u'TattooArmMothersDaySparrows', u'TATTOO_ARM_MOTHERS_DAY_SPARROWS', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_arm_color_mothersday_sparrows', 12, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 95, 95, 210, 310, 410, 510],
36042: [53, 1, 300, 36042, u'TattooArmOctopusSleeve', u'TATTOO_ARM_OCTOPUS_SLEEVES', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_sleeve_color_octopus', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 111, 111, 212, 312, 412, 512],
36043: [53, 1, 200, 36043, u'TattooArmHealedBulletHoles', u'TATTOO_ARM_HEALED_BULLET_HOLES', 2, 1, 0, 1, 0, 0, 1, 0, 1, u'tattoo_scars_bulletholes_healed', 4, 0, 2, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 106, 212, 312, 412, 512],
36044: [53, 1, 200, 36044, u'TattooArmPirateBrand', u'TATTOO_ARM_PIRATE_BRAND', 2, 1, 0, 1, 0, 0, 1, 0, 1, u'tattoo_scars_piratebrand', 6, 0, 1, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 107, 212, 312, 412, 512],
36045: [53, 1, 200, 36045, u'TattooArmStitchedScar', u'TATTOO_ARM_STITCHED_SCAR', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_scars_traintrack01', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 108, 212, 312, 412, 512],
36046: [53, 1, 200, 36046, u'TattooArmStitchedBulletHoles', u'TATTOO_ARM_STITCHED_BULLET_HOLES', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_bulletholes', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 129, 129, 212, 312, 412, 512],
36047: [53, 1, 200, 36047, u'TattooArmStitchedX', u'TATTOO_ARM_STITCHED_X', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_x', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 130, 212, 312, 412, 512],
36048: [53, 1, 200, 36048, u'TattooArmStitchedY', u'TATTOO_ARM_STITCHED_Y', 2, 1, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_y', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131, 131, 212, 312, 412, 512],
2019: [51, 2, 2810, 2019, u'Foul Bane Pistol', u'FOUL_BANE_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', 0, 19, 0, 0, '', 1, 133, 0, 0, 0, 0, 2, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 24, 58, 1, 2, 3503, 0, 0, 0, 53],
3501: [51, 2, 80, 3501, u'Voodoo Doll', u'VOODOO_DOLL', 1, 3, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_a', u'A mystical doll said to be able to bind to the spirit of anything it touches.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_hnd_dol_spirit_a', 11, 0, 2, 0, 0, 0, 7, 0, 0, 1],
3502: [51, 2, 200, 3502, u'Cloth Doll', u'CLOTH_DOLL', 1, 3, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', u'A powerful doll able to bind to the spirits of the living and the dead.', 3, 0, 0, '', 1, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 11, 5, 3, 0, 0, 0, 8, 0, 0, 2.5],
3503: [51, 2, 1200, 3503, u'Witch Doll', u'WITCH_DOLL', 1, 3, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', u'An elaborate oriental doll able to strongly bind to the spirits of others.', 8, 0, 0, '', 2, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 11, 10, 4, 0, 0, 0, 9, 0, 0, 4],
3504: [51, 2, 2400, 3504, u'Pirate Doll', u'PIRATE_DOLL', 1, 3, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', u'A Pirate Doll. Bind to the spirits of the Caribbean with this rare doll.', 13, 0, 0, '', 3, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 11, 15, 6, 0, 0, 0, 10, 0, 0, 6],
3505: [51, 2, 4800, 3505, u'Taboo Doll', u'TABOO_DOLL', 1, 3, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', u'A Taboo Doll. Many pirates fear the power and unknowns of this legendary doll.', 18, 0, 0, '', 4, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 11, 20, 8, 0, 0, 0, 11, 0, 0, 8],
3506: [51, 2, 10, 3506, u'Ugly Voodoo Doll', u'UGLY_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 3, 5, 0, 0, 0, 7, 0, 0, 2.5],
3507: [51, 2, 30, 3507, u'Zombie Doll', u'ZOMBIE_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 1, 0, 0, '', 0, 0, 1, 220, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 3, 8, 0, 0, 0, 7, 0, 0, 5],
3508: [51, 2, 50, 3508, u'Ghoul Doll', u'GHOUL_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 4, 6, 0, 0, 0, 7, 0, 0, 7],
3509: [51, 2, 120, 3509, u'Hypnotic Doll', u'HYPNOTIC_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 6, 12, 0, 1, 3505, 7, 0, 0, 11],
3510: [51, 2, 580, 3510, u'Manipulation Doll', u'MANIPULATION_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12609, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 12, 24, 0, 2, 3505, 7, 0, 0, 24],
3511: [51, 2, 840, 3511, u'Mind Control Doll', u'MIND_CONTROL_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12609, 1, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 18, 32, 0, 2, 3505, 7, 0, 0, 29],
3512: [51, 2, 1760, 3512, u'Domination Doll', u'DOMINATION_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12609, 1, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 24, 44, 0, 3, 3505, 7, 0, 0, 42],
3513: [51, 2, 180, 3513, u'Evil Doll', u'EVIL_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 12, 19, 0, 1, 2306, 8, 0, 0, 13.5],
3514: [51, 2, 550, 3514, u'Wicked Doll', u'WICKED_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 1, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 16, 29, 0, 1, 2306, 8, 0, 0, 23.5],
3515: [51, 2, 730, 3515, u'Unholy Doll', u'UNHOLY_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 2, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 20, 34, 0, 2, 2306, 8, 0, 0, 27],
3516: [51, 2, 1300, 3516, u'Villainy Doll', u'VILLAINY_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12602, 2, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 24, 44, 0, 2, 2306, 8, 0, 0, 36],
3517: [51, 1, 1560, 3517, u'Tyranny Doll', u'TYRANNY_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12602, 3, 12604, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 28, 49, 0, 3, 2306, 8, 0, 0, 39.5],
3518: [51, 2, 260, 3518, u'Skeleton Doll', u'SKELETON_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 11, 18, 0, 1, 2303, 8, 0, 0, 16],
3519: [51, 2, 510, 3519, u'Cemetery Doll', u'CEMETERY_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 15, 23, 0, 1, 2303, 8, 0, 0, 22.5],
3520: [51, 2, 1190, 3520, u'Crypt Doll', u'CRYPT_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 19, 33, 0, 2, 2303, 8, 0, 0, 34.5],
3521: [51, 2, 2120, 3521, u'Carrion Doll', u'CARRION_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 1, 12607, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 23, 38, 0, 2, 2303, 8, 0, 0, 46],
3522: [51, 2, 3970, 3522, u'Revenant Doll', u'REVENANT_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 2, 12607, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 27, 48, 0, 3, 2303, 8, 0, 0, 63],
3523: [51, 1, 4760, 3523, u'Tomb King Doll', u'TOMB_KING_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12602, 2, 12607, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 30, 52, 0, 3, 2303, 8, 0, 0, 69],
3524: [51, 2, 260, 3524, u'Swashbuckler Doll', u'SWASHBUCKLER_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 8, 14, 0, 1, 2302, 10, 0, 0, 16],
3525: [51, 2, 460, 3525, u'Buccaneer Doll', u'BUCCANEER_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 10, 0, 0, '', 1, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 12, 19, 0, 1, 2302, 10, 0, 0, 21.5],
3526: [51, 2, 1410, 3526, u'Mutineer Doll', u'MUTINEER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 14, 0, 0, '', 1, 100, 0, 0, 0, 0, 1, 12605, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 16, 29, 0, 2, 2302, 10, 0, 0, 37.5],
3527: [51, 2, 2030, 3527, u'Privateer Doll', u'PRIVATEER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 18, 0, 0, '', 2, 100, 0, 0, 0, 0, 2, 12605, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 20, 34, 0, 2, 2302, 10, 0, 0, 45],
3528: [51, 1, 3840, 3528, u'Warmonger Doll', u'WARMONGER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 22, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 12605, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 24, 44, 0, 3, 2302, 10, 0, 0, 62],
3529: [51, 1, 6010, 3529, u'Barbossa Doll', u'BARBOSSA_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', 0, 26, 0, 0, '', 3, 100, 0, 0, 0, 0, 3, 12605, 2, 12601, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 28, 59, 0, 3, 2302, 10, 0, 0, 77.5],
3530: [51, 1, 580, 3530, u'Dark Voodoo Doll', u'DARK_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_e', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_e', 12, 17, 30, 0, 1, 3501, 7, 0, 0, 24],
3531: [51, 1, 1640, 3531, u'Shadow Voodoo Doll', u'SHADOW_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_e', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 1, 12600, 0, 0, 1, u'pir_m_hnd_dol_bane_e', 12, 22, 37, 0, 2, 3501, 7, 0, 0, 40.5],
3532: [51, 1, 2810, 3532, u'Forbidden Voodoo Doll', u'FORBIDDEN_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_e', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 2, 12600, 0, 0, 1, u'pir_m_hnd_dol_bane_e', 12, 27, 48, 0, 2, 3501, 7, 0, 0, 53],
3533: [51, 1, 4620, 3533, u'Jolly Roger Voodoo Doll', u'JOLLY_ROGER_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_e', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 2, 12608, 0, 0, 1, u'pir_m_hnd_dol_bane_e', 12, 30, 62, 0, 3, 3501, 7, 0, 0, 68],
3534: [51, 1, 760, 3534, u'Cursed Doll', u'CURSED_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_d', 0, 20, 0, 0, '', 1, 209, 0, 0, 0, 0, 1, 12608, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_d', 12, 22, 37, 0, 0, 0, 7, 0, 0, 27.5],
3535: [51, 1, 1640, 3535, u'Tormented Voodoo Doll', u'TORMENTED_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_d', 0, 23, 0, 0, '', 2, 209, 0, 0, 0, 0, 2, 12608, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_d', 12, 25, 45, 0, 0, 0, 7, 0, 0, 40.5],
3536: [51, 1, 2700, 3536, u'Overlord Voodoo Doll', u'OVERLORD_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_d', 0, 27, 0, 0, '', 3, 209, 0, 0, 0, 0, 3, 12608, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_d', 12, 29, 50, 0, 0, 0, 7, 0, 0, 52],
3537: [51, 1, 7230, 3537, u'Davy Jones Voodoo Doll', u'DAVY_JONES_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_d', 0, 28, 0, 0, '', 3, 209, 0, 0, 0, 0, 3, 12608, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_d', 12, 30, 62, 0, 3, 2302, 7, 0, 0, 85],
3538: [51, 2, 1920, 3538, u'Fury Doll', u'FURY_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_a', u'This dark doll can give other pirates berserk power.', 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_a', 12, 7, 13, 0, 1, 2302, 7, 0, 0, 15.5],
3539: [51, 2, 2740, 3539, u'Rage Doll', u'RAGE_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', u'An evil voodoo doll filled with anger.', 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 12, 19, 0, 1, 2302, 7, 0, 0, 18.5],
3540: [51, 2, 3700, 3540, u'Grudger Doll', u'GRUDGER_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', u'A dark doll shaped like a tough pirate.', 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 17, 25, 0, 1, 2302, 7, 0, 0, 21.5],
3541: [51, 2, 5000, 3541, u'Vengeful Doll', u'VENGEFUL_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_b', u'A grim doll that increases the battle fury in others.', 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_b', 12, 22, 32, 0, 1, 2302, 7, 0, 0, 25],
3542: [51, 2, 6270, 3542, u'Wrath Doll', u'WRATH_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_bane_c', u'A doll shaped like an infamous pirate.', 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_bane_c', 12, 27, 38, 0, 1, 2302, 7, 0, 0, 28],
2028: [51, 1, 1760, 2028, u'Runic Pistol', u'RUNIC_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12204, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 15, 40, 1, 1, 3506, 0, 0, 0, 42],
22501: [52, 2, 300, 22501, u'Vest_Closed_Cloth_TwoButton', u'BUTTONED_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_vst_f_closed', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 0],
20001: [52, 2, 400, 20001, u'Hat_Captain_Leather', u'HIGH_WING_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_captain', 38, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 0],
20002: [52, 2, 400, 20002, u'Hat_Tricorne_Brown', u'ROUGH_TRICORNE', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_tricorn', 38, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
20003: [52, 0, 400, 20003, u'Hat_Tricorne_Orange', u'ORANGE_TRICORNE', 2, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_tricorn', 38, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 0],
20004: [52, 0, 400, 20004, u'Hat_Tricorne_Black_Skull', u'SKULL_TRICORNE', 2, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_tricorn', 38, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 0],
20005: [52, 0, 400, 20005, u'Hat_Tricorne_Navy_Trim', u'NAVY_TRIM_TRICORNE', 2, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_tricorn', 4, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 0],
20006: [52, 2, 400, 20006, u'Hat_Navy_India', u'EITC_HAT', 2, 0, 1, 1, 0, 0, 0, 1, 10, u'pir_t_ico_hat_admiral', 4, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 0, -1, 0],
20007: [52, 2, 400, 20007, u'Hat_Admiral', u'ADMIRAL_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_admiral', 4, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 0, -1, 0],
20008: [52, 0, 400, 20008, u'Hat_Bandana_Patches', u'PATCHED_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 43, u'pir_t_ico_hat_bandana_full', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 3, -1, 0],
20009: [52, 0, 400, 20009, u'Hat_Bandana_ZigZag', u'ZIGZAG_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_bandana_full', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 4, -1, 0],
20010: [52, 0, 400, 20010, u'Hat_Band_Patches', u'PATCHED_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 43, u'pir_t_ico_hat_bandana', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 3, -1, 0],
20011: [52, 0, 400, 20011, u'Hat_Band_ZigZag', u'ZIGZAG_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_bandana', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 4, -1, 0],
20012: [52, 2, 150, 20012, u'Hat_Cap', u'CAP', 1, 0, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_hat_beanie', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 23, 0, 0, 1],
20013: [52, 0, 400, 20013, u'Hat_Cap_Crossbones', u'CROSSBONES_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_beanie', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 23, 1, 1, 0],
20014: [52, 0, 400, 20014, u'Hat_Beanie_Crossbones', u'CROSSBONE_BEANIE', 2, 0, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_beanie', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 23, 2, 2, 0],
20015: [52, 2, 600, 20015, u'Hat_Ostrich_Dark', u'DARK_OSTRICH_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_barbossa', 36, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 0, -1, 0],
20016: [52, 0, 600, 20016, u'Hat_Ostrich_Purple', u'PURPLE_OSTRICH_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_hat_barbossa', 36, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 1, -1, 0],
20017: [52, 0, 600, 20017, u'Hat_Ostrich_Blue', u'BLUE_OSTRICH_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_barbossa', 36, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 2, -1, 0],
20018: [52, 0, 600, 20018, u'Hat_Ostrich_Red', u'RED_OSTRICH_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_barbossa', 36, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 3, -1, 0],
20019: [52, 2, 600, 20019, u'Hat_Ostrich_Magenta', u'MAGENTA_OSTRICH_HAT', 3, 0, 0, 0, 1, 0, 0, 0, 49, u'pir_t_ico_hat_barbossa', 36, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 4, -1, 0],
20020: [52, 2, 900, 20020, u'Hat_Ostrich_Advanced_Outfit', u'ADVENTURE_OSTRICH_HAT', 3, 0, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_hat_barbossa', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 5, -1, 0],
20021: [52, 2, 600, 20021, u'Hat_Ostrich_Intermediate_Outfit', u'TRAVELERS_OSTRICH_HAT', 2, 0, 0, 0, 1, 0, 0, 0, 10, u'pir_t_ico_hat_barbossa', 1, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 6, -1, 0],
20022: [52, 1, 900, 20022, u'Hat_Beanie_Green_Silk', u'GREEN_SILK_BEANIE', 3, 0, 0, 1, 0, 0, 0, 0, 5, u'pir_t_ico_hat_beanie', 38, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, 8, -1, 3, -1, 0],
20023: [52, 0, 900, 20023, u'Hat_Beanie_Brown_Beads', u'BROWN_BEADS_BEANIE', 3, 0, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_beanie', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 23, 4, 4, 0],
20050: [52, 0, 900, 20050, u'Hat_Wing_Baron', u'BARON_WING_HAT', 3, 0, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_hat_captain', 44, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, 1, -1, 1, -1, 0],
20051: [52, 0, 900, 20051, u'Hat_Wing_Prince', u'PRINCE_WING_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_captain', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 1, -1, 2, -1, 0],
20052: [52, 0, 600, 20052, u'Hat_Wing_Rogue_Privateer', u'ROGUE_PRIVATEER_WING_HAT', 2, 0, 0, 0, 0, 0, 0, 0, 3, u'pir_t_ico_hat_captain', 47, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 1, -1, 3, -1, 0],
20101: [52, 0, 400, 20101, u'Hat_Bandana_Purple', u'PURPLE_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_hat_bandana_full_tie', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 3, 0],
20102: [52, 0, 400, 20102, u'Hat_Bandana_Bohemian', u'BOHEMIAN_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_hat_bandana_full_tie', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 4, 0],
20103: [52, 0, 400, 20103, u'Hat_Band_Purple', u'PURPLE_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_hat_bandana', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 3, 0],
20104: [52, 0, 400, 20104, u'Hat_Band_Bohemian', u'BOHEMIAN_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_hat_bandana', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 4, 0],
20105: [52, 2, 600, 20105, u'Hat_Cavalry_Brown', u'BROWN_CAVALRY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_dresshat', 36, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 0],
20106: [52, 0, 600, 20106, u'Hat_Cavalry_Blue', u'BLUE_CAVALRY_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_dresshat', 36, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 0],
20107: [52, 0, 600, 20107, u'Hat_Cavalry_Green', u'GREEN_CAVALRY_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_hat_dresshat', 36, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 0],
20108: [52, 2, 600, 20108, u'Hat_Cavalry_Purple', u'PURPLE_CAVALRY_HAT', 3, 0, 0, 0, 1, 0, 0, 0, 36, u'pir_t_ico_hat_dresshat', 36, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
20109: [52, 0, 600, 20109, u'Hat_Cavalry_Butterfly', u'BUTTERFLY_CAVALRY_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 25, u'pir_t_ico_hat_dresshat', 3, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
20110: [52, 1, 900, 20110, u'Hat_Bandana_Red_Silk', u'RED_SILK_BANDANA', 3, 0, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_hat_bandana', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, -1, 5, -1, 7, 0],
20120: [52, 2, 900, 20120, u'Hat_Cavalry_Advanced_Outfit', u'ADVENTURE_CAVALRY_HAT', 3, 0, 0, 0, 1, 0, 0, 0, 29, u'pir_t_ico_hat_dresshat', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
20121: [52, 2, 600, 20121, u'Hat_Cavalry_Intermediate_Outfit', u'TRAVELERS_CAVALRY_HAT', 2, 0, 0, 0, 1, 0, 0, 0, 45, u'pir_t_ico_hat_dresshat', 1, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 6, 0],
20122: [52, 2, 600, 20122, u'Hat_Dinghy', u'DINGHY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_featherhat', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
20123: [52, 2, 400, 20123, u'Hat_Bonnet_Simple', u'SIMPLE_BONNET', 2, 0, 0, 0, 1, 0, 0, 1, 2, u'pir_t_ico_hat_bandana', 42, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
20124: [52, 0, 900, 20124, u'Hat_Baroness', u'BARONESS_HAT', 3, 0, 0, 0, 0, 0, 0, 0, 31, u'pir_t_ico_hat_featherhat', 45, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, -1, 3, -1, 1, 0],
20125: [52, 0, 900, 20125, u'Hat_Prince', u'PRINCE_LADY_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_featherhat', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 3, -1, 2, 0],
20127: [52, 0, 900, 20127, u'Hat_Cavalry_Rogue_Privateer', u'ROGUE_PRIVATEER_CAVALRY_HAT', 3, 0, 0, 0, 0, 0, 0, 0, 3, u'pir_t_ico_hat_dresshat', 47, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, -1, 1, -1, 7, 0],
3801: [51, 2, 0, 3801, u'Wax Doll', u'WAX_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 2, 4, 0, 0, 0, 7, 0, 0, 2],
3802: [51, 2, 30, 3802, u'Clay Doll', u'CLAY_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 3, 5, 0, 0, 0, 7, 0, 0, 5.5],
3803: [51, 2, 310, 3803, u'Silk Doll', u'SILK_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 10, 15, 0, 1, 2301, 11, 0, 0, 17.5],
3804: [51, 2, 840, 3804, u'Oriental Doll', u'ORIENTAL_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 14, 24, 0, 2, 2301, 11, 0, 0, 29],
3805: [51, 2, 1480, 3805, u'Warrior Doll', u'WARRIOR_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 1, 12606, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 18, 29, 0, 2, 2301, 11, 0, 0, 38.5],
3806: [51, 2, 3250, 3806, u'Nomad Doll', u'NOMAD_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 2, 12606, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 22, 38, 0, 3, 2301, 11, 0, 0, 57],
3807: [51, 2, 4030, 3807, u'Warlord Doll', u'WARLORD_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12606, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 26, 43, 0, 3, 2301, 11, 0, 0, 63.5],
3808: [51, 1, 5040, 3808, u'Shao Feng Doll', u'SHAO_FENG_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12606, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 30, 58, 0, 3, 2301, 11, 0, 0, 71],
3809: [51, 2, 90, 3809, u'Witch Doctor Doll', u'WITCH_DOCTOR_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 9, 13, 0, 1, 2306, 12, 0, 0, 9.5],
3810: [51, 2, 260, 3810, u'Bewitcher Doll', u'BEWITCHER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 1, 12605, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 12, 22, 0, 2, 2306, 12, 0, 0, 16],
3811: [51, 2, 530, 3811, u'Siren Doll', u'SIREN_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 2, 12605, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 16, 26, 0, 2, 2306, 12, 0, 0, 23],
3812: [51, 2, 960, 3812, u'Occult Doll', u'OCCULT_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 2, 12605, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 20, 36, 0, 3, 2306, 12, 0, 0, 31],
3813: [51, 2, 1260, 3813, u'Banshee Doll', u'BANSHEE_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12605, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 24, 41, 0, 3, 2306, 12, 0, 0, 35.5],
3814: [51, 1, 1810, 3814, u'Black Magic Doll', u'BLACK_MAGIC_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12605, 1, 12607, 1, u'pir_m_hnd_dol_mojo_a', 13, 28, 45, 0, 3, 2306, 12, 0, 0, 42.5],
3815: [51, 2, 210, 3815, u'Mystic Doll', u'MYSTIC_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 7, 11, 0, 1, 2304, 12, 0, 0, 14.5],
3816: [51, 2, 650, 3816, u'Priestess Doll', u'PRIESTESS_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 11, 21, 0, 2, 2304, 12, 0, 0, 25.5],
20201: [52, 2, 400, 20201, u'Hat_Navy_Red_Coat', u'REDCOAT_HAT', 2, 0, 0, 0, 1, 0, 0, 1, 29, u'pir_t_ico_hat_tricorn', 4, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 2, 0, 0, 0],
20202: [52, 2, 150, 20202, u'Hat_Bandana', u'BANDANA', 1, 0, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_hat_bandana_full', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 5, 0, 0, 1],
20203: [52, 2, 400, 20203, u'Hat_Bandana_Blue', u'BLUE_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_hat_bandana_full', 1, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 5, 1, 1, 0],
20204: [52, 1, 400, 20204, u'Hat_Bandana_Crossbones', u'CROSSBONES_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_bandana_full', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 6, 5, 2, 2, 0],
20205: [52, 2, 400, 20205, u'Hat_Bandana_Basic_Outfit', u'RECRUIT_BANDANA', 2, 0, 0, 0, 1, 0, 0, 0, 3, u'pir_t_ico_hat_bandana_full', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 5, 5, 5, 0],
20206: [52, 1, 400, 20206, u'Hat_Bandana_Thanks', u'LOYALTY_BANDANA', 2, 0, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_hat_bandana_full', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 6, 5, 6, 6, 0],
20207: [52, 2, 150, 20207, u'Hat_Band', u'HEAD_BAND', 1, 0, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_hat_bandana', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 6, 0, 0, 1],
20208: [52, 2, 400, 20208, u'Hat_Band_Blue', u'BLUE_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_hat_bandana', 1, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 6, 1, 1, 0],
20209: [52, 0, 400, 20209, u'Hat_Band_Crossbones', u'CROSSBONES_BAND', 2, 0, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_bandana', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 6, 2, 2, 0],
20210: [52, 2, 1200, 20210, u'Hat_Tricorne_French', u'FRENCH_TRICORNE', 3, 0, 0, 1, 0, 0, 1, 0, 29, u'pir_t_ico_hat_french', 38, 15, 0, 5, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 7, 0, 0, 0],
20211: [52, 2, 1200, 20211, u'Hat_Ostrich_Spanish', u'SPANISH_OSTRICH_HAT', 3, 0, 0, 1, 0, 0, 1, 0, 43, u'pir_t_ico_hat_barbossa', 40, 15, 0, 6, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 8, 0, 0, 0],
20212: [52, 0, 1000, 20212, u'Hat_French_Blue', u'BLUE_FRENCH_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_french1', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, 12, 9, 0, 0, 0],
20213: [52, 1, 1000, 20213, u'Hat_French_Green', u'GREEN_FRENCH_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_hat_french1', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, 12, 9, 1, 1, 0],
20214: [52, 0, 1000, 20214, u'Hat_French_Violet', u'VIOLET_FRENCH_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 26, u'pir_t_ico_hat_french1', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 9, 2, 2, 0],
20215: [52, 1, 1500, 20215, u'Hat_Bicorne_Black', u'BLACK_BICORNE', 3, 0, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_hat_french2', 8, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, 13, 10, 0, 0, 0],
20216: [52, 0, 1500, 20216, u'Hat_Bicorne_Brown', u'BROWN_BICORNE', 3, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_french2', 8, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 10, 1, 1, 0],
20217: [52, 1, 2000, 20217, u'Hat_Chapeau_Black', u'BLACK_CHAPEAU', 3, 0, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_french3', 40, 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 14, 11, 0, 0, 0],
20218: [52, 1, 2000, 20218, u'Hat_Chapeau_Navy', u'NAVY_CHAPEAU', 3, 0, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_french3', 40, 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 14, 11, 1, 1, 0],
20219: [52, 2, 1000, 20219, u'Hat_Gaucho_Black', u'BLACK_GAUCHO', 2, 0, 0, 1, 0, 0, 1, 0, 32, u'pir_t_ico_hat_spanish1', 38, 14, 0, 4, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 12, 0, 0, 0],
20220: [52, 0, 400, 20220, u'Hat_Gaucho_Brown', u'BROWN_GAUCHO', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_spanish1', 5, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 12, 1, 1, 0],
20221: [52, 1, 400, 20221, u'Hat_Gaucho_Red', u'RED_GAUCHO', 2, 0, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_hat_spanish1', 5, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, 15, 12, 2, 2, 0],
20222: [52, 2, 800, 20222, u'Hat_Cabaset_Bronze', u'BRONZE_CABASET', 3, 0, 0, 1, 0, 0, 1, 0, 1, u'pir_t_ico_hat_spanish2', 41, 16, 0, 3, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 13, 0, 0, 0],
20223: [52, 2, 800, 20223, u'Hat_Cabaset_Steel', u'STEEL_CABASET', 3, 0, 0, 1, 0, 0, 1, 0, 20, u'pir_t_ico_hat_spanish2', 41, 18, 0, 1, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 13, 1, 1, 0],
20224: [52, 2, 800, 20224, u'Hat_Cabaset_Embossed', u'EMBOSSED_CABASET', 3, 0, 0, 1, 0, 0, 1, 0, 10, u'pir_t_ico_hat_spanish2', 41, 20, 0, 1, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 13, 2, 2, 0],
20225: [52, 1, 1000, 20225, u'Hat_Cabaset_Rusted', u'RUSTED_CABASET', 3, 0, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_hat_spanish2', 41, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, 16, 13, 3, 3, 0],
20226: [52, 1, 600, 20226, u'Hat_Cavalier_Black', u'BLACK_CAVALIER', 2, 0, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_spanish3', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, 17, 14, 0, 0, 0],
20227: [52, 0, 600, 20227, u'Hat_Cavalier_Leather', u'LEATHER_CAVALIER', 2, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_spanish3', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 14, 1, 1, 0],
20228: [52, 1, 600, 20228, u'Hat_Cavalier_Burgundy', u'BURGUNDY_CAVALIER', 2, 0, 0, 1, 0, 0, 0, 0, 36, u'pir_t_ico_hat_spanish3', 1, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, 17, 14, 2, 2, 0],
20229: [52, 1, 600, 20229, u'Hat_Cavalier_Gray', u'GRAY_CAVALIER', 2, 0, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_hat_spanish3', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 14, 3, 3, 0],
20230: [52, 2, 1000, 20230, u'Hat_Explorer_Black', u'BLACK_EXPLORER_HAT', 2, 0, 0, 1, 0, 0, 1, 0, 32, u'pir_t_ico_hat_land1', 38, 14, 0, 4, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 15, 0, 0, 0],
20231: [52, 2, 400, 20231, u'Hat_Explorer_Leather', u'LEATHER_EXPLORER_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_land1', 5, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 15, 1, 1, 0],
20232: [52, 2, 400, 20232, u'Hat_Explorer_Straw', u'STRAW_EXPLORER_HAT', 2, 0, 0, 1, 0, 0, 0, 0, 9, u'pir_t_ico_hat_land1', 5, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 15, 2, 2, 0],
20233: [52, 0, 1600, 20233, u'Hat_Parade_Black', u'BLACK_PARADE_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_land2', 3, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 16, 0, 0, 0],
20234: [52, 0, 1600, 20234, u'Hat_Parade_Red', u'RED_PARADE_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_hat_land2', 3, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 16, 1, 1, 0],
20235: [52, 2, 800, 20235, u'Hat_Conquistador_Steel', u'STEEL_CONQUISTADOR', 3, 0, 0, 1, 0, 0, 1, 0, 20, u'pir_t_ico_hat_land3', 41, 35, 0, 2, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 17, 0, 0, 0],
20236: [52, 0, 2100, 20236, u'Hat_Conquistador_Gold', u'GOLD_CONQUISTADOR', 3, 0, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_hat_land3', 41, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 17, 1, 1, 0],
20237: [52, 0, 300, 20237, u'Hat_Stocking_Blue', u'BLUE_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 4, u'pir_t_ico_hat_holiday', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 0, 0, 0],
20238: [52, 0, 300, 20238, u'Hat_Stocking_Green', u'GREEN_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 5, u'pir_t_ico_hat_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 2, 2, 0],
20239: [52, 0, 300, 20239, u'Hat_Stocking_Gold', u'GOLD_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 22, u'pir_t_ico_hat_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 3, 3, 0],
20240: [52, 0, 300, 20240, u'Hat_Stocking_Red', u'RED_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_hat_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 4, 4, 0],
20241: [52, 0, 300, 20241, u'Hat_Stocking_Polar', u'POLAR_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_hat_holiday', 10, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 5, 5, 0],
20242: [52, 0, 300, 20242, u'Hat_Stocking_Violet', u'VIOLET_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 25, u'pir_t_ico_hat_holiday', 10, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 7, 7, 0],
20243: [52, 0, 300, 20243, u'Hat_Stocking_Cotton', u'COTTON_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_hat_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 8, 8, 1],
20244: [52, 0, 300, 20244, u'Hat_Stocking_Yellow', u'YELLOW_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 6, u'pir_t_ico_hat_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 9, 9, 0],
20245: [52, 2, 300, 20245, u'Hat_Party_Blue_Red', u'BLUE_RED_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 39, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 0, 0, 0],
20246: [52, 0, 300, 20246, u'Hat_Party_Green_Orange', u'GREEN_ORANGE_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 21, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 1, 1, 0],
20247: [52, 0, 300, 20247, u'Hat_Party_Pink', u'PINK_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 15, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 2, 2, 0],
20248: [52, 0, 300, 20248, u'Hat_Party_Orange_Green', u'ORANGE_GREEN_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 13, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 3, 3, 0],
20249: [52, 0, 300, 20249, u'Hat_Party_Sky_Blue', u'SKY_BLUE_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 33, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 4, 4, 0],
20250: [52, 0, 300, 20250, u'Hat_Party_Purple_Yellow', u'PURPLE_YELLOW_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 14, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 5, 5, 0],
20251: [52, 0, 500, 20251, u'Hat_Stove_Black', u'BLACK_STOVE_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_party2', 5, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 20, 0, 0, 0],
20252: [52, 0, 500, 20252, u'Hat_Stove_Blue', u'BLUE_STOVE_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_hat_party2', 5, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 20, 1, 1, 0],
20253: [52, 1, 500, 20253, u'Hat_Stove_Brown', u'BROWN_STOVE_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_hat_party2', 5, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 20, 2, 2, 0],
20254: [52, 1, 500, 20254, u'Hat_Stove_Green', u'GREEN_STOVE_HAT', 2, 0, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_hat_party2', 5, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, 23, 20, 3, 3, 0],
20255: [52, 2, 550, 20255, u'Hat_Buccaneer_Black', u'BLACK_BUCCANEER_HAT', 3, 0, 0, 0, 0, 1, 0, 0, 49, u'pir_t_ico_hat_french', 1, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 21, 0, 0, 0],
20256: [52, 0, 550, 20256, u'Hat_Buccaneer_Green', u'GREEN_BUCCANEER_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 42, u'pir_t_ico_hat_french', 1, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 21, 1, 1, 0],
20257: [52, 0, 550, 20257, u'Hat_Buccaneer_Fancy', u'FANCY_BUCCANEER_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_hat_french', 1, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 21, 2, 2, 0],
20258: [52, 1, 550, 20258, u'Hat_Buccaneer_Crimson', u'CRIMSON_BUCCANEER_HAT', 1, 0, 0, 0, 0, 1, 0, 0, 40, u'pir_t_ico_hat_french', 1, 42, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 21, 3, 3, 0],
20259: [52, 0, 550, 20259, u'Hat_Buccaneer_Mottled', u'MOTTLED_BUCCANEER_HAT', 3, 0, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_hat_french', 1, 39, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 21, 4, 4, 0],
20260: [52, 1, 550, 20260, u'Hat_Buccaneer_Rose', u'ROSE_BUCCANEER_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_hat_french', 1, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, 24, 21, 5, 5, 0],
20261: [52, 1, 900, 20261, u'Hat_Tricorne_Valentines', u'VALENTINES_TRICORNE', 3, 0, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_tricorn', 38, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 2, 22, 4, 0, 0],
20262: [52, 1, 900, 20262, u'Hat_Tricorne_Mardi_Gras', u'MARDI_GRAS_TRICORNE', 3, 0, 0, 1, 0, 0, 0, 0, 6, u'pir_t_ico_hat_tricorn', 38, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 2, 7, 5, 1, 0],
20263: [52, 0, 300, 20263, u'Hat_Stocking_Blue_White', u'BLUE_WHITE_STOCKING_CAP', 2, 0, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_hat_holiday', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 18, 1, 1, 0],
20264: [52, 1, 900, 20264, u'Hat_Stocking_Xmas', u'XMAS_CAP', 3, 0, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_hat_holiday', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 21, 18, 10, 10, 0],
20265: [52, 0, 300, 20265, u'Hat_Party_Red_Blue', u'RED_BLUE_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 6, 6, 0],
20266: [52, 0, 300, 20266, u'Hat_Party_Red_Yellow', u'RED_YELLOW_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 14, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 7, 7, 0],
20267: [52, 0, 300, 20267, u'Hat_Party_Yellow_Purple', u'YELLOW_PURPLE_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 25, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 8, 8, 0],
20268: [52, 0, 300, 20268, u'Hat_Party_Yellow_Red', u'YELLOW_RED_PARTY_HAT', 2, 0, 1, 1, 0, 0, 0, 0, 39, u'pir_t_ico_hat_party1', 39, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 19, 9, 9, 0],
20269: [52, 1, 900, 20269, u'Hat_Stove_Saint_Patricks', u'SAINT_PATRICKS_STOVE_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 41, u'pir_t_ico_hat_party2', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 23, 20, 4, 4, 0],
2013: [51, 2, 2550, 2013, u'Bush Master Pistol', u'BUSH_MASTER_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_b', 0, 15, 0, 0, '', 1, 130, 1, 200, 1, 201, 3, 12202, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_b', 6, 20, 55, 1, 0, 0, 0, 0, 0, 50.5],
2014: [51, 2, 990, 2014, u"Scallywag's Pistol", u'SCALLYWAG_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 5, 0, 0, '', 1, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 10, 25, 1, 1, 3505, 0, 0, 0, 31.5],
2015: [51, 2, 2120, 2015, u"Robber's Pistol", u'ROBBER_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 10, 0, 0, '', 1, 207, 0, 0, 0, 0, 1, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 15, 40, 1, 2, 3505, 0, 0, 0, 46],
2016: [51, 2, 3660, 2016, u"Scoundrel's Pistol", u'SCOUNDREL_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 15, 0, 0, '', 1, 207, 0, 0, 0, 0, 2, 12208, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 20, 55, 1, 3, 3505, 0, 0, 0, 60.5],
2017: [51, 2, 650, 2017, u'Night Hunter Pistol', u'NIGHT_HUNTER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 10, 25, 1, 1, 3503, 0, 0, 0, 25.5],
20300: [52, 0, 900, 20300, u'Hat_Tricorn_French_Assassin', u'FRENCH_ASSASSIN_HAT', 3, 0, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_hat_tricorn', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 10, 7, 1, 2, 0],
20301: [52, 0, 400, 20301, u'Hat_Diplomat', u'DIPLOMAT_HAT', 2, 0, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_hat_tricorn', 48, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, 3, 2, 1, 1, 0],
20302: [52, 0, 900, 20302, u'Hat_Sea_Serpent_Hunter', u'SEA_SERPENT_HUNTER_HAT', 3, 0, 0, 0, 0, 0, 0, 0, 16, u'pir_t_ico_hat_tricorn', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 3, 2, 2, 2, 0],
20303: [52, 0, 900, 20303, u'Hat_Tricorn_Peacock', u'PEACOCK_TRICORN_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 30, u'pir_t_ico_hat_tricorn', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 7, 2, 3, 0],
20304: [52, 0, 550, 20304, u'Hat_Tricorn_Scourge', u'SCOURGE_TRICORN_HAT', 2, 0, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_hat_tricorn', 51, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 10, 7, 3, 4, 0],
20305: [52, 0, 900, 20305, u'Hat_Spanish_Zombie', u'ZOMBIE_PIRATE_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_barbossa', 52, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 11, 8, 1, 1, 0],
20306: [52, 0, 550, 20306, u'Hat_Band_South_China', u'SOUTH_CHINA_BAND', 2, 0, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_hat_bandana_full', 53, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, 6, 5, 7, 8, 0],
20307: [52, 0, 550, 20307, u'Hat_Band_Wildfire', u'WILDFIRE_BAND', 2, 0, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_bandana_full', 54, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 6, 5, 8, 9, 0],
20308: [52, 0, 550, 20308, u'Hat_Band_Bountyhunter', u'BOUNTYHUNTER_BAND', 2, 0, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_hat_beanie', 55, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, 8, 23, 5, 6, 0],
20309: [52, 0, 550, 20309, u'Hat_Band_Barbary_Corsair', u'BARBARY_CORSAIR_BAND', 2, 0, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_hat_beanie', 56, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, 8, 23, 6, 5, 0],
20310: [52, 0, 900, 20310, u'Hat_Spanish_Zombies', u'ZOMBIES_PIRATE_HAT', 3, 0, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_hat_barbossa', 52, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 11, 8, 1, 1, 0],
2020: [51, 2, 4420, 2020, u'Fullmoon Special Pistol', u'FULLMOON_SPECIAL_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', 0, 25, 0, 0, '', 1, 133, 0, 0, 0, 0, 3, 12209, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 30, 75, 1, 3, 3503, 0, 0, 0, 66.5],
2021: [51, 2, 550, 2021, u"EITC Thug's Pistol", u'EITC_THUG_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 6, 17, 1, 1, 2346, 0, 0, 0, 23.5],
2022: [51, 2, 700, 2022, u"Navy Sergeant's Pistol", u'NAVY_SERGEANT_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12207, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 8, 21, 1, 1, 2346, 0, 0, 0, 26.5],
2023: [51, 2, 1850, 2023, u"Duelist's Pistol", u'DUELIST_PISTOL', 3, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12207, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 16, 42, 1, 2, 2346, 0, 0, 0, 43],
2024: [51, 2, 2970, 2024, u"Executioner's Pistol", u'EXECUTIONER_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12207, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 24, 63, 1, 2, 2346, 0, 0, 0, 54.5],
10217: [56, 1, 5400, 10217, u"Commander's Spyglass", u'COMMANDER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12657, 1, 12654, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 22, 0, 0, 2, 3403, 40],
10218: [56, 1, 4490, 10218, u"Admiral's Spyglass", u'ADMIRAL_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_c', 0, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12657, 2, 12654, 0, 0, 1, u'pir_m_inv_can_spyglass_c', 29, 29, 0, 0, 3, 3403, 36],
10219: [56, 1, 560, 10219, u"Swashbuckler's Spyglass", u'SWASHBUCKLER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 18, 0, 0, 1, 3402, 9],
10220: [56, 1, 1750, 10220, u"Buccaneer's Spyglass", u'BUCCANEER_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12654, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 22, 0, 0, 2, 3402, 26],
10221: [56, 1, 5400, 10221, u"Privateer's Spyglass", u'PRIVATEER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_d', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12654, 1, 12655, 0, 0, 1, u'pir_m_inv_can_spyglass_d', 29, 26, 0, 0, 2, 3402, 40],
2030: [51, 1, 6810, 2030, u'Arcane Pistol', u'ARCANE_PISTOL', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12204, 2, 12207, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 27, 69, 1, 3, 3506, 0, 0, 0, 82.5],
26607: [52, 2, 600, 26607, u'Knee_Boots_Burgundy', u'RICH_KNEE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_sho_f_knee', 1, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 6, 0],
26608: [52, 2, 600, 26608, u'Knee_Boots_Green', u'FOREST_KNEE_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 41, u'pir_t_ico_sho_f_knee', 9, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 7, 0],
26609: [52, 2, 600, 26609, u'Knee_Boots_Tan', u'HARD_KNEE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_sho_f_knee', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 8, 0],
2034: [51, 1, 7310, 2034, u"Beckette's Pistol", u'BECKETTE_PISTOL', 5, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_c', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12208, 3, 12206, 0, 0, 1, u'pir_m_hnd_gun_pistol_c', 6, 30, 85, 1, 3, 3505, 0, 0, 0, 85.5],
10227: [56, 1, 500, 10227, u'Navy Spyglass', u'NAVY_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 8, 0, 0, 1, 3401, 7],
10228: [56, 1, 1050, 10228, u'Trading Company Spyglass', u'TRADING_CO_SPYGLASS', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12653, 0, 0, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 15, 0, 0, 2, 3401, 18],
10229: [56, 1, 3670, 10229, u"Officer's Spyglass", u'OFFICER_SPYGLASS', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_can_spyglass_a', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12653, 1, 12660, 0, 0, 1, u'pir_m_inv_can_spyglass_a', 29, 22, 0, 0, 2, 3401, 32],
2038: [51, 2, 600, 2038, u'Steel Pistol', u'STEEL_PISTOL', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_d', u'A well-crafted steel pistol. Fires one shot.', 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_d', 6, 12, 29, 1, 0, 0, 0, 0, 0, 24.5],
26615: [52, 0, 1800, 26615, u'Knee_Boots_Wildfire', u'WILDFIRE_KNEE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 21, u'pir_t_ico_sho_f_knee', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 3, -1, 14, 0],
26616: [52, 0, 600, 26616, u'Knee_Boots_Zombie_Pirate', u'ZOMBIE_PIRATE_KNEE_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_knee', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, -1, 3, -1, 15, 0],
26617: [52, 0, 600, 26617, u'Knee_Boots_Zombies_Pirate', u'ZOMBIES_PIRATE_KNEE_BOOTS', 2, 7, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_knee', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, -1, 3, -1, 15, 0],
4101: [51, 2, 0, 4101, u'Rag Doll', u'RAG_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_a', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_a', 14, 4, 4, 0, 0, 0, 7, 0, 0, 2],
4102: [51, 2, 50, 4102, u'Cupie Doll', u'CUPIE_DOLL', 1, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_a', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_a', 14, 8, 8, 0, 0, 0, 7, 0, 0, 7],
4103: [51, 2, 140, 4103, u'Gaze Binder Doll', u'GAZE_BINDER_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 14, 18, 0, 1, 2306, 8, 0, 0, 12],
4104: [51, 2, 400, 4104, u'Sight Binder Doll', u'SIGHT_BINDER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 18, 28, 0, 1, 2306, 8, 0, 0, 20],
4105: [51, 2, 580, 4105, u'Far Binder Doll', u'FAR_BINDER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12603, 1, 12610, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 22, 32, 0, 2, 2306, 8, 0, 0, 24],
4106: [51, 2, 1160, 4106, u'Spirit Binder Doll', u'SPIRIT_BINDER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 2, 12610, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 26, 42, 0, 2, 2306, 8, 0, 0, 34],
4107: [51, 1, 1480, 4107, u'Soul Binder Doll', u'SOUL_BINDER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 3, 12610, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 30, 47, 0, 3, 2306, 8, 0, 0, 38.5],
4108: [51, 2, 200, 4108, u'Monkey Doll', u'MONKEY_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 9, 12, 0, 1, 3508, 7, 0, 0, 14],
4109: [51, 2, 580, 4109, u'Chimpanzee Doll', u'CHIMPANZEE_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 13, 22, 0, 2, 3508, 7, 0, 0, 24],
4110: [51, 2, 870, 4110, u'Baboon Doll', u'BABOON_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 17, 27, 0, 2, 3508, 7, 0, 0, 29.5],
4111: [51, 2, 1760, 4111, u'Orangutan Doll', u'ORANGUTAN_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 1, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 21, 36, 0, 3, 3508, 7, 0, 0, 42],
4112: [51, 1, 2260, 4112, u'Gorilla Doll', u'GORILLA_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 1, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 25, 41, 0, 3, 3508, 7, 0, 0, 47.5],
4113: [51, 1, 3310, 4113, u'Jack the Monkey Doll', u'JACK_THE_MONKEY_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_c', 0, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 2, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_c', 14, 29, 55, 0, 3, 3508, 7, 0, 0, 57.5],
4114: [51, 2, 400, 4114, u'Doll of Cleansing', u'DOLL_OF_CLEANSING', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12606, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 16, 20, 0, 1, 3500, 8, 0, 0, 20],
4115: [51, 2, 1300, 4115, u'Doll of Purification', u'DOLL_OF_PURIFICATION', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12606, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 22, 32, 0, 2, 3500, 8, 0, 0, 36],
4116: [51, 2, 2700, 4116, u'Doll of Sacred Rituals', u'DOLL_OF_SACRED_RITUALS', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12606, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 28, 44, 0, 3, 3500, 8, 0, 0, 52],
4117: [51, 2, 110, 4117, u'Sailor Doll', u'SAILOR_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 8, 11, 0, 1, 2300, 8, 0, 0, 10.5],
4118: [51, 2, 260, 4118, u'Seafarer Doll', u'SEAFARER_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 12, 16, 0, 1, 2300, 8, 0, 0, 16],
4119: [51, 2, 760, 4119, u'Traveler Doll', u'TRAVELER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12603, 1, 12609, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 16, 25, 0, 2, 2300, 8, 0, 0, 27.5],
4120: [51, 2, 1090, 4120, u'Voyager Doll', u'VOYAGER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12603, 1, 12609, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 20, 30, 0, 2, 2300, 8, 0, 0, 33],
4121: [51, 2, 2030, 4121, u'Explorer Doll', u'EXPLORER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12603, 2, 12609, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 24, 40, 0, 3, 2300, 8, 0, 0, 45],
4122: [51, 1, 2700, 4122, u'Adventurer Doll', u'ADVENTURER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 3, 12609, 0, 0, 1, u'pir_m_hnd_dol_spirit_e', 14, 28, 44, 0, 3, 2300, 8, 0, 0, 52],
4123: [51, 1, 3910, 4123, u'Elizabeth Swan Doll', u'ELIZABETH_SWAN_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_e', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 3, 12609, 1, 12606, 1, u'pir_m_hnd_dol_spirit_e', 14, 30, 57, 0, 3, 2300, 8, 0, 0, 62.5],
4124: [51, 2, 90, 4124, u'Soldier Doll', u'SOLDIER_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 4, 7, 0, 1, 2305, 11, 0, 0, 9.5],
4125: [51, 2, 200, 4125, u'Hero Doll', u'HERO_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 7, 10, 0, 1, 2305, 11, 0, 0, 14],
4126: [51, 2, 780, 4126, u'Fencer Doll', u'FENCER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12601, 1, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 11, 20, 0, 2, 2305, 11, 0, 0, 28],
4127: [51, 2, 1090, 4127, u'Swordsman Doll', u'SWORDSMAN_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 1, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 15, 24, 0, 2, 2305, 11, 0, 0, 33],
4128: [51, 2, 2210, 4128, u'Conquistador Doll', u'CONQUISTADOR_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12601, 2, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 19, 34, 0, 3, 2305, 11, 0, 0, 47],
4129: [51, 1, 3140, 4129, u'Treasure Hunter Doll', u'TREASURE_HUNTER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12603, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 24, 40, 0, 3, 2305, 11, 0, 0, 56],
4130: [51, 1, 4690, 4130, u'Will Turner Doll', u'WILL_TURNER_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', 0, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12601, 3, 12603, 1, 12607, 1, u'pir_m_hnd_dol_spirit_d', 14, 29, 55, 0, 3, 2305, 11, 0, 0, 68.5],
4131: [51, 2, 970, 4131, u'Healing Doll', u'HEALING_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_a', u'Spirit Dolls increase the power of your Healing Voodoo skills.', 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_a', 14, 5, 8, 0, 1, 2300, 8, 0, 0, 9],
4132: [51, 2, 1730, 4132, u'Mending Doll', u'MENDING_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', u'Increases your ability to heal others.', 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 10, 14, 0, 1, 2300, 8, 0, 0, 12],
4133: [51, 2, 2520, 4133, u'Restoration Doll', u'RESTORATION_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', u'A doll filled with good voodoo power.', 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 15, 19, 0, 1, 2300, 8, 0, 0, 14.5],
4134: [51, 2, 3680, 4134, u'Renewal Doll', u'RENEWAL_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_b', u'A festive doll that glows with good voodoo magic.', 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_b', 14, 20, 25, 0, 1, 2300, 8, 0, 0, 17.5],
4135: [51, 2, 5040, 4135, u'Life Doll', u'LIFE_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_spirit_d', u'A powerful voodoo doll with mighty healing powers.', 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_spirit_d', 14, 25, 31, 0, 1, 2300, 8, 0, 0, 20.5],
37001: [53, 2, 1600, 37001, u'TattooFaceSkull', u'TATTOO_FACE_SKULL', 3, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_skull_face', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 21, 601, 701, 0, 0],
37002: [53, 2, 700, 37002, u'TattooFaceTwoClovers', u'TATTOO_FACE_TWO_CLOVERS', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_face_2clovers', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 40, 40, 602, 702, 0, 0],
37003: [53, 2, 700, 37003, u'TattooFaceHorseShoes', u'TATTOO_FACE_HORSE_SHOES', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_face_horseshoeclovers', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 41, 41, 602, 702, 0, 0],
37004: [53, 2, 800, 37004, u'TattooFaceTribalForehead', u'TATTOO_FACE_TRIBAL_FOREHEAD', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_tribal_forehead', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 67, 603, 703, 0, 0],
37005: [53, 1, 800, 37005, u'TattooFaceTribalCheek', u'TATTOO_FACE_TRIBAL_CHEEK', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_tribal_cheek', 16, 0, 4, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 65, 604, 704, 0, 0],
37006: [53, 2, 800, 37006, u'TattooFaceTribalChin', u'TATTOO_FACE_TRIBAL_CHIN', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_tribal_chin', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 66, 605, 705, 0, 0],
37007: [53, 2, 300, 37007, u'TattooFaceJackEyes', u'TATTOO_FACE_JACK_EYES', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_jacksparrow', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 64, 606, 706, 0, 0],
37008: [53, 2, 800, 37008, u'TattooFaceTribalGoatee', u'TATTOO_FACE_TRIBAL_GOATEE', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_mono_tribal_gotee', 14, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 71, 607, 707, 0, 0],
37009: [53, 2, 800, 37009, u'TattooFaceMaoriNose', u'TATTOO_FACE_MAORI_NOSE', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_mono_maori_nose', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 69, 608, 708, 0, 0],
37010: [53, 2, 800, 37010, u'TattooFaceMaoriChin', u'TATTOO_FACE_MAORI_CHIN', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_mono_maori_chin', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 68, 68, 609, 709, 0, 0],
37011: [53, 1, 800, 37011, u'TattooFaceNativeEye', u'TATTOO_FACE_NATIVE_EYE', 2, 2, 0, 0, 1, 0, 0, 0, 1, u'tattoo_face_mono_native_eye', 14, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 70, 70, 610, 710, 0, 0],
37012: [53, 2, 500, 37012, u'TattooFaceMothersDayFlower', u'TATTOO_FACE_MOTHERS_DAY_FLOWER', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_mothersday_flower_lg', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 100, 100, 611, 711, 0, 0],
37013: [53, 2, 500, 37013, u'TattooFaceMothersDayHearts', u'TATTOO_FACE_MOTHERS_DAY_HEARTS', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_face_color_mothersday_hearts', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, None, 102, 102, 612, 712, 0, 0],
37014: [53, 1, 500, 37014, u'TattooFacePirateBrand', u'TATTOO_FACE_PIRATE_BRAND', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_scars_piratebrand', 6, 0, 1, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 107, 613, 713, 0, 0],
37015: [53, 1, 450, 37015, u'TattooFaceStitchedScar', u'TATTOO_FACE_STITCHED_SCAR', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_scars_traintrack01', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 108, 614, 714, 0, 0],
37016: [53, 1, 450, 37016, u'TattooFaceStitchedX', u'TATTOO_FACE_STITCHED_X', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_x', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 130, 615, 715, 0, 0],
37017: [53, 1, 450, 37017, u'TattooFaceStitchedY', u'TATTOO_FACE_STITCHED_Y', 2, 2, 0, 1, 0, 0, 0, 0, 1, u'tattoo_stitches_y', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131, 131, 616, 716, 0, 0],
37018: [53, 1, 800, 37018, u'TattooFaceColorEye01', u'TATTOO_FACE_COLOR_EYE_01', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_eye_01', 3, 0, 3, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, 72, 601, 701, 0, 0],
37019: [53, 1, 800, 37019, u'TattooFaceColorCheek', u'TATTOO_FACE_COLOR_CHEEK', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_cheek', 3, 0, 3, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 73, 73, 601, 701, 0, 0],
37020: [53, 1, 800, 37020, u'TattooFaceColorTribalMouth', u'TATTOO_FACE_COLOR_TRIBAL_MOUTH', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_tribal_mouth', 15, 0, 5, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 76, 76, 601, 701, 0, 0],
37021: [53, 1, 1000, 37021, u'TattooFaceColorVoodoo01', u'TATTOO_FACE_COLOR_VOODOO_01', 2, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_voodoo_01', 15, 0, 5, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 82, 82, 601, 701, 0, 0],
37022: [53, 1, 1000, 37022, u'TattooFaceColorVoodoo02', u'TATTOO_FACE_COLOR_VOODOO_02', 3, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_voodoo_02', 18, 0, 6, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 83, 601, 701, 0, 0],
37023: [53, 1, 1000, 37023, u'TattooFaceColorVoodoo03', u'TATTOO_FACE_COLOR_VOODOO_03', 3, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_voodoo_03', 18, 0, 6, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 84, 84, 601, 701, 0, 0],
37024: [53, 1, 1000, 37024, u'TattooFaceColorVoodoo04', u'TATTOO_FACE_COLOR_VOODOO_04', 3, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_voodoo_04', 19, 0, 7, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 85, 601, 701, 0, 0],
37025: [53, 1, 1000, 37025, u'TattooFaceColorVoodoo05', u'TATTOO_FACE_COLOR_VOODOO_05', 3, 2, 0, 1, 0, 0, 1, 0, 1, u'tattoo_face_color_voodoo_05', 19, 0, 7, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 86, 86, 601, 701, 0, 0],
26602: [52, 2, 600, 26602, u'Knee_Boots_Chains', u'PLANKWALKER_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_sho_f_knee', 1, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 1, 0],
26603: [52, 2, 600, 26603, u'Knee_Boots_Buckle', u'CASTLE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sho_f_knee', 2, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 2, 0],
4817: [51, 2, 930, 4817, u'Knives of the Sun Idol', u'KNIVES_OF_THE_SUN_IDOL', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 19, 0, 0, '', 1, 160, 0, 0, 0, 0, 1, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 19, 31, 0, 1, 3506, 0, 0, 0, 30.5],
21202: [52, 2, 200, 21202, u'Open_V-Neck_Plain1', u'OPEN_COTTON_SHIRT', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_v_neck_open', 2, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 1, -1, 1],
4819: [51, 2, 5040, 4819, u'Knives of the Golden Idol', u'KNIVES_OF_THE_GOLDEN_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 30, 0, 0, '', 3, 160, 0, 0, 0, 0, 3, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 30, 52, 0, 3, 3506, 0, 0, 0, 71],
4820: [51, 2, 230, 4820, u'Survival Throwing Knives', u'SURVIVAL_THROWING_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 10, 12, 0, 1, 3502, 0, 0, 0, 15],
4821: [51, 2, 960, 4821, u'Wilderness Throwing Knives', u'WILDERNESS_THROWING_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 16, 26, 0, 2, 3502, 0, 0, 0, 31],
26605: [52, 2, 600, 26605, u'Knee_Boots_Ties', u'STITCHED_KNEE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_knee', 1, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 4, 0],
26606: [52, 2, 1800, 26606, u'Knee_Boots_Blue', u'CHILL_BOOTS', 3, 7, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sho_f_knee', 3, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 5, 0],
4501: [51, 2, 100, 4501, u'Dagger', u'BASIC_DAGGER', 2, 4, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', u'A sharp dagger. Small but deadly.', 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 15, 0, 3, 0, 0, 0, 0, 0, 0, 1.5],
4502: [51, 2, 250, 4502, u'Battle Dirk', u'BATTLE_DIRK', 2, 4, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', u'A long knife. Well balanced for fighting.', 5, 0, 0, '', 1, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 5, 5, 0, 0, 0, 0, 0, 0, 3.5],
4503: [51, 2, 1250, 4503, u'Main Gauche', u'MAIN_GAUCHE', 2, 4, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', u'A fancy blade that is useful for keeping your opponent off guard.', 10, 0, 0, '', 2, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 15, 10, 8, 0, 0, 0, 0, 0, 0, 6],
4504: [51, 2, 7000, 4504, u'Coltello', u'COLTELLO', 3, 4, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', u"A Coltello dagger, it is a pirate's best friend in the right fight. ", 15, 0, 0, '', 3, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 15, 15, 11, 0, 0, 0, 0, 0, 0, 8.5],
4505: [51, 2, 14000, 4505, u'Bloodletter', u'BLOODLETTER', 3, 4, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', u'A Bloodletter dagger. It is meticulously designed to fend off even the largest of foes.', 20, 0, 0, '', 4, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 20, 15, 0, 0, 0, 0, 0, 0, 11.5],
4506: [51, 2, 10, 4506, u"Gravedigger's Dagger", u'GRAVEDIGGER_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 1, 7, 0, 0, 0, 0, 0, 0, 3.5],
4507: [51, 2, 20, 4507, u"Mutineer's Dagger", u'MUTINEER_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 2, 9, 0, 0, 0, 0, 0, 0, 4.5],
4508: [51, 2, 110, 4508, u"Brigand's Dagger", u'BRIGAND_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 4, 13, 0, 1, 3509, 0, 0, 0, 10.5],
4509: [51, 2, 270, 4509, u"Duelist's Dagger", u'DUELIST_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 6, 17, 0, 1, 3502, 0, 0, 0, 16.5],
4510: [51, 2, 160, 4510, u"EITC Thug's Dagger", u'EITC_THUG_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 4, 13, 0, 1, 3509, 0, 0, 0, 12.5],
4511: [51, 2, 1370, 4511, u"EITC Hiredgun's Dagger", u'EITC_HIRED_GUN_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12408, 1, 12409, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 12, 34, 0, 2, 2285, 0, 0, 0, 37],
4512: [51, 2, 3080, 4512, u"EITC Assassin's Dagger", u'EITC_ASSASSIN_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12410, 1, 12403, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 17, 49, 0, 3, 2284, 0, 0, 0, 55.5],
4513: [51, 2, 270, 4513, u'Survival Dagger', u'SURVIVAL_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12400, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 6, 17, 0, 1, 3502, 0, 0, 0, 16.5],
4514: [51, 2, 680, 4514, u'Wilderness Dagger', u'WILDERNESS_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12400, 1, 12401, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 11, 32, 0, 1, 3502, 0, 0, 0, 26],
4515: [51, 2, 1680, 4515, u'Jungle Dagger', u'JUNGLE_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 1, 12402, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 16, 42, 0, 2, 3502, 0, 0, 0, 41],
4516: [51, 2, 2550, 4516, u'Swamp Dagger', u'SWAMP_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12401, 1, 12402, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 21, 57, 0, 2, 3502, 0, 0, 0, 50.5],
4517: [51, 2, 4560, 4517, u'Bayou Dagger', u'BAYOU_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12402, 1, 12403, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 26, 67, 0, 3, 3502, 0, 0, 0, 67.5],
4518: [51, 2, 210, 4518, u'Backstabber Dagger', u'BACKSTABBER_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 5, 15, 0, 1, 2284, 0, 0, 0, 14.5],
4519: [51, 2, 510, 4519, u'Back Biter Dagger', u'BACK_BITER_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12408, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 10, 25, 0, 1, 2284, 0, 0, 0, 22.5],
4520: [51, 2, 1600, 4520, u'Deal Breaker Dagger', u'DEAL_BREAKER_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12408, 1, 12402, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 14, 38, 0, 2, 2284, 0, 0, 0, 40],
4521: [51, 2, 2600, 4521, u'Double Cross Dagger', u'DOUBLE_CROSS_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12408, 2, 12402, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 18, 46, 0, 2, 2284, 0, 0, 0, 51],
4522: [51, 2, 3780, 4522, u"Traitor's Dagger", u'TRAITOR_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12408, 1, 12403, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 22, 59, 0, 3, 2284, 0, 0, 0, 61.5],
4523: [51, 2, 5400, 4523, u"Cutthroat's Dagger", u'CUTTHROAT_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12408, 2, 12403, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 26, 67, 0, 3, 2284, 0, 0, 0, 73.5],
4524: [51, 2, 270, 4524, u"Sea Dog's Dagger", u'SEA_DOG_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 8, 21, 0, 1, 2285, 0, 0, 0, 16.5],
4525: [51, 2, 510, 4525, u"Swashbuckler's Dagger", u'SWASHBUCKLER_DAGGER', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 12, 29, 0, 1, 2285, 0, 0, 0, 22.5],
4526: [51, 2, 1440, 4526, u"Buccaneer's Dagger", u'BUCCANEER_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 1, 12404, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 16, 42, 0, 2, 2285, 0, 0, 0, 38],
4527: [51, 2, 2600, 4527, u"Privateer's Dagger", u'PRIVATEER_DAGGER', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12402, 2, 12404, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 20, 50, 0, 2, 2285, 0, 0, 0, 51],
4528: [51, 2, 4030, 4528, u"Corsair's Dagger", u'CORSAIR_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12402, 2, 12404, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 24, 63, 0, 3, 2285, 0, 0, 0, 63.5],
4529: [51, 2, 5550, 4529, u'Seven Seas Dagger', u'SEVEN_SEAS_DAGGER', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12402, 3, 12404, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 28, 71, 0, 3, 2285, 0, 0, 0, 74.5],
4530: [51, 2, 760, 4530, u'Dagger of the Sun Idol', u'DAGGER_OF_THE_SUN_IDOL', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12400, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 15, 35, 0, 1, 3506, 0, 0, 0, 27.5],
4531: [51, 2, 1330, 4531, u'Dagger of the Moon Idol', u'DAGGER_OF_THE_MOON_IDOL', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 18, 0, 0, '', 1, 160, 0, 0, 0, 0, 2, 12400, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 18, 41, 0, 1, 3506, 0, 0, 0, 36.5],
4532: [51, 2, 2810, 4532, u'Dagger of the Hawk Idol', u'DAGGER_OF_THE_HAWK_IDOL', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 21, 0, 0, '', 1, 160, 0, 0, 0, 0, 2, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 21, 52, 0, 2, 3506, 0, 0, 0, 53],
4533: [51, 2, 3720, 4533, u'Dagger of the Bear Idol', u'DAGGER_OF_THE_BEAR_IDOL', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 24, 0, 0, '', 2, 160, 0, 0, 0, 0, 2, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 24, 58, 0, 2, 3506, 0, 0, 0, 61],
4534: [51, 2, 6320, 4534, u'Dagger of the Golden Idol', u'DAGGER_OF_THE_GOLDEN_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 27, 0, 0, '', 2, 160, 0, 0, 0, 0, 2, 12402, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 27, 69, 0, 3, 3506, 0, 0, 0, 79.5],
4535: [51, 2, 8370, 4535, u'Dagger of the Dark Idol', u'DAGGER_OF_THE_DARK_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', 0, 30, 0, 0, '', 3, 160, 0, 0, 0, 0, 3, 12402, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 30, 75, 0, 3, 3506, 0, 0, 0, 91.5],
4537: [51, 2, 610, 4537, u'Small Dagger', u'SMALL_DAGGER', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_c', u'Daggers cause more damage when attacking enemies in the back.', 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_c', 15, 3, 11, 0, 0, 0, 0, 0, 0, 5.5],
4538: [51, 2, 2210, 4538, u'Steel Dagger', u'STEEL_DAGGER', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_b', u'A sneaky blade. Deals more damage when attacking enemies in the back.', 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_b', 15, 8, 21, 0, 0, 0, 0, 0, 0, 10.5],
4539: [51, 2, 4810, 4539, u'Combat Dagger', u'COMBAT_DAGGER', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_a', u'A fighting knife. Deals more damage when attacking enemies in the back.', 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_a', 15, 13, 31, 0, 0, 0, 0, 0, 0, 15.5],
4540: [51, 2, 8410, 4540, u'Battle Dagger', u'BATTLE_DAGGER', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_e', u'Often used in duels. Deals more damage when attacking enemies in the back.', 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_e', 15, 18, 41, 0, 0, 0, 0, 0, 0, 20.5],
4541: [51, 2, 13010, 4541, u'War Dagger', u'WAR_DAGGER', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dagger_d', u'Forged for combat. Deals more damage when attacking enemies in the back.', 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dagger_d', 15, 23, 51, 0, 0, 0, 0, 0, 0, 25.5],
26611: [52, 1, 1800, 26611, u'Knee_Boots_Saint_Patricks', u'SAINT_PATRICKS_KNEE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 41, u'pir_t_ico_sho_f_knee', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 3, -1, 10, 0],
26613: [52, 0, 1800, 26613, u'Knee_Boots_Peacock', u'PEACOCK_KNEE_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_sho_f_knee', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 12, 0],
21001: [52, 2, 200, 21001, u'Tank_SweatStained', u'OLD_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_tanktop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 1],
21002: [52, 2, 200, 21002, u'Tank_Striped', u'STRIPED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_tanktop', 28, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 1],
21003: [52, 2, 200, 21003, u'Tank_Plain', u'COTTON_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_tanktop', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 2, -1, 1],
21004: [52, 2, 200, 21004, u'Tank_Buttoned', u'TRIMMED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_tanktop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3, -1, 0],
21005: [52, 2, 500, 21005, u'Tank_Suspenders', u'SUSPENDER_TANK', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_tanktop', 24, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 4, -1, 0],
21006: [52, 0, 500, 21006, u'Tank_Scourge', u'SCOURGE_TANK', 2, 1, 0, 0, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_tanktop', 51, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 1, -1, 5, -1, 0],
21007: [52, 0, 1200, 21007, u'Tank_Sea_Serpent', u'SEA_SERPENT_TANK', 3, 1, 0, 0, 0, 0, 0, 0, 42, u'pir_t_ico_sht_m_tanktop', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 1, -1, 6, -1, 0],
26614: [52, 0, 600, 26614, u'Knee_Boots_Scourge', u'SCOURGE_KNEE_BOOTS', 3, 7, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_knee', 51, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, -1, 3, -1, 13, 0],
21051: [52, 2, 200, 21051, u'Sleevless_Striped', u'CREW_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_m_tanktop', 12, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
21052: [52, 2, 200, 21052, u'Sleevless_Ties', u'HOOKED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_sht_m_tanktop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 1],
21053: [52, 2, 200, 21053, u'Sleevless_Leather_Front', u'REINFORCED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_sht_m_tanktop', 24, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 1],
21054: [52, 2, 200, 21054, u'Sleevless_Centerseam', u'SEAMED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_tanktop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 1],
21055: [52, 0, 200, 21055, u'Sleevless_Blue_Three_Button', u'MARINER_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_m_tanktop', 23, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 4, -1, 0],
21056: [52, 0, 200, 21056, u'Sleevless_Pale_Green', u'IMPROVISED_TANK', 1, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_tanktop', 7, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 5, -1, 0],
21057: [52, 0, 500, 21057, u'Sleevless_Purple_Buckle', u'MANLY_TANK', 2, 1, 1, 1, 0, 0, 0, 0, 27, u'pir_t_ico_sht_m_tanktop', 3, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 6, -1, 0],
21058: [52, 0, 500, 21058, u'Sleevless_Salmon', u'INDENTURED_TANK', 2, 1, 1, 1, 0, 0, 0, 0, 11, u'pir_t_ico_sht_m_tanktop', 26, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 7, -1, 0],
21059: [52, 0, 500, 21059, u'Sleevless_Flax_Brown', u'STITCHED_TANK', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_tanktop', 7, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 8, -1, 0],
21060: [52, 1, 1200, 21060, u'Sleevless_Silk_Blue', u'FRILLY_TANK', 3, 1, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sht_m_tanktop', 3, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 2, -1, 9, -1, 0],
21061: [52, 0, 1200, 21061, u'Sleevless_Silk_Red', u'SHIELD_TANK', 3, 1, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sht_m_tanktop', 17, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 10, -1, 0],
21062: [52, 0, 1200, 21062, u'Sleevless_Silk_White', u'CRESTED_TANK', 3, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_m_tanktop', 3, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 11, -1, 0],
21063: [52, 2, 1200, 21063, u'Sleevless_Advanced_Outfit', u'ADVANCED_TANK', 3, 1, 0, 0, 1, 0, 0, 0, 8, u'pir_t_ico_sht_m_tanktop', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 12, -1, 0],
21101: [52, 2, 500, 21101, u'ShortSleeve_Round_FrontLace', u'LAYERD_SHIRTS', 2, 1, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_short_sleeve_round_neck', 2, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 0, -1, 0],
21102: [52, 2, 500, 21102, u'ShortSleeve_Round_FrontButtons', u'PUB_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sht_m_short_sleeve_round_neck', 5, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 1, -1, 0],
21103: [52, 2, 500, 21103, u'ShortSleeve_Round_Stripes', u'SWABBIE_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_m_short_sleeve_round_neck', 13, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 2, -1, 0],
21104: [52, 2, 500, 21104, u'ShortSleeve_Round_Leather_Cloth', u'PANELED_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_short_sleeve_round_neck', 1, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 3, -1, 0],
21105: [52, 0, 1200, 21105, u'ShortSleeve_Round_Blue_White_Collar', u'GOVERNORS_EX_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 30, u'pir_t_ico_sht_m_short_sleeve_round_neck', 6, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 4, -1, 0],
21106: [52, 1, 500, 21106, u'ShortSleeve_Round_Cloth_Black', u'BIG_BUTTON_SHIRT', 2, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_short_sleeve_round_neck', 2, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 3, -1, 5, -1, 0],
21107: [52, 0, 1200, 21107, u'ShortSleeve_Round_Cloth_Caramel', u'MAYORS_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_sht_m_short_sleeve_round_neck', 6, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 6, -1, 0],
21108: [52, 1, 1200, 21108, u'ShortSleeve_Round_Brown_Buckle', u'TRIMMED_JERKIN', 3, 1, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_sht_m_short_sleeve_round_neck', 2, 33, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, 3, -1, 7, -1, 0],
21109: [52, 1, 1200, 21109, u'ShortSleeve_Round_Green_White_Collar', u'GREEN_GOLD_WHITE_COLLAR', 3, 1, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_sht_m_short_sleeve_round_neck', 27, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, 3, -1, 8, -1, 0],
21151: [52, 2, 500, 21151, u'V-Neck_Buttons', u'TREASURE_SHORT_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_round_neck', 1, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 0, -1, 0],
21152: [52, 2, 200, 21152, u'V-Neck_Plain1', u'COTTON_SHORT_SLEEVE', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_round_neck', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 1, -1, 1],
21153: [52, 2, 200, 21153, u'V-Neck_Plain2', u'LINEN_SHORT_SLEEVE', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_round_neck', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 2, -1, 1],
21154: [52, 0, 500, 21154, u'V-Neck_BrownCollar', u'LACED_SHORT_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_short_sleeve_round_neck', 1, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 3, -1, 0],
21155: [52, 0, 500, 21155, u'V-Neck_Waistband', u'TIED_SHORT_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_short_sleeve_round_neck', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 4, -1, 1],
21156: [52, 0, 500, 21156, u'V-Neck_Leather_Waistband', u'FLAP_SHORT_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sht_m_short_sleeve_round_neck', 5, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 5, -1, 0],
21157: [52, 0, 500, 21157, u'V-Neck_Yellow_Collar', u'FLEET_SHORT_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 33, u'pir_t_ico_sht_m_short_sleeve_round_neck', 12, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 6, -1, 0],
4801: [51, 2, 0, 4801, u'Rusty Throwing Knives', u'RUSTY_THROWING_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 4, 3, 0, 0, 0, 0, 0, 0, 1.5],
4802: [51, 2, 20, 4802, u'Balanced Thowing Knives', u'BALANCED_THROWING_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 6, 6, 0, 0, 0, 0, 0, 0, 4],
4803: [51, 2, 20, 4803, u'Cutlery Set', u'CUTLERY_SET', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 6, 0, 0, '', 1, 140, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 6, 6, 0, 0, 0, 0, 0, 0, 4],
4804: [51, 2, 1600, 4804, u"Raider's Throwing Knives", u'RAIDER_THROWING_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12409, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 24, 38, 0, 2, 2287, 0, 0, 0, 40],
4805: [51, 2, 110, 4805, u"Asp's Den Knives", u'ASP_DEN_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 8, 0, 0, '', 1, 140, 0, 0, 0, 0, 1, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 8, 9, 0, 1, 3509, 0, 0, 0, 10.5],
4806: [51, 2, 380, 4806, u"Adder's Den Knives", u'ADDER_DEN_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 14, 0, 0, '', 1, 141, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 14, 23, 0, 1, 3509, 0, 0, 0, 19.5],
4807: [51, 2, 960, 4807, u"Sidewinder's Den Knives", u'SIDEWINDER_DEN_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 16, 0, 0, '', 1, 142, 0, 0, 0, 0, 1, 12409, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 16, 26, 0, 2, 3509, 0, 0, 0, 31],
4808: [51, 2, 1720, 4808, u"Viper's Den Knives", u'VIPER_DEN_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 21, 0, 0, '', 1, 143, 0, 0, 0, 0, 1, 12410, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 21, 39, 0, 3, 3509, 0, 0, 0, 41.5],
4809: [51, 2, 200, 4809, u"Scallywag's Knives", u'SCALLYWAG_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12408, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 10, 12, 0, 1, 3505, 0, 0, 0, 14],
4810: [51, 2, 1260, 4810, u"Robber's Knives", u'ROBBER_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 19, 0, 0, '', 1, 207, 0, 0, 0, 0, 2, 12408, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 19, 31, 0, 2, 3505, 0, 0, 0, 35.5],
4811: [51, 2, 2600, 4811, u"Scoundrel's Knives", u'SCOUNDREL_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', 0, 26, 0, 0, '', 1, 207, 0, 0, 0, 0, 3, 12408, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 26, 46, 0, 3, 3505, 0, 0, 0, 51],
4812: [51, 2, 230, 4812, u'Black Fang Knives', u'BLACK_FANG_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 11, 14, 0, 1, 2287, 0, 0, 0, 15],
4813: [51, 2, 1020, 4813, u'Grim Fang Knives', u'GRIM_FANG_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 17, 28, 0, 2, 2287, 0, 0, 0, 32],
4814: [51, 2, 1370, 4814, u'Raven Fang Knives', u'RAVEN_FANG_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 21, 34, 0, 2, 2287, 0, 0, 0, 37],
4815: [51, 2, 2970, 4815, u'Shark Fang Knives', u'SHARK_FANG_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12407, 1, 12405, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 25, 45, 0, 3, 2287, 0, 0, 0, 54.5],
4816: [51, 2, 4030, 4816, u'Demon Fang Knives', u'DEMON_FANG_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', 0, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12407, 2, 12405, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 29, 51, 0, 3, 2287, 0, 0, 0, 63.5],
21201: [52, 2, 500, 21201, u'Open_V-Neck_Buttons', u'OPEN_TREASURE_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_v_neck_open', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 0, -1, 0],
4818: [51, 2, 2810, 4818, u'Knives of the Hawk Idol', u'KNIVES_OF_THE_HAWK_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 26, 0, 0, '', 2, 160, 0, 0, 0, 0, 2, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 26, 46, 0, 2, 3506, 0, 0, 0, 53],
21203: [52, 2, 200, 21203, u'Open_V-Neck_Plain2', u'OPEN_LINEN_SHIRT', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_short_sleeve_v_neck_open', 2, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 2, -1, 1],
21204: [52, 2, 500, 21204, u'Open_V-Neck_Brown_Collar', u'OPEN_LACED_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_short_sleeve_v_neck_open', 1, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 3, -1, 0],
21205: [52, 2, 500, 21205, u'Open_V-Neck_Yellow_Collar', u'OPEN_FLEET_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 33, u'pir_t_ico_sht_m_short_sleeve_v_neck_open', 12, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 6, -1, 0],
4822: [51, 2, 1520, 4822, u'Jungle Throwing Knives', u'JUNGLE_THROWING_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12407, 1, 12409, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 20, 32, 0, 2, 3502, 0, 0, 0, 39],
4823: [51, 2, 2860, 4823, u'Swamp Throwing Knives', u'SWAMP_THROWING_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12407, 1, 12409, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 24, 43, 0, 3, 3502, 0, 0, 0, 53.5],
4824: [51, 2, 3780, 4824, u'Bayou Throwing Knives', u'BAYOU_THROWING_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12407, 2, 12409, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 28, 49, 0, 3, 3502, 0, 0, 0, 61.5],
4825: [51, 2, 180, 4825, u"Hunter's Throwing Knives", u'HUNTER_THROWING_KNIVES', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 8, 9, 0, 1, 2286, 0, 0, 0, 13.5],
4826: [51, 2, 930, 4826, u'Aztec Throwing Knives', u'AZTEC_THROWING_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12406, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 14, 23, 0, 2, 2286, 0, 0, 0, 30.5],
4827: [51, 2, 1560, 4827, u"Marksman's Throwing Knives", u'MARKSMAN_THROWING_KNIVES', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12406, 1, 12410, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 18, 29, 0, 2, 2286, 0, 0, 0, 39.5],
4828: [51, 2, 3480, 4828, u'Amazon Throwing Knives', u'AMAZON_THROWING_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12406, 2, 12410, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 22, 40, 0, 3, 2286, 0, 0, 0, 59],
4829: [51, 2, 4620, 4829, u"Assassin's Throwing Knives", u'ASSASSIN_THROWING_KNIVES', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_e', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12406, 3, 12410, 0, 0, 1, u'pir_m_hnd_knf_dirk_e', 16, 26, 46, 0, 3, 2286, 0, 0, 0, 68],
4830: [51, 2, 5330, 4830, u'Silver Freeze', u'SILVER_FREEZE', 5, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_e', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12405, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_e', 16, 30, 62, 0, 3, 2275, 0, 0, 0, 73],
4831: [51, 2, 800, 4831, u'Small Throwing Knives', u'SMALL_THROWING_KNIVES', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_a', u'Throwing Knives are great ranged weapons. Faster recharge on Throwing Skills.', 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_a', 16, 7, 8, 0, 0, 0, 0, 0, 0, 4],
4832: [51, 2, 2810, 4832, u'Iron Throwing Knives', u'IRON_THROWING_KNIVES', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_b', u'Sturdy throwing weapon. Faster recharge on Throwing Skills.', 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_b', 16, 12, 15, 0, 0, 0, 0, 0, 0, 7.5],
4833: [51, 2, 6610, 4833, u'Tribal Throwing Knives', u'TRIBAL_THROWING_KNIVES', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_c', u'Tribal hunting weapon. Faster recharge on Throwing Skills.', 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_c', 16, 17, 23, 0, 0, 0, 0, 0, 0, 11.5],
4834: [51, 2, 11250, 4834, u'Fine Throwing Knives', u'FINE_THROWING_KNIVES', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_d', u'Golden throwing knives. Faster recharge on Throwing Skills.', 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_d', 16, 22, 30, 0, 0, 0, 0, 0, 0, 15],
4835: [51, 2, 18050, 4835, u'Master Throwing Knives', u'MASTER_THROWING_KNIVES', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_dirk_e', u'Master crafted throwing knives. Faster recharge on Throwing Skills.', 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_dirk_e', 16, 27, 38, 0, 0, 0, 0, 0, 0, 19],
809: [51, 1, 15000, 809, u"Executioner's Broadsword", u'EXECUTIONER_BROADSWORD', 4, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_a', 0, 14, 0, 0, '', 3, 100, 0, 0, 0, 0, 2, 12109, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_a', 3, 19, 63, 0, 3, 2337, 5, 0, 0, 70.5],
26601: [52, 2, 600, 26601, u'Knee_Boots_Brown', u'LEATHER_KNEE_BOOTS', 2, 7, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sho_f_knee', 2, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
810: [51, 2, 650, 810, u'Sharp Broadsword', u'SHARP_BROADSWORD', 2, 1, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_broadsword_b', 0, 3, 0, 0, '', 1, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_broadsword_b', 3, 8, 34, 0, 0, 0, 5, 0, 0, 20],
21251: [52, 2, 500, 21251, u'PuffyShirt_Ties', u'LACED_PUFFY_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 25, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 0, -1, 1],
21252: [52, 2, 500, 21252, u'PuffyShirt_Brown_Leather', u'REINFORCED_PUFFY_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 25, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 1, -1, 0],
21253: [52, 2, 200, 21253, u'PuffyShirt_Plain', u'COTTON_PUFFY_SHIRT', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 25, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 2, -1, 1],
21254: [52, 2, 200, 21254, u'PuffyShirt_Center_Tie', u'LINEN_PUFFY_SHIRT', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 25, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 3, -1, 1],
21255: [52, 0, 1200, 21255, u'PuffyShirt_Orange_Vest', u'RENAISSANCE_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 17, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 4, -1, 0],
21256: [52, 2, 1200, 21256, u'PuffyShirt_Brown_Pillow', u'PADDED_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 1, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 5, -1, 0],
21257: [52, 0, 1200, 21257, u'PuffyShirt_Brown_Vest', u'LEATHER_PUFFY_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 1, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 6, -1, 0],
21258: [52, 0, 1200, 21258, u'PuffyShirt_Red_Vest', u'MERCHANT_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 3, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 7, -1, 0],
21259: [52, 2, 1200, 21259, u'PuffyShirt_Intermediate_Outfit', u'TRAVELERS_PUFFY_SHIRT', 3, 1, 0, 0, 1, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 8, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 8, -1, 1],
21260: [52, 1, 1200, 21260, u'PuffyShirt_Red_Vest_Gold_Trim', u'MERCHANT_SHIRT_GOLD_TRIM', 3, 1, 0, 1, 0, 0, 0, 0, 36, u'pir_t_ico_sht_m_long_sleeve_low_cut_puffy', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, 6, -1, 9, -1, 0],
21301: [52, 2, 500, 21301, u'LongSleeve_Metal_Buttons', u'LONG_TREASURE_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 17, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 0, -1, 0],
21302: [52, 2, 500, 21302, u'LongSleeve_Plain1', u'COTTON_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 1, -1, 1],
21303: [52, 2, 500, 21303, u'LongSleeve_Plain2', u'LINEN_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 2, -1, 1],
21304: [52, 0, 500, 21304, u'LongSleeve_Brown_Collar', u'LACED_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 3, -1, 0],
21305: [52, 0, 500, 21305, u'LongSleeve_Waistband', u'TIED_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 5, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 4, -1, 0],
21306: [52, 2, 500, 21306, u'LongSleeve_Leather_Waistband', u'FLAP_LONG_SLEEVE', 2, 1, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 5, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 5, -1, 0],
21307: [52, 0, 500, 21307, u'LongSleeve_Yellow_Collar', u'FLEET_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 33, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 12, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 6, -1, 0],
21308: [52, 2, 500, 21308, u'LongSleeve_Basic_Outfit', u'RECRUIT_LONG_SLEEVE', 2, 1, 0, 0, 1, 0, 0, 0, 22, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 1, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 7, -1, 0],
21309: [52, 0, 500, 21309, u'LongSleeve_Zombie', u'ZOMBIE_LONG_SLEEVE', 2, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 12, 52, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 7, -1, 8, -1, 0],
21310: [52, 0, 500, 21310, u'LongSleeve_Zombies', u'ZOMBIES_LONG_SLEEVE', 2, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 12, 52, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 7, -1, 8, -1, 0],
21351: [52, 2, 200, 21351, u'Open_LongSleeve_Metal_Buttons', u'OPEN_TREASURE_LONG_SLEEVE', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, -1, 0, -1, 0],
21352: [52, 2, 200, 21352, u'Open_LongSleeve_Plain1', u'COTTON_OPEN_LONG_SLEEVE', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, -1, 1, -1, 0],
21353: [52, 2, 200, 21353, u'Open_LongSleeve_Plain2', u'LINEN_OPEN_LONG_SLEEVE', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, -1, 2, -1, 0],
21354: [52, 2, 500, 21354, u'Dealer_Shirt', u'DEALER_SHIRT', 2, 1, 0, 0, 0, 0, 0, 1, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 6, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, -1, 0, -1, 0],
21401: [52, 0, 1200, 21401, u'Puffy_Cinco_De_Mayo', u'CINCO_DE_MAYO_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_m_holiday', 10, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 0, -1, 0],
21402: [52, 1, 1200, 21402, u'Puffy_Halloween', u'HALLOWEEN_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, 11, -1, 1, -1, 0],
21403: [52, 1, 1200, 21403, u'Puffy_Thanksgiving', u'THANKSGIVING_SHIRT', 3, 1, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sht_m_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 2, -1, 0],
21404: [52, 1, 1200, 21404, u'Puffy_GuyFawkes', u'GUY_FAWKES_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sht_m_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, 11, -1, 3, -1, 0],
21405: [52, 2, 1200, 21405, u'Puffy_Valentines', u'VALENTINES_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_m_holiday', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 11, -1, 4, -1, 0],
21406: [52, 0, 1200, 21406, u'Puffy_WinterHoliday', u'WINTER_HOLIDAY_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_m_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 5, -1, 0],
21407: [52, 0, 1200, 21407, u'Puffy_CaribbeanDay', u'CARIBBEAN_DAY_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 30, u'pir_t_ico_sht_m_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 6, -1, 0],
21408: [52, 0, 1200, 21408, u'Puffy_Carnival', u'CARNIVAL_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 4, u'pir_t_ico_sht_m_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 7, -1, 0],
21409: [52, 1, 1200, 21409, u'Puffy_Chinese_Newyear', u'CHINESE_NEWYEAR_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 5, u'pir_t_ico_sht_m_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, 11, -1, 8, -1, 0],
21410: [52, 1, 1200, 21410, u'Puffy_First_Fall', u'AUTUMN_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_m_holiday', 10, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, 11, -1, 9, -1, 0],
21411: [52, 1, 1200, 21411, u'Puffy_New_Years_Eve', u'NEW_YEARS_EVE_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_sht_m_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, 11, -1, 10, -1, 0],
21412: [52, 1, 1200, 21412, u'Puffy_Saint_Patricks', u'SAINT_PATRICKS_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 13, u'pir_t_ico_sht_m_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 11, -1, 11, -1, 0],
21413: [52, 1, 1200, 21413, u'Puffy_Summer_Solistic', u'SUMMER_SOLSTICE_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sht_m_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, 11, -1, 12, -1, 0],
21414: [52, 0, 1200, 21414, u'Puffy_Winter_Solistic', u'WINTER_SOLSTICE_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, -1, 13, -1, 0],
21415: [52, 1, 1200, 21415, u'Puffy_First_Spring', u'SPRING_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_sht_m_holiday', 10, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, 11, -1, 14, -1, 0],
21416: [52, 1, 1200, 21416, u'Puffy_Mardi_Gras', u'MARDI_GRAS_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_sht_m_holiday', 10, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 11, -1, 15, -1, 0],
21417: [52, 1, 1200, 21417, u'Puffy_Xmas', u'XMAS_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_m_holiday', 10, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 11, -1, 16, -1, 0],
21418: [52, 0, 200, 21418, u'Highneck_Plain', u'HIGHNECK_PLAIN_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, -1, 0, -1, 1],
21419: [52, 0, 1200, 21419, u'Highneck_French_Assassin', u'HIGHNECK_FRENCH_ASSASSIN_SHIRT', 3, 1, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 12, -1, 1, -1, 0],
21420: [52, 0, 1200, 21420, u'Highneck_Baron', u'HIGHNECK_BARON_SHIRT', 3, 1, 0, 0, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 44, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, 12, -1, 2, -1, 0],
21421: [52, 0, 1200, 21421, u'Highneck_Peacock', u'HIGHNECK_PEACOCK_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, -1, 3, -1, 0],
21422: [52, 0, 1200, 21422, u'Highneck_Prince', u'HIGHNECK_PRINCE_SHIRT', 3, 1, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_m_long_sleeve_v_neck_closed', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 12, -1, 4, -1, 0],
21430: [52, 0, 1200, 21430, u'Puffy_Barbary_Corsair', u'BARBARY_CORSAIR_SHIRT', 3, 1, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sht_m_holiday', 56, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, 11, -1, 17, -1, 0],
10401: [56, 2, 20, 10401, u'Faded Sea Chart', u'FADED_SEA_CHART', 1, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 0, 0, 0, '', 1, 1001, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 3, 0, 0, 0, 0, 3],
21451: [52, 2, 0, 21451, u'Apron_Brown', u'BROWN_APRON', 3, 1, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sht_m_short_sleeve_round_neck', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 0, -1, 0],
21452: [52, 2, 0, 21452, u'Apron_Dirty', u'DIRTY_APRON', 3, 1, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sht_m_short_sleeve_round_neck', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 1, -1, 0],
10402: [56, 2, 90, 10402, u"Mechant's Sea Chart", u'MERCHANT_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 0, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12652, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 5, 0, 0, 0, 0, 7],
10403: [56, 2, 310, 10403, u"Smuggler's Sea Chart", u'SMUGGLER_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 3, 0, 0, '', 2, 1001, 0, 0, 0, 0, 2, 12652, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 8, 0, 0, 0, 0, 14],
10404: [56, 2, 900, 10404, u"Rumrunner's Sea Chart", u'RUMRUNNER_SEA_CHART', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 6, 0, 0, '', 3, 1001, 0, 0, 0, 0, 3, 12652, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 11, 0, 0, 0, 0, 21],
10405: [56, 2, 90, 10405, u"Landlubber's Sea Chart", u'LANDLUBBER_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 2, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12653, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 7, 0, 0, 0, 0, 7],
10406: [56, 2, 310, 10406, u"Sailor's Sea Chart", u'SAILOR_SEA_CHART', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 5, 0, 0, '', 2, 1001, 0, 0, 0, 0, 2, 12653, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 10, 0, 0, 0, 0, 14],
10407: [56, 2, 900, 10407, u"Freebooter's Sea Chart", u'FREEBOOTER_SEA_CHART', 3, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 8, 0, 0, '', 3, 1001, 0, 0, 0, 0, 3, 12653, 0, 0, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 13, 0, 0, 0, 0, 21],
5101: [51, 1, 0, 5101, u'Ceremonial Knife', u'CEREMONIAL_KNIVE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_a', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_a', 17, 2, 3, 0, 0, 0, 0, 0, 0, 1.5],
5102: [51, 1, 30, 5102, u'Tribal Knife', u'TRIBAL_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_a', 0, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_a', 17, 4, 6, 0, 0, 0, 0, 0, 0, 5],
5103: [51, 1, 40, 5103, u'Ritual Knife', u'RITUAL_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_a', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_a', 17, 6, 9, 0, 0, 0, 0, 0, 0, 6.5],
5104: [51, 1, 160, 5104, u'Knife of the Blood Idol', u'KNIFE_OF_THE_BLOOD_IDOL', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 10, 0, 0, '', 1, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 10, 15, 0, 1, 3503, 0, 0, 0, 12.5],
2216: [51, 2, 4100, 2216, u'Shark Sticker Bayonet', u'SHARK_STICKER_BAYONET', 4, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_a', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 2310, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_a', 10, 23, 66, 1, 3, 3509, 0, 0, 0, 64],
5106: [51, 1, 780, 5106, u'Knife of the Raven Idol', u'KNIFE_OF_THE_RAVEN_IDOL', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 18, 0, 0, '', 2, 200, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 18, 32, 0, 2, 3503, 0, 0, 0, 28],
5107: [51, 1, 1370, 5107, u'Knife of the War Idol', u'KNIFE_OF_THE_WAR_IDOL', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 22, 0, 0, '', 2, 200, 1, 201, 0, 0, 2, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 22, 38, 0, 2, 3503, 0, 0, 0, 37],
5108: [51, 1, 2260, 5108, u'Knife of the Demon Idol', u'KNIFE_OF_THE_DEMON_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 26, 0, 0, '', 3, 200, 1, 201, 0, 0, 2, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 26, 49, 0, 3, 3503, 0, 0, 0, 47.5],
5109: [51, 1, 3190, 5109, u'Knife of the Death Idol', u'KNIFE_OF_THE_DEATH_IDOL', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 30, 0, 0, '', 3, 200, 2, 201, 0, 0, 3, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 30, 55, 0, 3, 3503, 0, 0, 0, 56.5],
5110: [51, 1, 170, 5110, u'Grim Strike Knife', u'GRIM_STRIKE_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', 0, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 8, 12, 0, 1, 2288, 0, 0, 0, 13],
2217: [51, 2, 680, 2217, u'Militia Bayonet', u'MILITIA_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_c', 0, 2, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12210, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_c', 10, 7, 24, 1, 0, 0, 0, 0, 0, 26],
5112: [51, 1, 1230, 5112, u'Decay Strike Knife', u'DECAY_STRIKE_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 1, 12404, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 18, 32, 0, 2, 2288, 0, 0, 0, 35],
5113: [51, 1, 1940, 5113, u'Fatal Strike Knife', u'FATAL_STRIKE_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12402, 2, 12404, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 23, 40, 0, 2, 2288, 0, 0, 0, 44],
5114: [51, 1, 3720, 5114, u'Death Strike Knife', u'DEATH_STRIKE_KNIFE', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12402, 2, 12404, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 28, 52, 0, 3, 2288, 0, 0, 0, 61],
5115: [51, 1, 360, 5115, u'Night Hunter Knife', u'NIGHT_HUNTER_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_e', 0, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12404, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_e', 17, 12, 18, 0, 1, 3502, 0, 0, 0, 19],
5116: [51, 1, 1370, 5116, u'Night Chaser Knife', u'NIGHT_CHASER_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_e', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12404, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_e', 17, 19, 34, 0, 2, 3502, 0, 0, 0, 37],
21501: [52, 2, 200, 21501, u'ShortBlouse_Stitch', u'STITCHED_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_short_sleeve_round_neck', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 0, 1],
21502: [52, 2, 200, 21502, u'ShortBlouse_Three_Button', u'BURLAP_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 18, u'pir_t_ico_sht_f_short_sleeve_round_neck', 31, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 1, 0],
21503: [52, 2, 500, 21503, u'ShortBlouse_Collar', u'TRIMMED_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_f_short_sleeve_round_neck', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 2, 0],
5120: [51, 1, 310, 5120, u'Viper Venom Knife', u'VIPER_VENOM_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 17, 31, 0, 1, 2289, 0, 0, 0, 17.5],
5121: [51, 1, 550, 5121, u'Copperhead Venom Knife', u'COPPERHEAD_VENOM_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12407, 1, 12408, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 21, 37, 0, 2, 2289, 0, 0, 0, 23.5],
5122: [51, 1, 1160, 5122, u'Mamba Venom Knife', u'MAMBA_VENOM_KNIFE', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12407, 2, 12408, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 25, 48, 0, 2, 2289, 0, 0, 0, 34],
21507: [52, 2, 500, 21507, u'ShortBlouse_YellowGold', u'EMBROIDERED_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_sht_f_short_sleeve_round_neck', 2, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 6, 0],
21508: [52, 0, 1200, 21508, u'ShortBlouse_Scourge', u'SCOURGE_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_short_sleeve_round_neck', 51, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, -1, 0, -1, 7, 0],
21509: [52, 0, 1200, 21509, u'ShortBlouse_Sea_Serpent', u'SEA_SERPENT_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 0, 42, u'pir_t_ico_sht_f_short_sleeve_round_neck', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, -1, 0, -1, 8, 0],
5126: [51, 1, 1120, 5126, u'Poisoned Knife', u'POISONED_KNIFE', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_a', u'Coated with poison. Increases the damage on all your Debuffs.', 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_a', 17, 5, 8, 0, 0, 0, 0, 0, 0, 4],
5127: [51, 1, 3940, 5127, u'Venomous Knife', u'VENOM_KNIFE', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_c', u'Laced with venom. Increases the damage on all your Debuffs.', 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_c', 17, 10, 15, 0, 0, 0, 0, 0, 0, 7.5],
5128: [51, 1, 9260, 5128, u'Toxic Knife', u'TOXIC_KNIFE', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', u'Covered in toxic venom. Increases the damage on all your Debuffs.', 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 15, 23, 0, 0, 0, 0, 0, 0, 11.5],
5129: [51, 1, 15750, 5129, u'Plague Knife', u'PLAGUE_KNIFE', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_e', u'A deadly knife. Increases the damage on all your Debuffs.', 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_e', 17, 20, 30, 0, 0, 0, 0, 0, 0, 15],
5130: [51, 1, 25270, 5130, u'Dire Knife', u'DIRE_KNIFE', 2, 4, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', u'Coated in strong poison. Increases the damage on all your Debuffs.', 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 25, 38, 0, 0, 0, 0, 0, 0, 19],
10413: [56, 2, 900, 10413, u'Lost World Sea Chart', u'LOST_WORLD_SEA_CHART', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_charts', 0, 16, 0, 0, '', 1, 1001, 0, 0, 0, 0, 3, 12658, 2, 12657, 0, 0, 1, u'pir_m_inv_sal_charts', 28, 21, 0, 0, 0, 0, 17],
10414: [56, 2, 200, 10414, u"Thieve's Sea Globe", u'THIEVE_SEA_GLOBE', 2, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_globe_b', 0, 12, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12659, 0, 0, 0, 0, 1, u'pir_m_inv_sal_globe_b', 28, 17, 0, 0, 0, 0, 11],
2223: [51, 2, 1520, 2223, u"Swashbuckler's Bayonet", u'SWASHBUCKLER_BAYONET', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_musket_d', 0, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12200, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_musket_d', 10, 14, 38, 1, 1, 2316, 0, 0, 0, 39],
10416: [56, 2, 1110, 10416, u"Robber's Sea Globe", u'ROBBER_SEA_GLOBE', 4, 8, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_sal_globe_b', 0, 22, 0, 0, '', 1, 1001, 0, 0, 0, 0, 1, 12659, 2, 12652, 0, 0, 1, u'pir_m_inv_sal_globe_b', 28, 27, 0, 0, 0, 0, 19],
26801: [52, 2, 0, 26801, u'Shoe_Navy', u'NAVY_SHOES', 1, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5, 0, 0, 0],
26802: [52, 2, 0, 26802, u'Shoe_EITC', u'EITC_SHOES', 1, 7, 0, 0, 0, 0, 0, 1, 29, u'pir_t_ico_sho_m_medium', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -1, 0, -1, 0],
21551: [52, 2, 200, 21551, u'ShortPuffy_Lace_Front', u'DINGY_PUFF_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 0],
21552: [52, 2, 200, 21552, u'ShortPuffy_Two_Ties', u'COTTON_PUFF_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 1],
21553: [52, 2, 500, 21553, u'ShortPuffy_Three_Button', u'FLAPPED_PUFF_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 0],
21554: [52, 2, 500, 21554, u'ShortPuffy_Front_Bow', u'FRILLY_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 18, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 3, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
21555: [52, 0, 200, 21555, u'ShortPuffy_Light_Green', u'COLLAR_PUFF_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
21556: [52, 0, 500, 21556, u'ShortPuffy_Powder_Blue', u'SKY_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 3, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
21557: [52, 0, 1200, 21557, u'ShortPuffy_Red_Trim', u'FESTIVAL_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 3, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 6, 0],
21558: [52, 1, 1200, 21558, u'ShortPuffy_Gold_Trim', u'FESTIVAL_BLOUSE_GOLD_TRIM', 3, 1, 0, 1, 0, 0, 0, 0, 36, u'pir_t_ico_sht_f_short_sleeve_v_neck_puffy', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, -1, 1, -1, 7, 0],
35001: [53, 2, 800, 35001, u'TattooChestDagger', u'TATTOO_CHEST_DAGGER', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_8dagger', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 2, 103, 0, 0],
35002: [53, 2, 800, 35002, u'TattooChestHeartTorch', u'TATTOO_CHEST_HEART_TORCH', 2, 0, 0, 1, 0, 0, 0, 0, 1, u'tattoo_chest_color_heart_screw', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 1, 103, 0, 0],
21601: [52, 2, 200, 21601, u'Top_Collar_Buttons', u'DINGY_TOP', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 0],
21602: [52, 2, 1200, 21602, u'Top_Broach', u'SKULL_BROACH_TOP', 3, 1, 1, 1, 0, 0, 0, 0, 35, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 3, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 1, 0],
21603: [52, 2, 200, 21603, u'Top_Front_Tie', u'COTTON_TOP', 1, 1, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 2, 1],
21604: [52, 2, 200, 21604, u'Top_Stitch', u'PANEL_TOP', 1, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 3, 0],
21605: [52, 0, 500, 21605, u'Top_Blue_Cuffs', u'CITY_TOP', 2, 1, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 5, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 4, 0],
21606: [52, 2, 500, 21606, u'Top_Olive', u'WOODLAND_TOP', 2, 1, 0, 1, 0, 0, 0, 0, 38, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 9, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 5, 0],
21607: [52, 0, 500, 21607, u'Top_Purple_Cuffs', u'VILLA_TOP', 2, 1, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_sht_f_long_sleeve_square_neck_puffy', 2, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 6, 0],
21651: [52, 2, 500, 21651, u'ScoopNeck_Ruffle_Corset', u'TAILORED_PUFFY_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
21652: [52, 2, 500, 21652, u'ScoopNeck_Three_Button', u'SWASHBUCKLER_TOP', 2, 1, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 25, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 1, 1],
21653: [52, 2, 500, 21653, u'ScoopNeck_Ruffles', u'RUFFLE_SHIRT', 1, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 2, 1],
21654: [52, 2, 500, 21654, u'ScoopNeck_Ties', u'FINE_PUFFY_SHIRT', 2, 1, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 3, 1],
21655: [52, 0, 1200, 21655, u'ScoopNeck_Brown_Green_Sleeves', u'MERCHANT_TOP', 3, 1, 1, 1, 0, 0, 0, 0, 41, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 2, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 4, 0],
21656: [52, 0, 1200, 21656, u'ScoopNeck_Pink_Collar', u'FLORID_TOP', 3, 1, 1, 1, 0, 0, 0, 0, 39, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 5, 0],
21657: [52, 0, 1200, 21657, u'ScoopNeck_Grey_Sleeves', u'LACE_TRIM_TOP', 3, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 3, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 6, 0],
21658: [52, 2, 500, 21658, u'ScoopNeck_Basic_Outfit', u'RECRUIT_TOP', 2, 1, 0, 0, 1, 0, 0, 0, 22, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 7, 0],
21659: [52, 2, 500, 21659, u'ScoopNeck_Intermediate_Outfit', u'TRAVELERS_TOP', 2, 1, 0, 0, 1, 0, 0, 0, 2, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 8, 1],
21660: [52, 2, 1200, 21660, u'ScoopNeck_Advanced_Outfit', u'ADVENTURE_TOP', 3, 1, 0, 0, 1, 0, 0, 0, 2, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 9, 1],
21661: [52, 0, 1200, 21661, u'ScoopNeck_French_Assassin', u'FRENCH_ASSASSIN_TOP', 3, 1, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, -1, 3, -1, 10, 0],
21662: [52, 0, 1200, 21662, u'ScoopNeck_Barbary_Corsair', u'BARBARY_CORSAIR_TOP', 3, 1, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_sht_f_long_sleeve_v_neck_low_cut1', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, -1, 3, -1, 11, 0],
21701: [52, 2, 200, 21701, u'LongSleeve_Collar_Lace', u'HIGH_SEAS_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 25, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
21702: [52, 2, 200, 21702, u'LongSleeve_Collar_Buttons', u'DARK_WATER_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 25, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 1, 0],
21703: [52, 2, 200, 21703, u'LongSleeve_Collar_Stripes', u'BOARDING_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 18, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 2, 0],
21704: [52, 2, 200, 21704, u'LongSleeve_Collar_Stitches', u'DARK_CLOUD_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 25, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 3, 0],
21705: [52, 2, 200, 21705, u'LongSleeve_Collar_Brown_Vest', u'ROUGH_WATER_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 25, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 4, 0],
21706: [52, 2, 1200, 21706, u'LongSleeve_Collar_Green_Vest', u'OPERA_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 5, 0],
21707: [52, 2, 1200, 21707, u'LongSleeve_Collar_Red_Vest', u'MUSKETEER_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 18, u'pir_t_ico_sht_f_long_sleeve_v_neck_collar', 8, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 6, 0],
21751: [52, 0, 1200, 21751, u'LongSleeve_Collar_Caribbean_Day', u'CARIBBEAN_DAY_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 6, u'pir_t_ico_sht_f_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 7, 0],
21752: [52, 0, 1200, 21752, u'LongSleeve_Collar_Cinco_De_Mayo', u'CINCO_DE_MAYO_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 18, u'pir_t_ico_sht_f_holiday', 10, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 8, 0],
21753: [52, 1, 1200, 21753, u'LongSleeve_Collar_Guy_Fawkes', u'GUY_FAWKES_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_sht_f_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, -1, 4, -1, 9, 0],
21754: [52, 1, 1200, 21754, u'LongSleeve_Collar_Halloween', u'HALLOWEEN_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_f_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, -1, 4, -1, 10, 0],
21755: [52, 1, 1200, 21755, u'LongSleeve_Collar_Summer_Solstice', u'SUMMER_SOLSTICE_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_sht_f_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, -1, 4, -1, 11, 0],
21756: [52, 1, 1200, 21756, u'LongSleeve_Collar_Thanksgiving', u'THANKSGIVING_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_sht_f_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 12, 0],
21757: [52, 0, 1200, 21757, u'LongSleeve_Collar_Holiday', u'WINTER_HOLIDAY_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_f_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 13, 0],
21758: [52, 0, 1200, 21758, u'LongSleeve_Collar_Carnival', u'CARNIVAL_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 4, u'pir_t_ico_sht_f_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 14, 0],
21759: [52, 1, 1200, 21759, u'LongSleeve_Collar_Chinese_Newyear', u'CHINESE_NEWYEAR_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_f_holiday', 10, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, -1, 4, -1, 15, 0],
21760: [52, 2, 1200, 21760, u'LongSleeve_Collar_Valentines', u'VALENTINES_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_f_holiday', 10, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, -1, 4, -1, 16, 0],
21761: [52, 1, 1200, 21761, u'LongSleeve_Collar_First_Fall', u'AUTUMN_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 19, u'pir_t_ico_sht_f_holiday', 10, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, -1, 4, -1, 17, 0],
21762: [52, 1, 1200, 21762, u'LongSleeve_Collar_First_Spring', u'SPRING_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_sht_f_holiday', 10, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, -1, 4, -1, 18, 0],
21763: [52, 1, 1200, 21763, u'LongSleeve_Collar_New_Years_Eve', u'NEW_YEARS_EVE_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_sht_f_holiday', 10, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, -1, 4, -1, 19, 0],
21764: [52, 1, 1200, 21764, u'LongSleeve_Collar_Saint_Patricks', u'SAINT_PATRICKS_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 13, u'pir_t_ico_sht_f_holiday', 10, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, -1, 4, -1, 20, 0],
21765: [52, 0, 1200, 21765, u'LongSleeve_Collar_Winter_Soltice', u'WINTER_SOLSTICE_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 21, 1],
21766: [52, 1, 1200, 21766, u'LongSleeve_Collar_Xmas', u'XMAS_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_sht_f_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, -1, 4, -1, 22, 0],
21767: [52, 1, 1200, 21767, u'LongSleeve_Collar_Mardi_Gras', u'MARDI_GRAS_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 6, u'pir_t_ico_sht_f_holiday', 10, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, -1, 4, -1, 23, 0],
21768: [52, 0, 500, 21768, u'LongSleeve_Collar_Diplomat', u'DIPLOMAT_BLOUSE', 2, 1, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_holiday', 58, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, -1, 4, -1, 24, 0],
21769: [52, 0, 1200, 21769, u'LongSleeve_Collar_Prince', u'PRINCE_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_holiday', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 4, -1, 25, 0],
21801: [52, 2, 200, 21801, u'LongSleeve_Tall_Fleur', u'FLEUR_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 0, 0],
21802: [52, 2, 200, 21802, u'LongSleeve_Buttons', u'TEACHERS_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 2, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 1, 0],
21803: [52, 2, 200, 21803, u'LongSleeve_Stitch', u'RIDING_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 23, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 2, 0],
21804: [52, 2, 200, 21804, u'LongSleeve_Ties', u'COTTON_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 2, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 3, 1],
21805: [52, 2, 1200, 21805, u'LongSleeve_Green', u'GARDEN_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 13, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 9, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 4, 0],
21806: [52, 2, 1200, 21806, u'LongSleeve_LightBlue', u'DAY_DREAM_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 15, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 5, 0],
21807: [52, 2, 1200, 21807, u'LongSleeve_Purple_White', u'MORNING_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 26, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 3, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 6, 0],
21808: [52, 1, 1200, 21808, u'LongSleeve_Blue_White', u'BLUE_WHITE_BLOUSE', 3, 1, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 2, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, -1, 5, -1, 7, 0],
21809: [52, 0, 1200, 21809, u'LongSleeve_Tall_Baroness', u'BARONESS_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 0, 2, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 45, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, -1, 5, -1, 8, 0],
30002: [54, 2, 300, 30002, u'Silver Brow Spike', u'SILVER_BROW_SPIKE', 2, 0, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_spike', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0],
30003: [54, 2, 400, 30003, u'Golden Brow Ring', u'GOLDEN_BROW_RING', 2, 0, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_brow_lip_ring', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 0],
30004: [54, 2, 300, 30004, u'Silver Brow Ring', u'SILVER_BROW_RING', 2, 0, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_brow_lip_ring', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 0],
30005: [54, 1, 400, 30005, u'Golden Brow Ring Spike', u'GOLDEN_BROW_RING_SPIKE', 3, 0, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_spike_ring', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 1, 1],
30006: [54, 1, 300, 30006, u'Silver Brow Ring Spike', u'SILVER_BROW_RING_SPIKE', 3, 0, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_spike_ring', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 2, 2],
21851: [52, 2, 0, 21851, u'FlareSleeve_Gypsy', u'GYPSY_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 1, 8, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 0, 0],
21852: [52, 2, 0, 21852, u'FlareSleeve_Bartender', u'BARTENDER_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 1, 23, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 1, 0],
21853: [52, 2, 0, 21853, u'FlareSleeve_Barmaid', u'BARMAID_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 1, 20, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 2, 0],
21854: [52, 2, 0, 21854, u'FlareSleeve_Shopkeeper', u'SHOPKEEPER_BLOUSE', 3, 1, 0, 0, 0, 0, 0, 1, 13, u'pir_t_ico_sht_f_long_sleeve_tall_collar', 42, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 6, -1, 3, 0],
5501: [51, 2, 100, 5501, u'Cursed Staff', u'CURSED_STAFF', 2, 6, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', u'A tribal fetish used for summoning evil spirits.', 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 18, 0, 2, 0, 0, 0, 13, 0, LVector3f(0, 1.6, 0), 1],
5502: [51, 2, 300, 5502, u'Warped Staff', u'WARPED_STAFF', 2, 6, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', u'A powerful fetish used for summoning and controlling spirits.', 20, 0, 0, '', 1, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 18, 5, 3, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 2.5],
5503: [51, 2, 1800, 5503, u'Rend Staff', u'REND_STAFF', 2, 6, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', u'Sought after by many, this fetish allows the bearer to speak to the spirits of the dead.', 25, 0, 0, '', 2, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 18, 10, 4, 0, 0, 0, 15, 0, LVector3f(0, 1.7, 0), 4],
5504: [51, 2, 6000, 5504, u'Harrow Staff', u'HARROW_STAFF', 3, 6, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', u'Harrow Staff. Inflict the secrets of the dead upon your enemies with this rare staff.', 30, 0, 0, '', 3, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 18, 15, 6, 0, 0, 0, 16, 0, LVector3f(0, 1.5, 0), 6],
5505: [51, 2, 12000, 5505, u'Vile Staff', u'VILE_STAFF', 3, 6, 0, 0, 1, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', u'Vile Staff. Summon the plagues of the dead and unknown against your foes.', 35, 0, 0, '', 4, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 18, 20, 8, 0, 0, 0, 17, 0, LVector3f(0, 1.6, 0), 8],
5506: [51, 2, 10, 5506, u'Dread Staff', u'DREAD_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', 0, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12701, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 2, 2, 0, 0, 0, 18, 0, LVector3f(0, 1.7, 0), 3],
5507: [51, 1, 260, 5507, u'Haunted Staff', u'HAUNTED_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12701, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 15, 16, 0, 1, 3501, 15, 0, LVector3f(0, 1.7, 0), 16],
5508: [51, 1, 990, 5508, u'Possessed Staff', u'POSSESSED_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12701, 1, 12707, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 20, 27, 0, 2, 3501, 15, 0, LVector3f(0, 1.7, 0), 31.5],
5509: [51, 1, 2210, 5509, u'Phantom Staff', u'PHANTOM_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12701, 2, 12707, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 25, 38, 0, 3, 3501, 15, 0, LVector3f(0, 1.7, 0), 47],
5510: [51, 1, 550, 5510, u'Skull Staff', u'SKULL_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12710, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 17, 23, 0, 1, 2373, 15, 0, LVector3f(0, 1.7, 0), 23.5],
5511: [51, 1, 1440, 5511, u'Dire Staff', u'DIRE_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12710, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 21, 28, 0, 2, 2373, 15, 0, LVector3f(0, 1.7, 0), 38],
5512: [51, 1, 2300, 5512, u'Diabolic Staff', u'DIABOLIC_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 41, 0, 0, '', 1, 206, 0, 0, 0, 0, 2, 12710, 1, 12701, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 26, 40, 0, 2, 2373, 15, 0, LVector3f(0, 1.7, 0), 48],
5513: [51, 1, 4160, 5513, u'Demon Skull Staff', u'DEMON_SKULL_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_d', 0, 45, 0, 0, '', 1, 206, 0, 0, 0, 0, 3, 12710, 2, 12701, 0, 0, 1, u'pir_m_hnd_stf_dark_d', 19, 30, 45, 0, 3, 2373, 15, 0, LVector3f(0, 1.7, 0), 64.5],
5514: [51, 1, 290, 5514, u'Singed Staff', u'SINGED_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_b', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12704, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_b', 19, 10, 10, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 17],
5515: [51, 1, 780, 5515, u'Burnt Staff', u'BURNT_STAFF', 3, 6, 1, 0, 1, 0, 0, 0, 1, u'pir_t_ico_stf_dark_b', 0, 31, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12704, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_b', 19, 16, 22, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 28],
5516: [51, 1, 1760, 5516, u'Charred Staff', u'CHARRED_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_b', 0, 37, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12704, 1, 12709, 0, 0, 1, u'pir_m_hnd_stf_dark_b', 19, 22, 30, 0, 2, 2373, 18, 0, LVector3f(0, 1.7, 0), 42],
5517: [51, 1, 3970, 5517, u'Cajun Staff', u'CAJUN_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_b', 0, 43, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12704, 2, 12709, 0, 0, 1, u'pir_m_hnd_stf_dark_b', 19, 28, 42, 0, 3, 2373, 18, 0, LVector3f(0, 1.7, 0), 63],
5518: [51, 1, 110, 5518, u'Spirit Caller Staff', u'SPIRIT_CALLER_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12701, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 5, 3, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 10.5],
5519: [51, 1, 630, 5519, u'Spirit Burner Staff', u'SPIRIT_BINDER_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12701, 1, 12700, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 8, 12, 0, 2, 2373, 18, 0, LVector3f(0, 1.7, 0), 25],
5520: [51, 1, 1560, 5520, u'Spirit Shredder Staff', u'SPIRIT_SHREDDER_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12701, 2, 12700, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 11, 21, 0, 3, 2373, 18, 0, LVector3f(0, 1.7, 0), 39.5],
5521: [51, 1, 420, 5521, u'Soul Harvester Staff', u'SOUL_HARVESTER_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_c', 0, 35, 0, 0, '', 1, 230, 0, 0, 0, 0, 0, 0, 1, 12701, 0, 0, 1, u'pir_m_hnd_stf_dark_c', 19, 20, 27, 0, 0, 0, 15, 0, LVector3f(0, 1.7, 0), 20.5],
5522: [51, 1, 1440, 5522, u'Soul Reaper Staff', u'SOUL_REAPER_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_c', 0, 40, 0, 0, '', 2, 230, 0, 0, 0, 0, 1, 12706, 2, 12701, 0, 0, 1, u'pir_m_hnd_stf_dark_c', 19, 25, 38, 0, 0, 0, 15, 0, LVector3f(0, 1.7, 0), 38],
5523: [51, 1, 2860, 5523, u'Soul Eater Staff', u'SOUL_EATER_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_c', 0, 45, 0, 0, '', 3, 230, 0, 0, 0, 0, 2, 12706, 3, 12701, 0, 0, 1, u'pir_m_hnd_stf_dark_c', 19, 30, 45, 0, 0, 0, 15, 0, LVector3f(0, 1.7, 0), 53.5],
5524: [51, 1, 3780, 5524, u'Dark Omen', u'DARK_OMEN', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', 0, 45, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12704, 3, 12707, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 30, 45, 0, 3, 3501, 15, 0, LVector3f(0, 1.7, 0), 61.5],
30101: [54, 1, 600, 30101, u'Turquoise Brow Spike', u'TURQUOISE_BROW_SPIKE', 3, 0, 0, 0, 1, 0, 0, 0, 33, u'pir_t_ico_jwl_spike', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 6, 6],
30102: [54, 1, 800, 30102, u'Sapphire Brow Ring', u'SAPPHIRE_BROW_RING', 3, 0, 0, 0, 1, 0, 0, 0, 12, u'pir_t_ico_jwl_brow_lip_ring', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 5, 5],
5537: [51, 2, 3610, 5537, u'Bone Staff', u'BONE_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', u'Radiates a strong aura that increases the Attack Power of nearby friends.', 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 5, 3, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 8.5],
5538: [51, 2, 7200, 5538, u'Grim Staff', u'GRIM_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_b', u'A grim staff filled with dark power.', 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_b', 19, 10, 10, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 12],
5539: [51, 2, 11250, 5539, u'Skeletal Staff', u'SKELETAL_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_c', u'Made from the bones of animals. Pulses with dark power.', 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_c', 19, 15, 16, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 15],
5540: [51, 2, 16200, 5540, u'Undead Staff', u'UNDEAD_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_a', u'A frightening staff that pulses with hostile energy.', 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_a', 19, 20, 22, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 18],
5541: [51, 2, 22050, 5541, u'Death Staff', u'DEATH_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_dark_c', u'-', 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_dark_c', 19, 25, 28, 0, 1, 2373, 18, 0, LVector3f(0, 1.7, 0), 21],
22001: [52, 2, 300, 22001, u'Vest_Open_Leather_Silk', u'LEATHER_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_vst_m_open', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 0],
22002: [52, 2, 300, 22002, u'Vest_Open_Patchwork_Dark', u'PATCHWORK_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_m_open', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 0],
22003: [52, 2, 700, 22003, u'Vest_Open_Belts', u'BELTED_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_vst_m_open', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 2, -1, 0],
22004: [52, 2, 700, 22004, u'Vest_Open_Clasp', u'DRESSY_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_vst_m_open', 3, 9, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3, -1, 0],
22005: [52, 2, 700, 22005, u'Vest_Open_Buttons', u'SUIT_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_m_open', 2, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 4, -1, 0],
22006: [52, 0, 700, 22006, u'Vest_Open_Blue_SilverButtons', u'UNIFORM_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_vst_m_open', 3, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 5, -1, 0],
22007: [52, 0, 700, 22007, u'Vest_Open_Merchant', u'MERCHANT_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_vst_m_open', 2, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 6, -1, 0],
22008: [52, 1, 1500, 22008, u'Vest_Open_Scarf', u'SCARF_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_m_open', 30, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, 1, -1, 7, -1, 0],
22009: [52, 0, 700, 22009, u'Vest_Open_Green_Lapel', u'PERFORMER_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 13, u'pir_t_ico_vst_m_open', 5, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 8, -1, 0],
22010: [52, 2, 700, 22010, u'Vest_Open_Silk_Stripe', u'HUNTING_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_vst_m_closed', 30, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
22011: [52, 2, 700, 22011, u'Vest_Open_Closed_Clasp', u'SILK_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_vst_m_closed', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 1],
22012: [52, 2, 700, 22012, u'Vest_Open_Closed_Lapel', u'WOODLAND_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_vst_m_closed', 9, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 0],
22013: [52, 2, 700, 22013, u'Vest_Open_Closed_Leathertop', u'CARPENTER_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_vst_m_closed', 2, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 0],
22014: [52, 2, 700, 22014, u'Vest_Open_Closed_Stripe', u'NIGHT_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_m_closed', 30, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 4, -1, 0],
22015: [52, 2, 1500, 22015, u'Vest_Open_Advanced_outfit', u'OPEN_ADVENTURE_VEST', 3, 2, 0, 0, 1, 0, 0, 0, 8, u'pir_t_ico_vst_m_closed', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 5, -1, 0],
22016: [52, 2, 700, 22016, u'Vest_Long_Closed', u'EMBELLISHED_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_vst_m_long_closed', 19, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 0, -1, 0],
22017: [52, 2, 700, 22017, u'Vest_Long_Closed_White_Collar', u'SHOP_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_vst_m_long_closed', 2, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 1, -1, 0],
22018: [52, 2, 700, 22018, u'Vest_Long_Closed_Rust', u'LONGSHOREMAN_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_m_long_closed', 6, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 2, -1, 0],
22019: [52, 2, 700, 22019, u'Vest_Long_Closed_Rope', u'SACK_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_vst_m_long_closed', 5, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 3, -1, 1],
22020: [52, 0, 1500, 22020, u'Vest_Long_Closed_Yellow_Green', u'BANK_VEST', 3, 2, 1, 1, 0, 0, 0, 0, 42, u'pir_t_ico_vst_m_long_closed', 2, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 4, -1, 0],
22021: [52, 2, 700, 22021, u'Vest_Long_Closed_Intermediate_Outfit', u'TRAVELERS_VEST', 2, 2, 0, 0, 1, 0, 0, 0, 7, u'pir_t_ico_vst_m_long_closed', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 5, -1, 0],
22022: [52, 1, 1500, 22022, u'Vest_Long_Closed_Black_Gold', u'EMBELLISHED_BLACK_GOLD_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_vst_m_long_closed', 19, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 3, -1, 6, -1, 0],
22023: [52, 0, 1500, 22023, u'Vest_Long_Open_BountyHunter', u'BOUNTYHUNTER_VEST', 3, 2, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_vst_m_open', 55, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, 1, -1, 9, -1, 0],
22024: [52, 0, 1500, 22024, u'Vest_Closed_Rogue_Privateer', u'ROGUE_PRIVATEER_VEST', 3, 2, 0, 0, 0, 0, 0, 0, 18, u'pir_t_ico_vst_m_closed', 47, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 2, -1, 7, -1, 0],
22025: [52, 0, 1500, 22025, u'Vest_Long_Closed_Wildfire', u'WILDFIRE_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 22, u'pir_t_ico_vst_m_long_closed', 54, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 3, -1, 7, -1, 0],
5801: [51, 2, 0, 5801, u'Rotten Staff', u'ROTTEN_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 3, 2, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 1],
5802: [51, 2, 0, 5802, u'Juju Staff', u'JUJU_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12700, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 5, 1, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 1.5],
5803: [51, 1, 20, 5803, u'Anti-venom Staff', u'ANTIVENOM_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', 0, 22, 0, 0, '', 1, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 7, 4, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 4],
5804: [51, 1, 230, 5804, u'Resistance Staff', u'RESISTANCE_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', 0, 26, 0, 0, '', 2, 200, 1, 201, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 11, 14, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 15],
5805: [51, 1, 960, 5805, u'Regrowth Staff', u'REGROWTH_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', 0, 30, 0, 0, '', 3, 200, 2, 201, 1, 228, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 15, 24, 0, 0, 0, 14, 0, LVector3f(0, 1.1, 0), 31],
5806: [51, 1, 360, 5806, u'Staff of Mists', u'STAFF_OF_MISTS', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_d', 0, 30, 0, 0, '', 1, 203, 0, 0, 0, 0, 1, 12709, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_d', 20, 15, 14, 0, 1, 2372, 16, 0, LVector3f(0, 1.5, 0), 19],
5807: [51, 1, 1480, 5807, u'Staff of Rain', u'STAFF_OF_RAIN', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_d', 0, 38, 0, 0, '', 2, 203, 0, 0, 0, 0, 2, 12709, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_d', 20, 23, 29, 0, 2, 2372, 16, 0, LVector3f(0, 1.5, 0), 38.5],
5808: [51, 1, 3190, 5808, u'Staff of Storms', u'STAFF_OF_STORMS', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_d', 0, 44, 0, 0, '', 3, 203, 0, 0, 0, 0, 3, 12709, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_d', 20, 29, 41, 0, 3, 2372, 16, 0, LVector3f(0, 1.5, 0), 56.5],
5809: [51, 1, 230, 5809, u'Boa Staff', u'BOA_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 24, 0, 0, '', 1, 101, 0, 0, 0, 0, 1, 12702, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 9, 6, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 15],
5810: [51, 1, 1260, 5810, u'Python Staff', u'PYTHON_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 33, 0, 0, '', 2, 101, 0, 0, 0, 0, 2, 12702, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 18, 23, 0, 2, 2372, 14, 0, LVector3f(0, 1.1, 0), 35.5],
5811: [51, 1, 3030, 5811, u'Serpent Staff', u'SERPENT_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 41, 0, 0, '', 3, 101, 0, 0, 0, 0, 3, 12702, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 26, 38, 0, 3, 2372, 14, 0, LVector3f(0, 1.1, 0), 55],
5812: [51, 1, 290, 5812, u'Misshapen Staff', u'MISSHAPEN_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12709, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 15, 14, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 17],
5813: [51, 1, 1120, 5813, u'Twisted Staff', u'TWISTED_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12709, 1, 12700, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 20, 25, 0, 2, 2372, 14, 0, LVector3f(0, 1.1, 0), 33.5],
5814: [51, 1, 1940, 5814, u'Gnarled Staff', u'GNARLED_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', 0, 39, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12709, 2, 12700, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 24, 30, 0, 3, 2372, 14, 0, LVector3f(0, 1.1, 0), 44],
5815: [51, 1, 2810, 5815, u'Ancient Staff', u'ANCIENT_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', 0, 43, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12709, 3, 12700, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 28, 40, 0, 3, 2372, 14, 0, LVector3f(0, 1.1, 0), 53],
5816: [51, 1, 310, 5816, u'Stone Guard Staff', u'STONE_GUARD_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', 0, 24, 0, 0, '', 1, 121, 0, 0, 0, 0, 1, 12707, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 9, 11, 0, 1, 2372, 16, 0, LVector3f(0, 1.5, 0), 17.5],
5817: [51, 1, 1300, 5817, u'Granite Guard Staff', u'GRANITE_GUARD_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', 0, 30, 0, 0, '', 2, 121, 0, 0, 0, 0, 2, 12707, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 15, 24, 0, 2, 2372, 16, 0, LVector3f(0, 1.5, 0), 36],
5818: [51, 1, 2600, 5818, u'Earth Guard Staff', u'EARTH_GUARD_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', 0, 35, 0, 0, '', 3, 121, 0, 0, 0, 0, 3, 12707, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 20, 30, 0, 3, 2372, 16, 0, LVector3f(0, 1.5, 0), 51],
5831: [51, 2, 4340, 5831, u'Healing Staff', u'HEALING_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_a', u'Pulses with an aura that heals all friendly pirates slowly.', 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_a', 20, 6, 3, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 8.5],
5832: [51, 2, 7940, 5832, u'Mending Staff', u'MENDING_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', u'This wooden staff is filled with healing voodoo energy.', 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 11, 9, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 11.5],
5833: [51, 2, 12620, 5833, u'Restoration Staff', u'RESTORATION_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', u'Glows with healing energy that can heal others nearby.', 31, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 16, 15, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 14.5],
5834: [51, 2, 18380, 5834, u'Renewal Staff', u'RENEWAL_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_b', u"Uses the earth's elements to heal others.", 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_b', 20, 21, 21, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 17.5],
5835: [51, 2, 26460, 5835, u'Life Staff', u'LIFE_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_nature_c', u'This staff glows with healing power.', 41, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_nature_c', 20, 26, 28, 0, 1, 2372, 14, 0, LVector3f(0, 1.1, 0), 21],
30501: [54, 2, 150, 30501, u'Golden Ear Stud', u'GOLDEN_EAR_STUD', 2, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_stud', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0],
30502: [54, 2, 100, 30502, u'Silver Ear Stud', u'SILVER_EAR_STUD', 2, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_stud', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0],
30503: [54, 2, 200, 30503, u'Golden Ear Small Loop', u'GOLDEN_EAR_SMALL_LOOP', 2, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ear_loop', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 15, 1, 0],
30504: [54, 2, 150, 30504, u'Silver Ear Small Loop', u'SILVER_EAR_SMALL_LOOP', 2, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_ear_loop', 1, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 15, 2, 0],
30505: [54, 2, 400, 30505, u'Golden Ear Double Loop', u'GOLDEN_EAR_DOUBLE_LOOP', 3, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ear_double_loop', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 1],
30506: [54, 2, 300, 30506, u'Silver Ear Double Loop', u'SILVER_EAR_DOUBLE_LOOP', 3, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_jwl_ear_double_loop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 2],
30507: [54, 2, 350, 30507, u'Gold and Silver Ear Double Loop', u'GOLD_AND_SILVER_EAR_DOUBLE_LOOP', 3, 1, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_jwl_ear_double_loop', 10, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 2],
30508: [54, 2, 200, 30508, u'Golden Small Ear Spike', u'GOLDEN_SMALL_EAR_SPIKE', 2, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_spike', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 1, 0],
30509: [54, 2, 150, 30509, u'Silver Small Ear Spike', u'SILVER_SMALL_EAR_SPIKE', 2, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_spike', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 2, 0],
30510: [54, 2, 250, 30510, u'Golden Large Ear Loop', u'GOLDEN_LARGE_EAR_LOOP', 2, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ear_loop_large', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 1, 0],
30511: [54, 2, 200, 30511, u'Silver Large Ear Loop', u'SILVER_LARGE_EAR_LOOP', 2, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_ear_loop_large', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 2, 0],
30512: [54, 2, 650, 30512, u'Golden Large Ear Loop With Double Top Ring', u'GOLDEN_LARGE_EAR_LOOP_WITH_DOUBLE', 3, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ear_loop_large_double', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 1, 1],
30513: [54, 2, 500, 30513, u'Silver Large Ear Loop With Double Top Ring', u'SILVER_LARGE_EAR_LOOP_WITH_DOUBLE', 3, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_ear_loop_large_double', 10, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 2, 2],
30514: [54, 1, 500, 30514, u'Golden Ear Cuffs', u'GOLDEN_EAR_CUFFS', 3, 1, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_double_ring', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 16, 1, 1],
30515: [54, 1, 400, 30515, u'Silver Ear Cuffs', u'SILVER_EAR_CUFFS', 3, 1, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_double_ring', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 16, 2, 2],
30601: [54, 1, 600, 30601, u'Onyx Large Ear Loop', u'ONYX_LARGE_EAR_LOOP', 3, 1, 0, 0, 1, 0, 0, 0, 29, u'pir_t_ico_jwl_ear_loop_large', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 8, 8],
30602: [54, 1, 800, 30602, u'Ruby and Amethyst Ear Stud and Ring', u'RUBY_AND_AMETHYST_EAR_STUD_AND_RING', 2, 1, 0, 0, 1, 0, 0, 0, 35, u'pir_t_ico_jwl_spike_ring', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 3, 4],
5105: [51, 1, 340, 5105, u'Knife of the Jackal Idol', u'KNIFE_OF_THE_JACKAL_IDOL', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 15, 0, 0, '', 1, 200, 0, 0, 0, 0, 1, 12407, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 15, 23, 0, 1, 3503, 0, 0, 0, 18.5],
5111: [51, 1, 380, 5111, u'Blight Strike Knife', u'BLIGHT_STRIKE_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_b', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12401, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_b', 17, 14, 21, 0, 1, 2288, 0, 0, 0, 19.5],
6101: [51, 2, 30, 6101, u'Tribal Staff', u'TRIBAL_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12700, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 7, 7, 0, 0, 0, 17, 0, LVector3f(0, 1.6, 0), 5.5],
6102: [51, 1, 170, 6102, u'Staff of Cleansing', u'STAFF_OF_CLEANSING', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', 0, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12707, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 12, 10, 0, 1, 3500, 17, 0, LVector3f(0, 1.6, 0), 13],
6103: [51, 1, 840, 6103, u'Staff of Purification', u'STAFF_OF_PURIFICATION', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', 0, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12707, 1, 12700, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 19, 24, 0, 2, 3500, 17, 0, LVector3f(0, 1.6, 0), 29],
6104: [51, 1, 1940, 6104, u'Staff of Sacred Rituals', u'STAFF_OF_SACRED_RITUALS', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', 0, 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12707, 2, 12700, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 25, 36, 0, 3, 3500, 17, 0, LVector3f(0, 1.6, 0), 44],
6105: [51, 1, 230, 6105, u'Staff of Protection', u'STAFF_OF_PROTECTION', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12703, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 10, 8, 0, 1, 2371, 13, 0, LVector3f(0, 1.6, 0), 15],
6106: [51, 1, 600, 6106, u'Staff of Warding', u'STAFF_OF_WARDING', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12703, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 15, 19, 0, 1, 2371, 13, 0, LVector3f(0, 1.6, 0), 24.5],
6107: [51, 1, 1300, 6107, u'Staff of Shielding', u'STAFF_OF_SHIELDING', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', 0, 34, 0, 0, '', 1, 227, 0, 0, 0, 0, 2, 12703, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 19, 24, 0, 2, 2371, 13, 0, LVector3f(0, 1.6, 0), 36],
6108: [51, 1, 2210, 6108, u'Staff of Defiance', u'STAFF_OF_DEFIANCE', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', 0, 38, 0, 0, '', 2, 227, 0, 0, 0, 0, 3, 12703, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 23, 34, 0, 2, 2371, 13, 0, LVector3f(0, 1.6, 0), 47],
6109: [51, 1, 3420, 6109, u'Staff of Sanctuary', u'STAFF_OF_SANCTUARY', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', 0, 42, 0, 0, '', 3, 227, 0, 0, 0, 0, 3, 12703, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 27, 39, 0, 3, 2371, 13, 0, LVector3f(0, 1.6, 0), 58.5],
6110: [51, 1, 270, 6110, u'Sage Staff', u'SAGE_STAFF', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12708, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 13, 11, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 16.5],
6111: [51, 1, 650, 6111, u'Ritual Staff', u'RITUAL_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12708, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 17, 21, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 25.5],
6112: [51, 1, 1370, 6112, u'Banishing Staff', u'BANISHING_STAFF', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_d', 0, 34, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12708, 1, 12705, 0, 0, 1, u'pir_m_hnd_stf_ward_d', 21, 19, 24, 0, 2, 2371, 17, 0, LVector3f(0, 1.6, 0), 37],
6113: [51, 1, 2400, 6113, u'Taboo Staff', u'TABOO_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_d', 0, 38, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12708, 2, 12705, 0, 0, 1, u'pir_m_hnd_stf_ward_d', 21, 23, 34, 0, 2, 2371, 17, 0, LVector3f(0, 1.6, 0), 49],
6114: [51, 1, 3720, 6114, u'Exorcism Staff', u'EXORCISM_STAFF', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_d', 0, 41, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12708, 3, 12705, 0, 0, 1, u'pir_m_hnd_stf_ward_d', 21, 26, 38, 0, 3, 2371, 17, 0, LVector3f(0, 1.6, 0), 61],
6115: [51, 1, 170, 6115, u'Staff of the Sacred Owl', u'STAFF_OF_THE_SACRED_OWL', 2, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12701, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 10, 8, 0, 1, 2371, 13, 0, LVector3f(0, 1.6, 0), 13],
6116: [51, 1, 810, 6116, u'Staff of the Sacred Moon', u'STAFF_OF_THE_SACRED_MOON', 3, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12701, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 17, 21, 0, 2, 2371, 13, 0, LVector3f(0, 1.6, 0), 28.5],
6117: [51, 1, 2210, 6117, u'Staff of the Sacred Stars', u'STAFF_OF_THE_SACRED_STARS', 4, 6, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', 0, 38, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12701, 1, 12704, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 23, 34, 0, 3, 2371, 13, 0, LVector3f(0, 1.6, 0), 47],
22502: [52, 2, 300, 22502, u'Vest_Closed_Plain', u'COTTON_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_vst_f_closed', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 1],
22503: [52, 2, 700, 22503, u'Vest_Closed_Striped', u'DECK_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_vst_f_closed', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 0],
22504: [52, 2, 700, 22504, u'Vest_Closed_Ties', u'LACED_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_vst_f_closed', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
22505: [52, 2, 700, 22505, u'Vest_Closed_Browngold', u'YEOMAN_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_f_closed', 6, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
22506: [52, 2, 700, 22506, u'Vest_Closed_Brownpruple', u'CORSETED_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_f_closed', 1, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
22507: [52, 2, 700, 22507, u'Vest_Closed_Lightgreen', u'FOREST_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_vst_f_closed', 9, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 6, 0],
22508: [52, 2, 1500, 22508, u'Vest_Closed_Redblack', u'COUNTESS_VEST', 3, 2, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_vst_f_closed', 3, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 7, 0],
22509: [52, 2, 700, 22509, u'Vest_Closed_Whiteblue', u'CITY_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_f_closed', 2, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 8, 0],
22510: [52, 2, 700, 22510, u'Vest_Closed_Yellowgreen', u'COUNTRY_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_vst_f_closed', 9, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 9, 0],
22511: [52, 2, 1500, 22511, u'Vest_Closed_Advanced_Outfit', u'ADVENTURE_VEST', 3, 2, 0, 0, 1, 0, 0, 0, 8, u'pir_t_ico_vst_f_closed', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 10, 0],
6131: [51, 2, 5670, 6131, u'Defender Staff', u'DEFENDER_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', u'Has a protective aura that increases defense for nearby pirates.', 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 7, 4, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 9],
6132: [51, 2, 10080, 6132, u'Warden Staff', u'WARDEN_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', u'Covered in defensive wards and trinkets.', 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 12, 10, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 12],
6133: [51, 2, 15750, 6133, u'Overseer Staff', u'OVERSEER_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_c', u'A tribal voodoo staff with strong defensive magic.', 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_c', 21, 17, 16, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 15],
6134: [51, 2, 23960, 6134, u'Guardian Staff', u'GUARDIAN_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_b', u"Protects nearby friends when using it's aura.", 37, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_b', 21, 22, 23, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 18.5],
6135: [51, 2, 32360, 6135, u'Tribal Chief Staff', u'TRIBAL_CHIEF_STAFF', 2, 6, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_stf_ward_a', u'Glows with protective voodoo energy.', 42, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_stf_ward_a', 21, 27, 29, 0, 1, 2371, 17, 0, LVector3f(0, 1.6, 0), 21.5],
5119: [51, 1, 70, 5119, u'Snake Venom Knife', u'SNAKE_VENOM_KNIFE', 2, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 11, 17, 0, 1, 2289, 0, 0, 0, 8.5],
21504: [52, 2, 200, 21504, u'ShortBlouse_Ties', u'DINGY_BLOUSE', 1, 1, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_sht_f_short_sleeve_round_neck', 31, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 3, 0],
21505: [52, 2, 1200, 21505, u'ShortBlouse_Blue_Lace', u'COURT_BLOUSE', 3, 1, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_sht_f_short_sleeve_round_neck', 3, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 4, 0],
21506: [52, 2, 500, 21506, u'ShortBlouse_Pink_White', u'TAVERN_BLOUSE', 2, 1, 1, 1, 0, 0, 0, 0, 35, u'pir_t_ico_sht_f_short_sleeve_round_neck', 2, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 5, 0],
5123: [51, 1, 1520, 5123, u'Cobra Venom Knife', u'COBRA_VENOM_KNIFE', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_d', 0, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12407, 2, 12408, 0, 0, 1, u'pir_m_hnd_knf_hollow_d', 17, 29, 54, 0, 3, 2289, 0, 0, 0, 39],
22551: [52, 2, 300, 22551, u'Vest_Low_Two_Button', u'LOOSE_BUTTONED_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_vst_f_lowcut', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 0],
22552: [52, 2, 300, 22552, u'Vest_Low_Yellow_Gold', u'LOOSE_LEATHER_VEST', 1, 2, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_vst_f_lowcut', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 1, 0],
22553: [52, 2, 700, 22553, u'Vest_Low_Stripes', u'ORCHARD_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_vst_f_lowcut', 9, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 2, 0],
22554: [52, 2, 700, 22554, u'Vest_Low_Ties', u'MESSENGER_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_vst_f_lowcut', 5, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 3, 0],
22555: [52, 2, 700, 22555, u'Vest_Low_BlueGold', u'COURT_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 30, u'pir_t_ico_vst_f_lowcut', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 4, 0],
22556: [52, 2, 700, 22556, u'Vest_Low_Browngold', u'BAND_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_f_lowcut', 23, 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 5, 0],
22557: [52, 2, 700, 22557, u'Vest_Low_Greenyellow', u'CLOVER_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 5, u'pir_t_ico_vst_f_lowcut', 2, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 6, 0],
22558: [52, 2, 700, 22558, u'Vest_Low_Lightyellow', u'GARDEN_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_vst_f_lowcut', 9, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 7, 0],
22559: [52, 2, 700, 22559, u'Vest_Low_Purplegold', u'PETAL_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 25, u'pir_t_ico_vst_f_lowcut', 3, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 8, 0],
22560: [52, 2, 700, 22560, u'Vest_Low_Redblack', u'DARKNESS_VEST', 2, 2, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_vst_f_lowcut', 23, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 9, 0],
22561: [52, 2, 700, 22561, u'Vest_Low_Intermediate_Outfit', u'TRAVELERS_LOOSE_VEST', 2, 2, 0, 0, 1, 0, 0, 0, 7, u'pir_t_ico_vst_f_lowcut', 1, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 10, 0],
22562: [52, 2, 700, 22562, u'Vest_Low_Brown_Pillow', u'BROWN_PILLOW_VEST', 3, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_f_lowcut', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 11, 0],
22563: [52, 1, 700, 22563, u'Vest_Low_Red_Brown', u'RED_BROWN_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_vst_f_lowcut', 23, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, -1, 2, -1, 12, 0],
22564: [52, 1, 700, 22564, u'Vest_Low_Dark_Blue_Gold', u'DARK_BLUE_GOLD_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 49, u'pir_t_ico_vst_f_lowcut', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, -1, 2, -1, 13, 0],
22565: [52, 0, 700, 22565, u'Vest_Low_Prince', u'PRINCE_VEST', 3, 2, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_vst_f_lowcut', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 2, -1, 14, 0],
26610: [52, 1, 1800, 26610, u'Knee_Boots_Gold_Buttons', u'GOLD_BUTTONS_BOOTS', 3, 7, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_sho_f_knee', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, -1, 3, -1, 9, 0],
22601: [52, 2, 1500, 22601, u'Corset_High_Leather_Straps', u'ROYAL_CHEST_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_vst_f_corset_high', 33, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 0, 0],
22602: [52, 2, 1500, 22602, u'Corset_High_Frilly_Lacy', u'ROSE_PETAL_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_vst_f_corset_high', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 1, 0],
22603: [52, 2, 1500, 22603, u'Corset_High_SimpleCanvas', u'OLD_GROVE_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 41, u'pir_t_ico_vst_f_corset_high', 9, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 2, 0],
22604: [52, 2, 1500, 22604, u'Corset_High_Bluegrey', u'GAS_LAMP_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_f_corset_high', 5, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 4, 0],
22605: [52, 2, 1500, 22605, u'Corset_High_LightBlue', u'HIGH_TIDE_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_f_corset_high', 1, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 5, 0],
22606: [52, 2, 1500, 22606, u'Corset_High_Yellow', u'MORNING_DEW_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 13, u'pir_t_ico_vst_f_corset_high', 9, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 6, 0],
22607: [52, 2, 1500, 22607, u'Corset_Low_Four_Laces', u'RAWHIDE_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_vst_f_corset_low', 7, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
22608: [52, 2, 1500, 22608, u'Corset_Low_Print', u'EMBROIDERED_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_vst_f_corset_low', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 1, 0],
22609: [52, 2, 1500, 22609, u'Corset_Low_Ribs', u'COTTON_CORSET', 3, 2, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_vst_f_corset_low', 2, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 2, 1],
22610: [52, 1, 1500, 22610, u'Corset_Low_WhiteCross', u'CHRISTENING_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_vst_f_corset_low', 32, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, -1, 4, -1, 3, 0],
22611: [52, 1, 1500, 22611, u'Corset_Low_Green_Gold', u'SEAFOAM_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_vst_f_corset_low', 1, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, -1, 4, -1, 4, 0],
22612: [52, 1, 1500, 22612, u'Corset_Low_RedVest', u'FAIRDAY_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_vst_f_corset_low', 33, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, -1, 4, -1, 5, 0],
22613: [52, 0, 1500, 22613, u'Corset_High_Peacock', u'PEACOCK_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_vst_f_corset_high', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 7, 0],
22614: [52, 0, 1500, 22614, u'Corset_High_Zombie_Pirate', u'ZOMBIE_PIRATE_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_vst_f_corset_high', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, -1, 3, -1, 8, 0],
22615: [52, 0, 1500, 22615, u'Corset_Low_Bountyhunter', u'BOUNTYHUNTER_CORSET', 3, 2, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_vst_f_corset_low', 55, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, -1, 4, -1, 6, 0],
22616: [52, 0, 1500, 22616, u'Corset_Low_Rogue_Privateer', u'ROGUE_PRIVATEER_CORSET', 3, 2, 0, 0, 0, 0, 0, 0, 3, u'pir_t_ico_vst_f_corset_low', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, -1, 4, -1, 7, 0],
22617: [52, 0, 1500, 22617, u'Corset_Low_Wildfire', u'WILDFIRE_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 22, u'pir_t_ico_vst_f_corset_low', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -1, 4, -1, 8, 0],
22618: [52, 0, 1500, 22618, u'Corset_High_Zombies_Pirate', u'ZOMBIES_PIRATE_CORSET', 3, 2, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_vst_f_corset_high', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, -1, 3, -1, 8, 0],
5118: [51, 1, 4490, 5118, u'Night Predator Knife', u'NIGHT_PREDATOR_KNIFE', 4, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_e', 0, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12404, 2, 12410, 0, 0, 1, u'pir_m_hnd_knf_hollow_e', 17, 29, 54, 0, 3, 3502, 0, 0, 0, 67],
31001: [54, 2, 300, 31001, u'Golden Nose Loop', u'GOLDEN_NOSE_LOOP', 2, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_nose_loop', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0],
31002: [54, 2, 250, 31002, u'Silver Nose Loop', u'SILVER_NOSE_LOOP', 2, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0],
31003: [54, 2, 300, 31003, u'Golden Nose Center Loop', u'GOLDEN_NOSE_CENTER_LOOP', 2, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_nose_loop', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 1, 0],
31004: [54, 2, 250, 31004, u'Silver Nose Center Loop', u'SILVER_NOSE_CENTER_LOOP', 2, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 2, 0],
31005: [54, 2, 350, 31005, u'Golden Nose Spike', u'GOLDEN_NOSE_SPIKE', 2, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_spike', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 0],
31006: [54, 2, 300, 31006, u'Silver Nose Spike', u'SILVER_NOSE_SPIKE', 2, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_spike', 15, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 0],
31007: [54, 2, 700, 31007, u'Golden Double Nose Spike', u'GOLDEN_DOUBLE_NOSE_SPIKE', 3, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_nose_double_spike', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 1, 1],
31008: [54, 2, 600, 31008, u'Silver Double Nose Spike', u'SILVER_DOUBLE_NOSE_SPIKE', 3, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_double_spike', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 2, 2],
31009: [54, 2, 650, 31009, u'Golden Nose Spike with Loop', u'GOLDEN_NOSE_SPIKE_WITH_LOOP', 3, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_nose_loop_spike', 10, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 1, 1],
31010: [54, 2, 550, 31010, u'Silver Nose Spike with Loop', u'SILVER_NOSE_SPIKE_WITH_LOOP', 3, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop_spike', 6, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 2, 2],
31011: [54, 1, 1000, 31011, u'Golden Double Nose Spike with Loop', u'GOLDEN_DOUBLE_NOSE_SPIKE_WITH_LOOP', 3, 2, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_nose_loop_double_spike', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 1, 1],
31012: [54, 1, 850, 31012, u'Silver Double Nose Spike with Loop', u'SILVER_DOUBLE_NOSE_SPIKE_WITH_LOOP', 3, 2, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop_double_spike', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 8, 2, 2],
3817: [51, 2, 1190, 3817, u'Gypsy Doll', u'GYPSY_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12603, 1, 12602, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 15, 25, 0, 2, 2304, 12, 0, 0, 34.5],
5117: [51, 1, 2070, 5117, u'Night Stalker Knife', u'NIGHT_STALKER_KNIFE', 3, 4, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_knf_hollow_e', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12404, 1, 12410, 0, 0, 1, u'pir_m_hnd_knf_hollow_e', 17, 24, 41, 0, 2, 3502, 0, 0, 0, 45.5],
31101: [54, 1, 1200, 31101, u'Emerald Double Nose Spike', u'EMERALD_DOUBLE_NOSE_SPIKE', 3, 2, 0, 0, 1, 0, 0, 0, 5, u'pir_t_ico_jwl_nose_double_spike', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 7, 7],
3819: [51, 2, 3660, 3819, u'Cabal Doll', u'CABAL_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 3, 12602, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 26, 43, 0, 3, 2304, 12, 0, 0, 60.5],
3820: [51, 1, 5180, 3820, u"Tia Dalma's Doll", u'TIA_DALMA_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 2, 12602, 2, 12600, 1, u'pir_m_hnd_dol_mojo_c', 13, 30, 58, 0, 3, 2304, 12, 0, 0, 72],
3821: [51, 2, 120, 3821, u'Straw Doll', u'STRAW_DOLL', 2, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12605, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 8, 12, 0, 1, 3503, 11, 0, 0, 11],
3822: [51, 2, 360, 3822, u'Warding Doll', u'WARDING_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12605, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 12, 22, 0, 2, 3503, 11, 0, 0, 19],
3823: [51, 2, 630, 3823, u'Hex Watcher Doll', u'HEX_WATCHER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 14, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12605, 1, 12610, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 16, 26, 0, 2, 3503, 11, 0, 0, 25],
3824: [51, 2, 1230, 3824, u'Spell Binder Doll', u'SPELL_BINDER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12605, 2, 12610, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 20, 36, 0, 3, 3503, 11, 0, 0, 35],
3825: [51, 2, 1720, 3825, u'Curse Breaker Doll', u'CURSE_BREAKER_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12605, 3, 12610, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 24, 41, 0, 3, 3503, 11, 0, 0, 41.5],
3826: [51, 1, 3190, 3826, u"Jack Sparrow's Voodoo Doll", u'JACK_SPARROW_DOLL', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', 0, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12605, 3, 12610, 2, 12606, 1, u'pir_m_hnd_dol_mojo_e', 13, 28, 55, 0, 3, 3503, 11, 0, 0, 56.5],
3827: [51, 2, 650, 3827, u'Hex Reflecter Doll', u'HEX_REFLECTER_DOLL', 3, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', 0, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 1, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 15, 25, 0, 1, 3506, 7, 0, 0, 25.5],
3828: [51, 2, 1940, 3828, u'Hex Rebound Doll', u'HEX_REBOUND_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', 0, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 20, 36, 0, 2, 3506, 7, 0, 0, 44],
3829: [51, 2, 3720, 3829, u'Hex Guardian Doll', u'HEX_GUARDIAN_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', 0, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12602, 1, 12608, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 25, 42, 0, 3, 3506, 7, 0, 0, 61],
3830: [51, 1, 6080, 3830, u"Calypso's Radiance", u'CALYPSO_RADIANCE', 5, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12602, 2, 12608, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 30, 58, 0, 3, 3506, 12, 0, 0, 78],
3831: [51, 2, 650, 3831, u'Cotton Doll', u'COTTON_SOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_a', u'A mystical doll able to protect others from evil voodoo.', 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_a', 13, 3, 6, 0, 1, 2304, 7, 0, 0, 9],
3832: [51, 2, 1150, 3832, u'Ornate Doll', u'ORNATE_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_b', u'A fancy doll which protects others from harmful voodoo.', 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_b', 13, 8, 12, 0, 1, 2304, 7, 0, 0, 12],
3833: [51, 2, 1800, 3833, u'Enchanted Doll', u'ENCHANTED_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', u'Enchanted with strong hexes to ward off evil.', 11, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 13, 18, 0, 1, 2304, 7, 0, 0, 15],
23001: [52, 2, 1000, 23001, u'Coat_Long_BraidsandFloral', u'DINGY_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_cot_m_long', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 0],
23002: [52, 2, 1000, 23002, u'Coat_Long_Embroidery', u'EMBROIDERED_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 42, u'pir_t_ico_cot_m_long', 13, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 0],
23003: [52, 2, 1000, 23003, u'Coat_Long_Light_Trim', u'TOURIST_LONG_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 12, u'pir_t_ico_cot_m_long', 3, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 2, -1, 0],
23004: [52, 2, 1000, 23004, u'Coat_Long_Black_Trim', u'TRADER_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_m_long', 6, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3, -1, 0],
3834: [51, 2, 2590, 3834, u'Magic Doll', u'MAGIC_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_d', u'A rare doll that pulses with voodoo magic.', 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_d', 13, 18, 24, 0, 1, 2304, 7, 0, 0, 18],
23006: [52, 2, 1000, 23006, u'Coat_Long_French', u'WOOLEN_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_m_long', 1, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 5, -1, 0],
23007: [52, 2, 1000, 23007, u'Coat_Long_Leather', u'LEATHERCRAFT_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_cot_m_long', 3, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 6, -1, 0],
23008: [52, 2, 3000, 23008, u'Coat_Long_Afro', u'DARKHEART_LONG_COAT', 3, 3, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_cot_m_long', 19, 40, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 7, -1, 0],
23009: [52, 2, 1000, 23009, u'Coat_Long_Taupe', u'WEATHERED_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 48, u'pir_t_ico_cot_m_long', 1, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 8, -1, 0],
23010: [52, 2, 1000, 23010, u'Coat_Long_Brown', u'MERCHANT_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_cot_m_long', 6, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 9, -1, 0],
3835: [51, 2, 3360, 3835, u'Mysterious Doll', u'MYSTERIOUS_DOLL', 2, 3, 0, 1, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_e', u'A mysterious voodoo doll with powerful protection spells on it.', 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_dol_mojo_e', 13, 23, 29, 0, 1, 2304, 7, 0, 0, 20.5],
23012: [52, 2, 1500, 23012, u'Coat_Long_Gold_Black', u'KEELHAUL_LONG_COAT', 3, 3, 0, 1, 0, 0, 1, 0, 7, u'pir_t_ico_cot_m_long', 12, 32, 0, 7, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 11, -1, 0],
23013: [52, 1, 1000, 23013, u'Coat_Long_Green_Yellow', u'LUCKY_LONG_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_cot_m_long', 3, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, 1, -1, 12, -1, 0],
23014: [52, 1, 3000, 23014, u'Coat_Long_Red_Yellow', u'CLUB_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_cot_m_long', 3, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, 1, -1, 13, -1, 0],
23015: [52, 2, 3000, 23015, u'Coat_Long_Advanced_Outfit', u'ADVENTURE_LONG_COAT', 3, 3, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_cot_m_long', 8, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 14, -1, 0],
23016: [52, 1, 3000, 23016, u'Coat_Long_Royal', u'ROYAL_LONG_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 43, u'pir_t_ico_cot_m_long', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 1, -1, 15, -1, 0],
23017: [52, 1, 3000, 23017, u'Coat_Long_Black_Gold', u'BLACK_GOLD_LONG_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_cot_m_long', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, 1, -1, 16, -1, 0],
23018: [52, 0, 3000, 23018, u'Coat_Long_French_Assassin', u'FRENCH_ASSASSIN_LONG_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_cot_m_long', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 1, -1, 17, -1, 0],
23019: [52, 0, 3000, 23019, u'Coat_Long_Baron', u'BARON_LONG_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 47, u'pir_t_ico_cot_m_long', 44, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, 1, -1, 18, -1, 0],
23020: [52, 0, 3000, 23020, u'Coat_Long_Prince', u'PRINCE_LONG_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_cot_m_long', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 1, -1, 19, -1, 0],
23021: [52, 0, 3000, 23021, u'Coat_Long_Rogue_Privateer', u'ROGUE_PRIVATEER_LONG_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 18, u'pir_t_ico_cot_m_long', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 1, -1, 20, -1, 0],
23022: [52, 0, 1000, 23022, u'Coat_Long_Scrouge', u'SCOURGE_LONG_COAT', 2, 3, 0, 0, 0, 0, 0, 0, 40, u'pir_t_ico_cot_m_long', 51, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 1, -1, 21, -1, 0],
23023: [52, 0, 3000, 23023, u'Coat_Long_Wildfire', u'WILDIFRE_LONG_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 22, u'pir_t_ico_cot_m_long', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 1, -1, 22, -1, 0],
23024: [52, 0, 1000, 23024, u'Coat_Long_Zombie_Pirate', u'ZOMBIE_PIRATE_LONG_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_m_long', 52, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 1, -1, 23, -1, 0],
23025: [52, 0, 1000, 23025, u'Coat_Long_Zombies_Pirate', u'ZOMBIES_PIRATE_LONG_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_m_long', 52, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 1, -1, 23, -1, 0],
23101: [52, 2, 600, 23101, u'Coat_Short_Black_Stitching', u'SWABBIE_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_m_short', 1, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
23102: [52, 2, 600, 23102, u'Coat_Short_Darkleather_Gold', u'MONEY_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 42, u'pir_t_ico_cot_m_short', 3, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 0],
23103: [52, 2, 600, 23103, u'Coat_Short_Stringties', u'COTTON_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 37, u'pir_t_ico_cot_m_short', 2, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 1],
23104: [52, 2, 2600, 23104, u'Coat_Short_Red_Black_Leather', u'EVENING_JACKET', 3, 3, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_cot_m_short', 32, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 0],
23105: [52, 2, 600, 23105, u'Coat_Short_Wool_Brown', u'SHIPWRIGHT_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_cot_m_short', 6, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 4, -1, 0],
23106: [52, 2, 600, 23106, u'Coat_Short_Yellow_Black', u'BOUNCER_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_m_short', 1, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 5, -1, 0],
23107: [52, 2, 2600, 23107, u'Coat_Short_Purple_Black', u'WHALING_JACKET', 3, 3, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_cot_m_short', 31, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 6, -1, 0],
23108: [52, 2, 600, 23108, u'Coat_Short_Blue_Gold', u'MATEY_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_m_short', 1, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 7, -1, 0],
23109: [52, 2, 600, 23109, u'Coat_Short_Black_Checkerboard', u'BLACK_CHECKERBOARD_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_cot_m_short', 1, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 8, -1, 0],
23110: [52, 2, 600, 23110, u'Coat_Short_Brown_Stripes', u'BROWN_STRIPE_JACKET', 2, 3, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_m_short', 1, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 9, -1, 0],
23111: [52, 0, 3000, 23111, u'Coat_Short_Sea_Serpent', u'SEA_SERPENT_JACKET', 3, 3, 0, 0, 0, 0, 0, 0, 30, u'pir_t_ico_cot_m_short', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 2, -1, 10, -1, 0],
2009: [51, 2, 510, 2009, u"Swashbuckler's Pistol", u'SWASHBUCKLER_PISTOL', 2, 2, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_gun_pistol_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 2, 12201, 0, 0, 0, 0, 1, u'pir_m_hnd_gun_pistol_a', 6, 4, 13, 1, 1, 3509, 0, 0, 0, 22.5],
'columnHeadings': {u'SPECIAL_ATTACK_RANK': 39, u'SKILLBOOST_1_RANK': 27, u'AUTO_TONIC': 38, u'ITEM_ID': 3, u'ATTRIBUTE_2': 24, u'HOLIDAY': 34, u'SKILLBOOST_1': 28, u'SKILLBOOST_3': 32, u'GOLD_COST': 2, u'CONSTANT_NAME': 5, u'FEMALE_ORIENTATION_2': 40, u'POWER': 37, u'ITEM_NAME': 4, u'ATTRIBUTE_3_RANK': 25, u'ITEM_TYPE': 7, u'ITEM_SEA_INFAMY_REQ': 19, u'RATING': 41, u'SPECIAL_ATTACK': 40, u'FROM_QUEST': 10, u'FEMALE_ORIENTATION': 38, u'VERSION': 1, u'ITEM_MODEL': 34, u'SKILLBOOST_2_RANK': 29, u'ATTRIBUTE_2_RANK': 23, u'FROM_PVP': 12, u'VFX_OFFSET': 43, u'RARITY': 6, u'ATTRIBUTE_1_RANK': 21, u'ITEM_COLOR': 14, u'ITEM_NOTORIETY_REQ': 17, u'ATTRIBUTE_3': 26, u'ATTRIBUTE_1': 22, u'FROM_LOOT': 8, u'VFX_TYPE_2': 42, u'SUBTYPE': 35, u'ITEM_ICON': 15, u'STACK_LIMIT': 37, u'USE_SKILL': 36, u'BARRELS': 38, u'QUEST_REQ': 20, u'MALE_TEXTURE_ID': 37, u'SECONDARY_COLOR': 38, u'SKILLBOOST_3_RANK': 31, u'SKILLBOOST_2': 30, u'MALE_MODEL_ID': 35, u'VFX_TYPE_1': 41, u'FROM_SHOP': 9, u'CAN_DYE_ITEM': 39, u'FROM_NPC': 13, u'FROM_PROMO': 11, u'MALE_ORIENTATION': 37, u'ITEM_SUBTYPE': 35, u'FLAVOR_TEXT': 16, u'ITEM_LAND_INFAMY_REQ': 18, u'FEMALE_TEXTURE_ID': 38, u'FEMALE_MODEL_ID': 36, 'ITEM_CLASS': 0, u'MALE_ORIENTATION_2': 39, u'VELVET_ROPE': 33, u'PRIMARY_COLOR': 37, u'WEAPON_REQ': 36},
15002: [57, 2, 6, 15002, u'Remedy', u'REMEDY', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_tonic', 0, 3, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_tonic', 27, 11201, 5, 1],
15003: [57, 2, 9, 15003, u'Holy Water', u'HOLY_WATER', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_holyWater', 0, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_holyWater', 27, 11202, 5, 1],
15004: [57, 2, 15, 15004, u'Elixir', u'ELIXIR', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_elixir', 0, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_elixir', 27, 11203, 5, 1],
15005: [57, 2, 30, 15005, u'Miracle Water', u'MIRACLE_WATER', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_miracleWater', 0, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_miracleWater', 27, 11204, 5, 1],
15006: [57, 0, 500, 15006, u'Ship Repair Kit', u'SHIP_REPAIR_KIT', 2, 11, 0, 0, 0, 0, 0, 0, 1, u'sail_come_about', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 27, 11205, 5, 0],
15007: [57, 2, 60, 15007, u'Roast Pork', u'ROAST_PORK', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_porkTonic', 0, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_porkTonic', 27, 11206, 5, 1],
15008: [57, 2, 500, 15008, u'Cannoneer Draft I', u'POTION_CANNON_1', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_cannonDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11207, 5, 0],
15009: [57, 2, 1000, 15009, u'Cannoneer Draft II', u'POTION_CANNON_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_cannonDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11208, 5, 0],
15010: [57, 2, 1500, 15010, u'Cannoneer Draft III', u'POTION_CANNON_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_cannonDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11209, 5, 0],
15011: [57, 2, 500, 15011, u'Marksman Draught I', u'POTION_PISTOL_1', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_gunDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11210, 5, 0],
15012: [57, 2, 1000, 15012, u'Marksman Draught II', u'POTION_PISTOL_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_gunDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11211, 5, 0],
15013: [57, 2, 1500, 15013, u'Marksman Draught III', u'POTION_PISTOL_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_gunDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11212, 5, 0],
15014: [57, 2, 500, 15014, u'Swashbuckler Stew I', u'POTION_CUTLASS_1', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_swordDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11213, 5, 0],
15015: [57, 2, 1000, 15015, u'Swashbuckler Stew II', u'POTION_CUTLASS_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_swordDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11214, 5, 0],
15016: [57, 2, 1500, 15016, u'Swashbuckler Stew III', u'POTION_CUTLASS_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_swordDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11215, 5, 0],
15017: [57, 2, 500, 15017, u'Mystic Mixture I', u'POTION_DOLL_1', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_voodooDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11216, 5, 0],
15018: [57, 2, 1000, 15018, u'Mystic Mixture II', u'POTION_DOLL_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_voodooDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11217, 5, 0],
15019: [57, 2, 1500, 15019, u'Mystic Mixture III', u'POTION_DOLL_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_voodooDmgUp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11218, 5, 0],
15020: [57, 2, 500, 15020, u'Swift Foot I', u'POTION_SPEED_1', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_haste', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11219, 5, 0],
15021: [57, 2, 1000, 15021, u'Swift Foot II', u'POTION_SPEED_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_haste', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11220, 5, 0],
15022: [57, 2, 1500, 15022, u'Swift Foot III', u'POTION_SPEED_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_haste', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11221, 5, 0],
15023: [57, 2, 1000, 15023, u'Hardy Matey I', u'POTION_REP_1', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_repBoost', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11222, 5, 0],
15024: [57, 2, 2000, 15024, u'Hardy Matey II', u'POTION_REP_2', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_repBoost', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11223, 5, 0],
15025: [57, 2, 1000, 15025, u'Plunder Potion I', u'POTION_GOLD_1', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_gold', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11224, 5, 0],
15026: [57, 2, 2000, 15026, u'Plunder Potion II', u'POTION_GOLD_2', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_gold', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11225, 5, 0],
15027: [57, 2, 1000, 15027, u'Phantom Spirits I', u'POTION_INVIS_1', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_invisibility', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11226, 5, 0],
15028: [57, 2, 2000, 15028, u'Phantom Spirits II', u'POTION_INVIS_2', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_invisibility', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11227, 5, 0],
15029: [57, 2, 500, 15029, u'Lively Bucko Brew I', u'POTION_REGEN_1', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_healthRegeneration', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11228, 5, 0],
15030: [57, 2, 1000, 15030, u'Lively Bucko Brew II', u'POTION_REGEN_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_healthRegeneration', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11229, 5, 0],
15031: [57, 2, 1500, 15031, u'Lively Bucko Brew III', u'POTION_REGEN_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_healthRegeneration', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11230, 5, 0],
15032: [57, 2, 2000, 15032, u'Lively Bucko Brew IV', u'POTION_REGEN_4', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_healthRegeneration', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11231, 5, 0],
15033: [57, 2, 500, 15033, u"Belchin' Brew", u'POTION_BURP', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_burp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11232, 5, 0],
15034: [57, 2, 500, 15034, u'Flatulent Fizz', u'POTION_FART', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_fart', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11233, 5, 0],
15035: [57, 2, 500, 15035, u'Puke Potion', u'POTION_VOMIT', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_puke', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11234, 5, 0],
15036: [57, 0, 500, 15036, u'Big Head? Is this In-Game?', u'POTION_HEADGROW', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_headIncrease', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11235, 5, 0],
15037: [57, 2, 500, 15037, u'Ghastly Visage', u'POTION_FACECOLOR', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_faceColor', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11236, 5, 0],
15038: [57, 2, 500, 15038, u"Shrinkin' Grog", u'POTION_SHRINK', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_avatarDecrease', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11237, 5, 0],
15039: [57, 2, 500, 15039, u"Growin' Grog", u'POTION_GROW', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_avatarIncrease', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11238, 5, 0],
15040: [57, 2, 500, 15040, u'Addled Elixir', u'POTION_HEADONFIRE', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_headOnFire', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11239, 5, 0],
15041: [57, 1, 500, 15041, u'Stinger Stew', u'POTION_SCORPION', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_scorpionTransform', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11240, 5, 0],
15042: [57, 1, 500, 15042, u'Gator Grog', u'POTION_ALLIGATOR', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_alligatorTransform', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11241, 5, 0],
15043: [57, 1, 500, 15043, u'Crab Transform', u'POTION_CRAB', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_crabTransform', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11242, 5, 0],
15044: [57, 2, 500, 15044, u'Deadeye I', u'POTION_ACC_1', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_targeted', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11243, 5, 0],
15045: [57, 2, 1000, 15045, u'Deadeye II', u'POTION_ACC_2', 2, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_targeted', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11244, 5, 0],
15046: [57, 2, 1500, 15046, u'Deadeye III', u'POTION_ACC_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_targeted', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11245, 5, 0],
15047: [57, 2, 500, 15047, u"Clap o'Thunder", u'POTION_GROG', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_removeGroggy', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11246, 5, 0],
15048: [57, 2, 2000, 15048, u'Reputation Booster', u'POTION_REP_3', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_repBoost', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11247, 5, 0],
15049: [57, 2, 2000, 15049, u'Super Flatulent Fizz', u'POTION_FART_2', 3, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_gui_pot_fart', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11248, 5, 0],
15050: [57, 2, 2000, 15050, u'Staff Enchant I', u'STAFF_ENCHANT_1', 3, 11, 0, 0, 1, 0, 0, 0, 1, u'pir_t_gui_pot_fart', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11249, 5, 0],
15051: [57, 2, 2000, 15051, u'Staff Enchant II', u'STAFF_ENCHANT_2', 3, 11, 0, 0, 1, 0, 0, 0, 1, u'pir_t_gui_pot_fart', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11250, 5, 0],
15052: [57, 2, 2000, 15052, u'Summon Chicken', u'POTION_SUMMON_CHICKEN', 3, 11, 0, 0, 0, 0, 0, 0, 1, u'pir_t_gui_pot_burp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11251, 5, 0],
15053: [57, 2, 2000, 15053, u"Jack's Brew", u'POTION_REP_COMP', 1, 11, 0, 0, 0, 1, 0, 0, 1, u'pir_t_gui_pot_repBoost', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11252, 5, 0],
15054: [57, 2, 2000, 15054, u'Summon Monkey', u'POTION_SUMMON_MONKEY', 3, 11, 0, 0, 0, 0, 0, 0, 1, u'pir_t_gui_pot_burp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11253, 5, 0],
15055: [57, 2, 2000, 15055, u'Summon Wasp', u'POTION_SUMMON_WASP', 3, 11, 0, 0, 0, 0, 0, 0, 1, u'pir_t_gui_pot_burp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11254, 5, 0],
15056: [57, 2, 2000, 15056, u'Summon Dog', u'POTION_SUMMON_DOG', 3, 11, 0, 0, 0, 0, 0, 0, 1, u'pir_t_gui_pot_burp', 0, 1, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'bottle_high', 31, 11255, 5, 0],
31501: [54, 2, 300, 31501, u'Golden Lip Ring', u'GOLDEN_LIP_RING', 2, 3, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_brow_lip_ring', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 0],
31502: [54, 2, 250, 31502, u'Silver Lip Ring', u'SILVER_LIP_RING', 2, 3, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_brow_lip_ring', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 0],
31503: [54, 2, 250, 31503, u'Golden Lip Spike', u'GOLDEN_LIP_SPIKE', 2, 3, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_spike', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 1, 0],
31504: [54, 2, 200, 31504, u'Silver Lip Spike', u'SILVER_LIP_SPIKE', 2, 3, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_spike', 3, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 2, 0],
31505: [54, 2, 600, 31505, u'Golden Double Lip Ring', u'GOLDEN_DOUBLE_LIP_RING', 3, 3, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_double_ring', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 1, 1],
31506: [54, 2, 500, 31506, u'Silver Double Lip Ring', u'SILVER_DOUBLE_LIP_RING', 3, 3, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_double_ring', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 2, 2],
23005: [52, 2, 1000, 23005, u'Coat_Long_Fabric_Leather', u'FOREST_LONG_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 41, u'pir_t_ico_cot_m_long', 9, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 4, -1, 0],
31200: [54, 1, 55, 31200, u'Fake Moustache Nose', u'FAKE_MOUSTACHE_NOSE', 3, 2, 0, 0, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_nose_loop', 9, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 10, 0, 0],
23011: [52, 1, 3000, 23011, u'Coat_Long_Blue_Yellow', u'FLEET_LONG_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_m_long', 3, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 1, -1, 10, -1, 0],
31601: [54, 1, 750, 31601, u'Ruby Lip Ring', u'RUBY_LIP_RING', 3, 3, 0, 0, 1, 0, 0, 0, 3, u'pir_t_ico_jwl_brow_lip_ring', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3, 3],
23501: [52, 2, 1000, 23501, u'Coat_Tails_Patchwork', u'PATCHWORK_RIDING_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_cot_f_long', 7, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 1],
23502: [52, 2, 1000, 23502, u'Coat_Tails_2Button', u'LEATHER_RIDING_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_cot_f_long', 1, 24, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 0],
23503: [52, 2, 1000, 23503, u'Coat_Tails_3Button', u'HIGHWAY_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_f_long', 31, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 0],
23504: [52, 2, 1000, 23504, u'Coat_Tails_Pockets', u'MILITIA_RIDING_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 49, u'pir_t_ico_cot_f_long', 5, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
23505: [52, 2, 1000, 23505, u'Coat_Tails_Browngold', u'GOLD_THREAD_RIDING_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 9, u'pir_t_ico_cot_f_long', 3, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
23506: [52, 0, 1000, 23506, u'Coat_Tails_Black_White', u'IMPROVISED_RIDING_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 10, u'pir_t_ico_cot_f_long', 7, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
23507: [52, 1, 3000, 23507, u'Coat_Tails_Blue_White', u'VALHALLA_RIDING_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_f_long', 23, 36, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, -1, 1, -1, 6, 0],
23508: [52, 0, 3000, 23508, u'Coat_Tails_Red_White', u'VICTORIAN_RIDING_COAT', 3, 3, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_cot_f_long', 3, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 7, 0],
23509: [52, 0, 3000, 23509, u'Coat_Tails_Purple', u'ROYALIST_RIDING_COAT', 3, 3, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_cot_f_long', 32, 32, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 8, 0],
23510: [52, 2, 1000, 23510, u'Coat_Tails_Advanced_Outfit', u'ADVENTURE_RIDING_COAT', 2, 3, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_cot_f_long', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 9, 0],
23511: [52, 1, 3000, 23511, u'Coat_Tails_Purple_Gold_Trim', u'ROYAL_RIDING_COAT_TRIM', 3, 3, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_long', 31, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, -1, 1, -1, 10, 0],
23512: [52, 1, 3000, 23512, u'Coat_Tails_Advanced_Outfit_Trim', u'ADVENTURE_RIDING_COAT_TRIM', 3, 3, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_cot_f_long', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, -1, 1, -1, 11, 0],
23513: [52, 1, 3000, 23513, u'Coat_Tails_Red_Gold_White', u'VICTORIAN_RIDING_COAT_GOLD', 3, 3, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_cot_f_long', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, -1, 1, -1, 12, 0],
23514: [52, 0, 3000, 23514, u'Coat_Frock_Rogue_Privateer', u'ROGUE_PRIVATEER_FROCK_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 18, u'pir_t_ico_cot_f_long', 47, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, -1, 1, -1, 13, 0],
23515: [52, 0, 1000, 23515, u'Coat_Frock_Scourge', u'SCOURGE_FROCK_COAT', 2, 3, 0, 0, 0, 0, 0, 0, 40, u'pir_t_ico_cot_f_long', 51, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, -1, 1, -1, 14, 0],
23516: [52, 0, 3000, 23516, u'Coat_Frock_Sea_Serpent', u'SEA_SERPENT_FROCK_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 30, u'pir_t_ico_cot_f_long', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, -1, 1, -1, 15, 0],
23517: [52, 0, 1000, 23517, u'Coat_Frock_Zombie_Pirate', u'ZOMBIE_PIRATE_FROCK_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_long', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, -1, 1, -1, 16, 0],
23518: [52, 0, 1000, 23518, u'Coat_Frock_Zombies_Pirate', u'ZOMBIES_PIRATE_FROCK_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_long', 52, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, -1, 1, -1, 16, 0],
23551: [52, 2, 1000, 23551, u'Coat_Frock_Crocodile', u'CROCODILE_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_cot_f_short', 6, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0, 0],
23552: [52, 2, 1000, 23552, u'Coat_Frock_2Button', u'WOOLEN_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_short', 2, 17, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 1, 0],
23553: [52, 2, 1000, 23553, u'Coat_Frock_Pockets', u'LEATHER_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_f_short', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 2, 0],
23554: [52, 2, 1000, 23554, u'Coat_Frock_Stripes', u'SUEDE_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_cot_f_short', 3, 21, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 3, 0],
23555: [52, 2, 1000, 23555, u'Coat_Frock_BlueGold', u'HARBOR_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 16, u'pir_t_ico_cot_f_short', 1, 27, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 4, 0],
23556: [52, 0, 3000, 23556, u'Coat_Frock_Blue_Black', u'TOURIST_FROCK_COAT', 3, 3, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_cot_f_short', 3, 29, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 5, 0],
23557: [52, 0, 1000, 23557, u'Coat_Frock_Gold_Black', u'BORROWED_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 22, u'pir_t_ico_cot_f_short', 6, 23, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 6, 0],
23558: [52, 0, 1000, 23558, u'Coat_Frock_Grey_Gold', u'TAILORED_FROCK_COAT', 2, 3, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_short', 5, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 7, 0],
23559: [52, 2, 1500, 23559, u'Coat_Frock_Gold_Filagree', u'GOLD_FAD_FROCK_COAT', 3, 3, 0, 1, 0, 0, 1, 0, 1, u'pir_t_ico_cot_f_short', 12, 40, 0, 7, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 8, 0],
23560: [52, 0, 3000, 23560, u'Coat_Frock_French_Assassin', u'FRENCH_ASSASSIN_FROCK_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_short', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, -1, 2, -1, 9, 0],
23561: [52, 0, 3000, 23561, u'Coat_Frock_Baroness', u'BARONESS_FROCK_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 47, u'pir_t_ico_cot_f_short', 45, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, -1, 2, -1, 10, 0],
23562: [52, 0, 1000, 23562, u'Coat_Frock_Diplomat', u'DIPLOMAT_FROCK_COAT', 2, 3, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_f_short', 60, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, -1, 2, -1, 11, 0],
23563: [52, 0, 3000, 23563, u'Coat_Frock_Prince', u'PRINCE_FROCK_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_cot_f_short', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -1, 2, -1, 12, 0],
23601: [52, 0, 3000, 23601, u'Coat_Lady_Closed_China_Warrior', u'CHINA_WARROIR_CLOSED_LADY_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_cot_f_long', 53, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, -1, 4, -1, 1, 0],
7251: [51, 1, 0, 7251, u"Maitre's Epee", u'EPEE_1', 1, 10, 0, 0, 0, 0, 0, 0, 1, u'pir_t_ico_swd_epee_a', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'pir_m_hnd_swd_epee_a', 25, 0, -3, 0, 0, 0, 0, 0, 0, -1.5],
23701: [52, 2, 0, 23701, u'Coat_NPC_Navy_Red', u'NAVY_RED_COAT', 3, 3, 0, 0, 0, 0, 0, 1, 3, u'pir_t_ico_cot_m_long', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0],
23702: [52, 2, 0, 23702, u'Coat_NPC_EITC_Black', u'EITC_BLACK_COAT', 3, 3, 0, 0, 0, 0, 0, 1, 16, u'pir_t_ico_cot_m_long', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0],
32001: [54, 2, 200, 32001, u'Golden Band', u'GOLDEN_BAND', 2, 4, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ring_no_gem', 17, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 1, 0],
32002: [54, 2, 200, 32002, u'Silver Band', u'SILVER_BAND', 2, 4, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_ring_no_gem', 4, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 2, 0],
32003: [54, 2, 500, 32003, u'Golden Ruby Ring', u'GOLDEN_RUBY_RING', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 3],
32004: [54, 2, 400, 32004, u'Silver Ruby Ring', u'SILVER_RUBY_RING', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_jwl_ring_gem', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 3],
32005: [54, 2, 600, 32005, u'Golden Amethist Ring', u'GOLDEN_AMETHIST_RING', 3, 4, 0, 1, 0, 0, 0, 0, 26, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 4],
32006: [54, 2, 500, 32006, u'Silver Amethist Ring', u'SILVER_AMETHIST_RING', 3, 4, 0, 1, 0, 0, 0, 0, 26, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 4],
32007: [54, 2, 500, 32007, u'Golden Sapphire Ring', u'GOLDEN_SAPPHIRE_RING', 3, 4, 0, 1, 0, 0, 0, 0, 12, u'pir_t_ico_jwl_ring_gem', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 5],
32008: [54, 2, 400, 32008, u'Silver Sapphire Ring', u'SILVER_SAPPHIRE_RING', 3, 4, 0, 1, 0, 0, 0, 0, 12, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 5],
32009: [54, 2, 400, 32009, u'Golden Turquoise Ring', u'GOLDEN_TURQUOISE_RING', 3, 4, 0, 1, 0, 0, 0, 0, 33, u'pir_t_ico_jwl_ring_gem', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 6],
32010: [54, 2, 300, 32010, u'Silver Turquoise Ring', u'SILVER_TURQUOISE_RING', 3, 4, 0, 1, 0, 0, 0, 0, 33, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 6],
32011: [54, 2, 600, 32011, u'Golden Emerald Ring', u'GOLDEN_EMERALD_RING', 3, 4, 0, 1, 0, 0, 0, 0, 13, u'pir_t_ico_jwl_ring_gem', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 7],
32012: [54, 2, 500, 32012, u'Silver Emerald Ring', u'SILVER_EMERALD_RING', 3, 4, 0, 1, 0, 0, 0, 0, 13, u'pir_t_ico_jwl_ring_gem', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 7],
32013: [54, 2, 800, 32013, u'Golden Onyx Ring', u'GOLDEN_ONYX_RING', 3, 4, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_jwl_ring_gem', 13, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 1, 8],
32014: [54, 2, 700, 32014, u'Silver Onyx Ring', u'SILVER_ONYX_RING', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_jwl_ring_gem', 8, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 2, 8],
32015: [54, 2, 600, 32015, u'Golden Double Band', u'GOLDEN_DOUBLE_BAND', 3, 4, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_ring_doubleband', 16, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 1, 1],
32016: [54, 2, 600, 32016, u'Silver Double Band', u'SILVER_DOUBLE_BAND', 3, 4, 0, 1, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_ring_doubleband', 8, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 11, 2, 2],
32101: [54, 1, 3000, 32101, u'Golden Knuckles', u'GOLDEN_KNUCKLES', 3, 4, 0, 0, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_knuckles', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 1, 1],
32102: [54, 1, 3000, 32102, u'Silver Knuckles', u'SILVER_KNUCKLES', 3, 4, 0, 0, 0, 0, 0, 0, 37, u'pir_t_ico_jwl_knuckles', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 2, 2],
24001: [52, 2, 300, 24001, u'PantsLongTuckedLeatherGoldButtonsPatch', u'LEATHER_HIGHWATERS', 1, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_m_long_tucked', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 0],
24002: [52, 2, 300, 24002, u'PantsLongTuckedLeatherGoldButtons', u'COTTON_HIGHWATERS', 1, 4, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_long_tucked', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1],
24003: [52, 2, 300, 24003, u'PantsLongTuckedCottonSidePocket', u'DENIM_HIGHWATERS', 1, 4, 1, 1, 0, 0, 0, 0, 24, u'pir_t_ico_pnt_m_long_tucked', 23, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 2, -1, 0],
24004: [52, 2, 300, 24004, u'PantsLongTuckedLeatherButtonFront', u'LINEN_HIGHWATERS', 1, 4, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_long_tucked', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 3, -1, 1],
24005: [52, 2, 700, 24005, u'PantsLongTuckedLeatherStripes', u'CIRCUS_BREECHES', 2, 4, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_long_tucked', 13, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0],
24006: [52, 2, 700, 24006, u'PantsLongTuckedLeatherKnives', u'BUTTONED_BREECHES', 2, 4, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_m_long_tucked', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 5, -1, 0],
24007: [52, 1, 700, 24007, u'PantsLongTuckedBlackWithYellowTrim', u'GOLD_TRIM_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_long_tucked', 3, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3922, 0, -1, 6, -1, 0],
24008: [52, 1, 700, 24008, u'PantsLongTuckedBlueStripes', u'BARD_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_pnt_m_long_tucked', 2, 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, 0, -1, 7, -1, 0],
24009: [52, 2, 1500, 24009, u'PantsLongTuckedBrownSideButtons', u'F44_DUBLOON_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_m_long_tucked', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 8, -1, 0],
24010: [52, 1, 700, 24010, u'PantsLongTuckedGreyGreen', u'DEPUTYS_EX_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 44, u'pir_t_ico_pnt_m_long_tucked', 6, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3903, 0, -1, 9, -1, 0],
24011: [52, 1, 1500, 24011, u'PantsLongTuckedRedSideBones', u'FESTIVAL_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 40, u'pir_t_ico_pnt_m_long_tucked', 3, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, 0, -1, 10, -1, 0],
24012: [52, 1, 1500, 24012, u'PantsLongTuckedRedYellowStripes', u'MATADOR_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 36, u'pir_t_ico_pnt_m_long_tucked', 6, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, 0, -1, 11, -1, 0],
24013: [52, 2, 1500, 24013, u'PantsLongTuckedAdvancedOutfit', u'ADVENTURE_BREECHES', 3, 4, 0, 0, 1, 0, 0, 0, 43, u'pir_t_ico_pnt_m_long_tucked', 8, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 12, -1, 0],
24014: [52, 1, 1500, 24014, u'PantsLongTuckedVioletYellowStripes', u'VIOLET_YELLOW_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_pnt_m_long_tucked', 6, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, 0, -1, 13, -1, 0],
24015: [52, 1, 700, 24015, u'PantsLongTuckedGreyGreen', u'GREY_GREEN_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_pnt_m_long_tucked', 6, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, 0, -1, 14, -1, 0],
24016: [52, 1, 1500, 24016, u'PantsLongTuckedSaint_Patricks', u'SAINT_PATRICKS_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 41, u'pir_t_ico_pnt_m_long_tucked', 6, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, -1, 15, -1, 0],
24017: [52, 1, 700, 24017, u'PantsLongTuckedValentines', u'VALENTINES_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_long_tucked', 6, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, -1, 16, -1, 0],
24018: [52, 0, 1500, 24018, u'PantsLongTuckedFrenchAssassin', u'FRENCH_ASSASSIN_BREECHES', 3, 4, 0, 0, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_m_long_tucked', 43, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3908, 0, -1, 17, -1, 0],
24019: [52, 0, 1500, 24019, u'PantsLongTuckedBaron', u'BARON_BREECHES', 3, 4, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_m_long_tucked', 44, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3912, 0, -1, 18, -1, 0],
24020: [52, 0, 700, 24020, u'PantsLongTuckedBountyhunter', u'BOUNTYHUNTER_BREECHES', 2, 4, 0, 0, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_m_long_tucked', 55, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, 0, -1, 19, -1, 0],
24021: [52, 0, 700, 24021, u'PantsLongTuckedChinaWarrior', u'CHINA_WARRIOR_BREECHES', 2, 4, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_m_long_tucked', 53, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, 0, -1, 20, -1, 0],
24022: [52, 0, 700, 24022, u'PantsLongTuckedDiplomat', u'DIPLOMAT_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_long_tucked', 61, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, 0, -1, 21, -1, 0],
24023: [52, 0, 1500, 24023, u'PantsLongTuckedPeacock', u'PEACOCK_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 25, u'pir_t_ico_pnt_m_long_tucked', 50, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 22, -1, 0],
24024: [52, 0, 1500, 24024, u'PantsLongTuckedPrince', u'PRINCE_BREECHES', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_long_tucked', 46, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, -1, 23, -1, 0],
24025: [52, 0, 700, 24025, u'PantsLongTuckedRoguePrivateer', u'ROGUE_PRIVATEER_BREECHES', 2, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_long_tucked', 47, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3910, 0, -1, 24, -1, 0],
24026: [52, 0, 1500, 24026, u'PantsLongTuckedSeaSerpent', u'SEA_SERPENT_BREECHES', 3, 4, 0, 0, 0, 0, 0, 0, 16, u'pir_t_ico_pnt_m_long_tucked', 49, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3917, 0, -1, 25, -1, 0],
24027: [52, 0, 700, 24027, u'PantsLongTuckedZombiePirate', u'ZOMBIE_PIRATE_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_m_long_tucked', 52, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, -1, 26, -1, 0],
24028: [52, 0, 700, 24028, u'PantsLongTuckedZombiesPirate', u'ZOMBIES_PIRATE_BREECHES', 2, 4, 0, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_m_long_tucked', 52, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3913, 0, -1, 26, -1, 0],
24050: [52, 2, 700, 24050, u'PantsLongLoosePlain', u'COTTON_TROUSERS', 1, 4, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_untucked', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -1, 1],
24051: [52, 2, 700, 24051, u'PantsLongLooseCeltic', u'CELTIC_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 24, u'pir_t_ico_pnt_m_untucked', 1, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 1, -1, 0],
24052: [52, 2, 300, 24052, u'PantsLongLooseTwoTone', u'SAIL_TROUSERS', 1, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_m_untucked', 16, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 2, -1, 0],
24053: [52, 2, 700, 24053, u'PantsLongLooseOneTone', u'LINEN_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_untucked', 2, 4, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 3, -1, 1],
24054: [52, 2, 300, 24054, u'PantsLongLooseLeatherPocketTrim', u'POTATO_SACK_TROUSERS', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_untucked', 1, 6, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 4, -1, 0],
24055: [52, 2, 700, 24055, u'PantsLongLooseSuedeSkullsSnaps', u'KAKI_SKULL_SNAP_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_untucked', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 5, -1, 0],
24056: [52, 2, 700, 24056, u'PantsLongLooseSuedeSkullsNoCuffs', u'DENIM_SKULL_SNAP_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 24, u'pir_t_ico_pnt_m_untucked', 2, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 6, -1, 0],
24057: [52, 2, 700, 24057, u'PantsLongLooseSuedeSkullsNoStripe', u'FADED_SKULL_SNAP_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 39, u'pir_t_ico_pnt_m_untucked', 5, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 7, -1, 0],
24058: [52, 2, 300, 24058, u'PantsLongLoosePlain1', u'STOWAWAY_TROUSERS', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_untucked', 16, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 8, -1, 0],
24059: [52, 2, 700, 24059, u'PantsLongLoosePlain2', u'DENIM_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 24, u'pir_t_ico_pnt_m_untucked', 2, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 9, -1, 0],
24060: [52, 2, 700, 24060, u'PantsLongLooseLeatherPlain', u'SWAB_THE_DECK_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_m_untucked', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 10, -1, 0],
24061: [52, 2, 700, 24061, u'PantsLongLooseZombie', u'ZOMBIE_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_m_untucked', 6, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 11, -1, 0],
24062: [52, 2, 700, 24062, u'PantsLongLooseBlueGreenSash', u'ARABIAN_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 44, u'pir_t_ico_pnt_m_untucked', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 12, -1, 0],
24063: [52, 2, 700, 24063, u'PantsLongLooseGreenBronzeSash', u'SMITHY_TROUSERS', 2, 4, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_pnt_m_untucked', 6, 16, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 13, -1, 0],
24064: [52, 2, 700, 24064, u'PantsLongLooseBlueBasicOutfit', u'RECRUIT_TROUSERS', 2, 4, 0, 0, 1, 0, 0, 0, 44, u'pir_t_ico_pnt_m_untucked', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 14, -1, 0],
24065: [52, 2, 700, 24065, u'PantsLongLooseItermediateOutfit', u'TRAVELERS_TROUSERS', 2, 4, 0, 0, 1, 0, 0, 0, 45, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 15, -1, 0],
24066: [52, 1, 1500, 24066, u'PantsLongLooseGreenEmbroidery', u'GREEN_EMBROIDERED_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_m_untucked', 6, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, 1, -1, 16, -1, 0],
24067: [52, 1, 1500, 24067, u'PantsLongLooseChaps', u'CHAPS_TROUSERS', 3, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_m_untucked', 13, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 17, -1, 0],
24068: [52, 1, 1500, 24068, u'PantsLongLooseBrownPatches', u'BROWN_PATCHED_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_pnt_m_untucked', 7, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, 1, -1, 18, -1, 0],
24069: [52, 1, 1500, 24069, u'PantsLongLooseGreenSilk', u'GREEN_SILK_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_pnt_m_untucked', 12, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3915, 1, -1, 19, -1, 0],
24070: [52, 1, 1500, 24070, u'PantsLongLooseBlackGold', u'BLACK_GOLD_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 5, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, 1, -1, 20, -1, 0],
24071: [52, 1, 1500, 24071, u'PantsLongLooseXmas', u'XMAS_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 21, 1, -1, 21, -1, 0],
24072: [52, 1, 1500, 24072, u'PantsLongLooseMardi_Gras', u'MARDI_GRAS_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 1, -1, 22, -1, 0],
24073: [52, 1, 1500, 24073, u'PantsLongLooseBlueGoldTrim', u'BLUE_GOLD_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 4, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 23, -1, 0],
24074: [52, 1, 1500, 24074, u'PantsLongLooseTanSideStitch', u'TAN_SIDE_STRIPE_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 24, -1, 0],
24075: [52, 1, 1500, 24075, u'PantsLongLooseTanYellowTop', u'TAN_YELLOW_TOP_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 25, -1, 0],
24076: [52, 1, 1500, 24076, u'PantsLongLooseWhiteGreenStripes', u'WHITE_GREEN_STRIPE_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 17, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 26, -1, 0],
24077: [52, 1, 1500, 24077, u'PantsLongLooseWhiteSidenet', u'WHITE_SIDE_NET_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_untucked', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 27, -1, 0],
24078: [52, 0, 1500, 24078, u'PantsLongTuckedBarbryCorsair', u'BARBARY_CORSAIR_TROUSERS', 3, 4, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_m_untucked', 56, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, 1, -1, 28, -1, 0],
24079: [52, 0, 1500, 24079, u'PantsLongTuckedScourge', u'SCOURGE_TROUSERS', 3, 4, 0, 0, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_m_untucked', 51, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3905, 1, -1, 29, -1, 0],
24080: [52, 0, 1500, 24080, u'PantsLongTuckedWildfire', u'WILDFIRE_TROUSERS', 3, 4, 0, 1, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_m_untucked', 54, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 1, -1, 30, -1, 0],
24101: [52, 0, 200, 24101, u'Shorts_ThreeSideButtons', u'DINGY_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 17, u'pir_t_ico_pnt_m_shorts_half', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 0, -1, 0],
24102: [52, 0, 200, 24102, u'Shorts_ThreeTies', u'BEACH_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_shorts_half', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 1, -1, 0],
24103: [52, 0, 400, 24103, u'Shorts_OneButtonFlap', u'COTTON_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 2, u'pir_t_ico_pnt_m_shorts_half', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 2, -1, 1],
24104: [52, 0, 400, 24104, u'Shorts_ThreeBuckle', u'BUCKLED_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 20, u'pir_t_ico_pnt_m_shorts_half', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 3, -1, 0],
24105: [52, 2, 200, 24105, u'Shorts_BrownCloth', u'SACK_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_m_shorts_half', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, -1, 4, -1, 0],
24106: [52, 0, 400, 24106, u'Shortpants_TwoToneSash', u'SWIM_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_pnt_m_shorts_boxer', 34, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 0, -1, 0],
24107: [52, 0, 400, 24107, u'Shortpants_SidePocket', u'EXPLORER_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_shorts_boxer', 12, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 1, -1, 0],
24108: [52, 0, 200, 24108, u'Shortpants_SimpleCanvas', u'CANVAS_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_m_shorts_boxer', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 2, -1, 1],
24109: [52, 0, 400, 24109, u'Shortpants_SideLeather', u'BEACH_COMBER_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_m_shorts_boxer', 34, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 3, -1, 0],
24110: [52, 0, 400, 24110, u'Shortpants_BlueWhiteTop', u'OCEAN_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 4, u'pir_t_ico_pnt_m_shorts_boxer', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 4, -1, 0],
24111: [52, 0, 400, 24111, u'Shortpants_BrownCloth', u'BAG_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_m_shorts_boxer', 5, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 5, -1, 0],
24112: [52, 0, 400, 24112, u'Shortpants_LightBrown', u'BOXERS', 2, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_shorts_boxer', 1, 8, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 6, -1, 0],
24113: [52, 0, 400, 24113, u'Shortpants_Rust', u'SWAMP_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_m_shorts_boxer', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 7, -1, 0],
24114: [52, 0, 400, 24114, u'Shortpants_Slate', u'BUTTONED_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 44, u'pir_t_ico_pnt_m_shorts_boxer', 2, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, -1, 8, -1, 0],
24115: [52, 1, 400, 24115, u'Shortpants_FancyLightBrown', u'FANCY_LIGHT_BROWN_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_m_shorts_boxer', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3902, 3, -1, 9, -1, 0],
24513: [52, 2, 1500, 24513, u'Capris_Purple', u'PIN_CAPRIS', 3, 4, 1, 1, 0, 0, 0, 0, 47, u'pir_t_ico_pnt_f_short_pant', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 12, 0],
24811: [52, 2, 400, 24811, u'Skirt_Bartender', u'BARTENDER_SKIRT', 3, 4, 0, 0, 0, 0, 0, 1, 7, u'pir_t_ico_pnt_f_skirt', 42, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 4, -1, 0, 0],
24519: [52, 2, 300, 24519, u'Shorts_LightCloth', u'LINEN_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_f_shorts', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 2, 1],
24301: [52, 2, 0, 24301, u'Apron_Tavern', u'TAVERN_APRON', 2, 4, 0, 0, 0, 0, 0, 1, 44, u'pir_t_ico_pnt_m_shorts_boxer', 42, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 0, -1, 0],
24302: [52, 2, 0, 24302, u'Apron_Dark', u'DARK_APRON', 2, 4, 0, 0, 0, 0, 0, 1, 44, u'pir_t_ico_pnt_m_shorts_boxer', 42, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, -1, 1, -1, 0],
30001: [54, 2, 400, 30001, u'Golden Brow Spike', u'GOLDEN_BROW_SPIKE', 2, 0, 0, 1, 0, 0, 0, 0, 50, u'pir_t_ico_jwl_spike', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0],
8001: [51, 2, 30, 8001, u'Grenade Pouch', u'GRENADE_POUCH', 2, 5, 0, 1, 1, 0, 0, 0, 1, u'pir_t_ico_bom_grenade', u'Grenades are highly lethal explosives. They are effective against large crowds of enemies!', 10, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, u'grenade', 39, 0, 10, 0, 0, 0, 0, 0, 0, 5],
15001: [57, 2, 3, 15001, u'Tonic', u'TONIC', 1, 11, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pot_tonic', 0, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, u'pir_m_inv_pot_tonic', 27, 11200, 5, 1],
24501: [52, 2, 300, 24501, u'Capris_Patchwork', u'PATCHWORK_CAPRIS', 1, 4, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 0, 0],
24502: [52, 2, 300, 24502, u'Capris_FourButtonFlap', u'TAILORED_CAPRIS', 1, 4, 1, 1, 0, 0, 0, 0, 23, u'pir_t_ico_pnt_f_short_pant', 2, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 1, 0],
24503: [52, 2, 700, 24503, u'Capris_FrontTies', u'MISFITTED_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 5, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 2, 0],
24504: [52, 2, 700, 24504, u'Capris_LargeSideStripe', u'SIDE_STRIPE_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_short_pant', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 3, 0],
24505: [52, 2, 300, 24505, u'Capris_Stitch', u'IMPROVISED_CAPRIS', 1, 4, 1, 1, 0, 0, 0, 0, 1, u'pir_t_ico_pnt_f_short_pant', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 4, 0],
24506: [52, 2, 700, 24506, u'Capris_Striped', u'CANDYSTRIPE_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 28, u'pir_t_ico_pnt_f_short_pant', 13, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 5, 0],
24507: [52, 2, 1500, 24507, u'Capris_Red', u'PARADE_CAPRIS', 3, 4, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_pnt_f_short_pant', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 6, 0],
24508: [52, 2, 700, 24508, u'Capris_Blue_Gold_Buttons', u'RUNNERS_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 43, u'pir_t_ico_pnt_f_short_pant', 5, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 7, 0],
24509: [52, 2, 1500, 24509, u'Capris_Bright_Red', u'RED_VELVET_CAPRIS', 3, 4, 1, 1, 0, 0, 0, 0, 40, u'pir_t_ico_pnt_f_short_pant', 23, 25, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 8, 0],
24510: [52, 2, 700, 24510, u'Capris_Brown', u'CASUAL_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_pnt_f_short_pant', 1, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 9, 0],
24511: [52, 2, 700, 24511, u'Capris_Green_Gold_Buttons', u'PATTYS_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 5, u'pir_t_ico_pnt_f_short_pant', 21, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 10, 0],
24512: [52, 2, 700, 24512, u'Capris_Green_Stripes', u'COURT_CAPRIS', 2, 4, 1, 1, 0, 0, 0, 0, 43, u'pir_t_ico_pnt_f_short_pant', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 11, 0],
23200: [52, 0, 3000, 23200, u'Coat_Closed_China_Warrior', u'CHINA_WARRIOR_COAT', 3, 3, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_cot_m_long', 53, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3921, 4, -1, 1, -1, 0],
24514: [52, 2, 700, 24514, u'Capris_Blue_Basic_Outfit', u'RECRUIT_CAPRIS', 2, 4, 0, 0, 1, 0, 0, 0, 44, u'pir_t_ico_pnt_f_short_pant', 1, 15, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 13, 0],
24515: [52, 2, 1500, 24515, u'Capris_Advanced_Outfit', u'ADVENTURE_CAPRIS', 3, 4, 0, 0, 1, 0, 0, 0, 32, u'pir_t_ico_pnt_f_short_pant', 8, 35, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 14, 0],
24516: [52, 2, 700, 24516, u'Capris_Intermediate_Outfit', u'TRAVELERS_CAPRIS', 2, 4, 0, 0, 1, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 1, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, -1, 15, 0],
24517: [52, 2, 700, 24517, u'Shorts_Patchwork', u'PATCHWORK_SHORTS', 1, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_shorts', 7, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 0, 0],
24518: [52, 2, 700, 24518, u'Shorts_FrontTie', u'TIE_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 12, u'pir_t_ico_pnt_f_shorts', 1, 0, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 1, 0],
23201: [52, 0, 3000, 23201, u'Coat_Closed_Diplomat', u'DIPLOMAT_COAT', 3, 3, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_cot_m_long', 59, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3919, 4, -1, 2, -1, 0],
24520: [52, 2, 700, 24520, u'Shorts_SideButtons', u'FOREST_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 38, u'pir_t_ico_pnt_f_shorts', 9, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 3, 0],
24521: [52, 2, 700, 24521, u'Shorts_SideTies', u'SIDELACE_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 39, u'pir_t_ico_pnt_f_shorts', 3, 5, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 4, 0],
24522: [52, 2, 700, 24522, u'Shorts_Zombie', u'ZOMBIE_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_pnt_f_shorts', 6, 13, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 5, 0],
24523: [52, 0, 700, 24523, u'Shorts_Green_Sidebutton', u'JUNGLE_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 45, u'pir_t_ico_pnt_f_shorts', 9, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 6, 0],
24524: [52, 1, 700, 24524, u'Shorts_Blue_Stripes', u'LAGOON_SHORTS', 2, 4, 0, 1, 0, 0, 0, 0, 16, u'pir_t_ico_pnt_f_shorts', 1, 12, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3904, -1, 1, -1, 7, 0],
24525: [52, 0, 700, 24525, u'Shorts_BrownSilver', u'CASUAL_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_shorts', 2, 7, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 8, 0],
24526: [52, 0, 700, 24526, u'Shorts_PinkGold', u'OLD_ROYAL_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 36, u'pir_t_ico_pnt_f_shorts', 3, 18, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 9, 0],
24527: [52, 1, 1500, 24527, u'Shorts_PurpleGold', u'EMPRESS_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 47, u'pir_t_ico_pnt_f_shorts', 8, 22, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3901, -1, 1, -1, 10, 0],
24528: [52, 0, 700, 24528, u'Shorts_RedBlack', u'MIDNIGHT_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 32, u'pir_t_ico_pnt_f_shorts', 23, 26, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 11, 0],
24529: [52, 0, 700, 24529, u'Shorts_RedGold', u'CHERRY_SHORTS', 2, 4, 1, 1, 0, 0, 0, 0, 3, u'pir_t_ico_pnt_f_shorts', 23, 28, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 12, 0],
24530: [52, 1, 700, 24530, u'Shorts_FancyBrownSilver', u'BROWN_SILVER_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_shorts', 2, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3920, -1, 1, -1, 13, 0],
24531: [52, 1, 700, 24531, u'Shorts_Green_Purple', u'GREEN_PURPLE_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 48, u'pir_t_ico_pnt_f_shorts', 3, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3916, -1, 1, -1, 14, 0],
24532: [52, 1, 700, 24532, u'Shorts_Green_Embroidery', u'GREEN_EMBROIDERY_SHORTS', 3, 4, 1, 1, 0, 0, 0, 0, 8, u'pir_t_ico_pnt_f_shorts', 9, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 1, -1, 15, 0],
24533: [52, 1, 700, 24533, u'Shorts_PinkGoldTrim', u'PINK_GOLD_TRIM_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 7, u'pir_t_ico_pnt_f_shorts', 23, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3914, -1, 1, -1, 16, 0],
24534: [52, 1, 700, 24534, u'Shorts_BrownSilverButton', u'BROWN_SILVER_BUTTON_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 36, u'pir_t_ico_pnt_f_shorts', 9, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3918, -1, 1, -1, 17, 0],
24535: [52, 1, 700, 24535, u'Shorts_RedSilk', u'RED_SILK_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 3, u'pir_t_ico_pnt_f_shorts', 23, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3907, -1, 1, -1, 18, 0],
24536: [52, 1, 1500, 24536, u'Capris_GoldTrim', u'GOLD_TRIM_CAPRIS', 3, 4, 0, 1, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_short_pant', 8, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3909, -1, 0, -1, 16, 0],
24537: [52, 1, 700, 24537, u'Shorts_Mardi_Gras', u'MARDI_GRAS_SHORTS', 3, 4, 0, 1, 0, 0, 0, 0, 42, u'pir_t_ico_pnt_f_shorts', 23, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, -1, 1, -1, 19, 0],
24538: [52, 0, 700, 24538, u'Shorts_Bountyhunter', u'BOUNTYHUNTER_SHORTS', 2, 4, 0, 0, 0, 0, 0, 0, 31, u'pir_t_ico_pnt_f_shorts', 55, 20, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3911, -1, 1, -1, 20, 0],
24539: [52, 0, 1500, 24539, u'Shorts_Barbary_Corsair', u'BARBARY_CORSAIR_SHORTS', 3, 4, 0, 0, 0, 0, 0, 0, 29, u'pir_t_ico_pnt_f_shorts', 56, 30, 0, 0, '', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3906, -1, 1, -1, 21, 0],
3818: [51, 2, 2260, 3818, u'Shaman Doll', u'SHAMAN_DOLL', 4, 3, 1, 0, 0, 0, 0, 0, 1, u'pir_t_ico_dol_mojo_c', 0, 19, 0, 0, '', 0, 0, 0, 0, 0, 0, 3, 12603, 2, 12602, 0, 0, 1, u'pir_m_hnd_dol_mojo_c', 13, 21, 37, 0, 2, 2304, 12, 0, 0, 47.5]}
| 244.944444
| 1,297
| 0.541794
| 102,611
| 436,491
| 2.132023
| 0.040873
| 0.274536
| 0.336149
| 0.369504
| 0.618049
| 0.592063
| 0.564751
| 0.555783
| 0.545347
| 0.528814
| 0
| 0.298952
| 0.23082
| 436,491
| 1,782
| 1,298
| 244.944444
| 0.352651
| 0
| 0
| 0
| 0
| 0
| 0.302867
| 0.151606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.000561
| 0.000561
| 0
| 0.000561
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a293ad273a91af2bcc9145337c179a25014f57f9
| 3,197
|
py
|
Python
|
tests/flow/test_ts_delete.py
|
kirylkrauchuk/redistimeseries
|
761cd6ec3920ac84ef5fe279df462c797dae758b
|
[
"Ruby",
"BSD-3-Clause",
"MIT"
] | null | null | null |
tests/flow/test_ts_delete.py
|
kirylkrauchuk/redistimeseries
|
761cd6ec3920ac84ef5fe279df462c797dae758b
|
[
"Ruby",
"BSD-3-Clause",
"MIT"
] | null | null | null |
tests/flow/test_ts_delete.py
|
kirylkrauchuk/redistimeseries
|
761cd6ec3920ac84ef5fe279df462c797dae758b
|
[
"Ruby",
"BSD-3-Clause",
"MIT"
] | null | null | null |
from RLTest import Env
def test_ts_del_uncompressed():
    """TS.DEL over the whole range of an uncompressed series removes every sample."""
    # total samples = 101 (timestamps 0..100 inclusive)
    sample_len = 101
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command("ts.create", 'test_key', 'uncompressed')
        for i in range(sample_len):
            # TS.ADD replies with the timestamp it stored, so it must echo i back
            assert i == r.execute_command("ts.add", 'test_key', i, '1')
        res = r.execute_command('ts.range', 'test_key', 0, 100)
        # every sample comes back as [timestamp, value-bytes] in insertion order
        for ts, sample in enumerate(res):
            assert sample == [ts, b'1']
        # deleting the full range must leave the series empty
        r.execute_command('ts.del', 'test_key', 0, 100)
        res = r.execute_command('ts.range', 'test_key', 0, 100)
        assert len(res) == 0
def test_ts_del_uncompressed_in_range():
    """TS.DEL of an interior sub-range deletes only the samples inside it."""
    # timestamps 0..100 inclusive
    sample_len = 101
    with Env().getClusterConnectionIfNeeded() as r:
        r.execute_command("ts.create", 'test_key', 'uncompressed')
        for i in range(sample_len):
            # TS.ADD replies with the timestamp it stored
            assert i == r.execute_command("ts.add", 'test_key', i, '1')
        res = r.execute_command('ts.range', 'test_key', 0, 100)
        for ts, sample in enumerate(res):
            assert sample == [ts, b'1']
        # delete 11 samples (timestamps 50..60 inclusive); 101 - 11 = 90 remain
        r.execute_command('ts.del', 'test_key', 50, 60)
        res = r.execute_command('ts.range', 'test_key', 0, 100)
        assert len(res) == 90
def test_ts_del_compressed():
    """Deleting the full range of a (default) compressed series leaves it empty."""
    total_samples = 101
    with Env().getClusterConnectionIfNeeded() as conn:
        # No 'uncompressed' flag: the key uses the default compressed chunks.
        conn.execute_command("ts.create", 'test_key')
        for ts in range(total_samples):
            assert ts == conn.execute_command("ts.add", 'test_key', ts, '1')
        for expected_ts, sample in enumerate(
                conn.execute_command('ts.range', 'test_key', 0, 100)):
            assert sample == [expected_ts, b'1']
        conn.execute_command('ts.del', 'test_key', 0, 100)
        assert len(conn.execute_command('ts.range', 'test_key', 0, 100)) == 0
def test_ts_del_compressed_multi_chunk():
    """TS.DEL across chunk boundaries of a compressed series works correctly."""
    total_samples = 1001  # enough samples to span multiple chunks
    with Env().getClusterConnectionIfNeeded() as conn:
        conn.execute_command("ts.create", 'test_key')
        for ts in range(total_samples):
            assert ts == conn.execute_command("ts.add", 'test_key', ts, '1')
        for expected_ts, sample in enumerate(
                conn.execute_command('ts.range', 'test_key', 0, total_samples - 1)):
            assert sample == [expected_ts, b'1']
        # Delete 0..999; only timestamp 1000 should survive.
        conn.execute_command('ts.del', 'test_key', 0, 999)
        assert len(conn.execute_command('ts.range', 'test_key', 0, total_samples - 1)) == 1
def test_ts_del_compressed_out_range():
    """A delete range extending past the stored samples still removes them all."""
    total_samples = 101
    offset = 100  # samples live at timestamps 100..200
    with Env().getClusterConnectionIfNeeded() as conn:
        conn.execute_command("ts.create", 'test_key')
        for i in range(total_samples):
            assert i + offset == conn.execute_command("ts.add", 'test_key', i + offset, '1')
        for i, sample in enumerate(
                conn.execute_command('ts.range', 'test_key', offset, total_samples + offset - 1)):
            assert sample == [i + offset, b'1']
        # Delete 0..500: wider than the data on both sides; all samples go.
        conn.execute_command('ts.del', 'test_key', 0, 500)
        assert len(conn.execute_command('ts.range', 'test_key', offset, total_samples + offset - 1)) == 0
| 34.010638
| 86
| 0.570222
| 453
| 3,197
| 3.836645
| 0.101545
| 0.115075
| 0.215765
| 0.244534
| 0.928654
| 0.87687
| 0.87687
| 0.861335
| 0.845224
| 0.845224
| 0
| 0.052106
| 0.279637
| 3,197
| 93
| 87
| 34.376344
| 0.702562
| 0.011573
| 0
| 0.802817
| 0
| 0
| 0.14064
| 0
| 0
| 0
| 0
| 0
| 0.211268
| 1
| 0.070423
| false
| 0
| 0.014085
| 0
| 0.084507
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a2975ca4c4cb049daefcb0cf82c2a970dcf34551
| 3,984
|
py
|
Python
|
git.py
|
amriteshbhaskar/PyProxy
|
576afc06a67af88f2f665a5185ce307687434cc4
|
[
"MIT"
] | 6
|
2018-11-13T16:30:07.000Z
|
2019-02-08T16:04:01.000Z
|
git.py
|
amriteshbhaskar/PyProxy
|
576afc06a67af88f2f665a5185ce307687434cc4
|
[
"MIT"
] | 1
|
2019-02-08T08:10:06.000Z
|
2019-07-14T11:48:22.000Z
|
git.py
|
amriteshbhaskar/PyProxy
|
576afc06a67af88f2f665a5185ce307687434cc4
|
[
"MIT"
] | 1
|
2019-02-08T07:16:28.000Z
|
2019-02-08T07:16:28.000Z
|
#!/usr/bin/python3
import os
def _gitconfig_path(pc_username):
    """Return the global .gitconfig path for *pc_username* ('root' is special-cased)."""
    if pc_username == 'root':
        return '/root/.gitconfig'
    return '/home/' + pc_username + '/.gitconfig'


def _rewrite_gitconfig(file_name, proxy_lines, drop_blank_lines):
    """Rewrite *file_name*, stripping proxy-related lines, then append *proxy_lines*.

    Existing lines containing 'http' or 'proxy' are dropped (which also covers
    'https'); blank lines are dropped too when *drop_blank_lines* is true.  The
    file is rebuilt through a '<name>_temp' file that then replaces the original,
    so a crash mid-write cannot corrupt the config.  Files are opened with
    ``with`` so handles are closed even on error (the original leaked them).
    """
    temp_name = file_name + '_temp'
    # Create an empty config first if none exists, so the read below succeeds.
    if not os.path.exists(file_name):
        open(file_name, 'w+').close()
    with open(file_name, 'r') as src, open(temp_name, 'w+') as dst:
        for line in src:
            if 'http' in line or 'proxy' in line:
                continue
            if drop_blank_lines and line.isspace():
                continue
            dst.write(line)
        for proxy_line in proxy_lines:
            dst.write(proxy_line)
    os.remove(file_name)
    os.rename(temp_name, file_name)


def set_proxy(host_ip, host_port, auth_choice, pc_username, username, password):
    """Write [http]/[https] proxy entries into the user's global .gitconfig.

    Parameters:
    - host_ip / host_port -- proxy endpoint (strings, concatenated into the URL)
    - auth_choice -- 'yes' to embed username:password in the proxy URL, 'no' for
      an unauthenticated proxy; any other value just strips existing entries
    - pc_username -- local account whose ~/.gitconfig is edited
    - username / password -- proxy credentials (used only when auth_choice=='yes')

    Does nothing when git is not installed.
    NOTE(review): credentials are embedded un-escaped; special characters in the
    password would corrupt the URL — confirm whether escaping is needed.
    """
    if os.system('which git > /dev/null') != 0:
        return  # git not installed; nothing to configure
    if auth_choice == 'yes':
        proxy_url = 'http://' + username + ':' + password + '@' + host_ip + ':' + host_port
        proxy_lines = ['\n[http]\n\tproxy = ' + proxy_url + '\n',
                       '[https]\n\tproxy=' + proxy_url + '\n']
    elif auth_choice == 'no':
        proxy_url = 'http://' + host_ip + ':' + host_port
        proxy_lines = ['\n[http]\n\tproxy = ' + proxy_url + '\n',
                       '[https]\n\tproxy=' + proxy_url + '\n']
    else:
        # Unknown choice: matches the original behavior of rewriting the file
        # (old proxy lines stripped) without appending new entries.
        proxy_lines = []
    _rewrite_gitconfig(_gitconfig_path(pc_username), proxy_lines,
                       drop_blank_lines=True)


def unset_proxy(pc_username):
    """Remove all proxy-related entries from the user's global .gitconfig.

    Does nothing when git is not installed.  Blank lines are preserved,
    matching the original implementation.
    """
    if os.system('which git > /dev/null') == 0:
        _rewrite_gitconfig(_gitconfig_path(pc_username), [],
                           drop_blank_lines=False)
| 43.304348
| 129
| 0.483936
| 471
| 3,984
| 3.866242
| 0.118896
| 0.140582
| 0.079077
| 0.069193
| 0.942339
| 0.942339
| 0.942339
| 0.942339
| 0.942339
| 0.942339
| 0
| 0.001196
| 0.370482
| 3,984
| 92
| 130
| 43.304348
| 0.72488
| 0.004267
| 0
| 0.963855
| 0
| 0
| 0.120494
| 0.0242
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024096
| false
| 0.060241
| 0.012048
| 0
| 0.036145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
a2efcca38a09955fda95b792cba6417ce513a728
| 7,917
|
py
|
Python
|
f5/bigip/tm/sys/test/functional/test_sshd.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 272
|
2016-02-23T06:05:44.000Z
|
2022-02-20T02:09:32.000Z
|
f5/bigip/tm/sys/test/functional/test_sshd.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 1,103
|
2016-02-11T17:48:03.000Z
|
2022-02-15T17:13:37.000Z
|
f5/bigip/tm/sys/test/functional/test_sshd.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 167
|
2016-02-11T17:48:21.000Z
|
2022-01-17T20:13:05.000Z
|
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
# TMOS releases each test class supports; checked against the --release option.
V11_SUPPORTED = ['11.5.4', '11.6.0', '11.6.1', '11.6.2']
V12_SUPPORTED = ['12.0.0', '12.1.0']
def setup_sshd_test(request, mgmt_root):
    """Load the device's sshd config resource and register a finalizer
    that restores its default settings after the test."""
    def teardown():
        # Restore defaults so one test cannot leak state into the next.
        d.allow = ['ALL']
        d.banner = 'disabled'
        d.bannerText = ''
        d.inactivityTimeout = 0
        d.logLevel = 'info'
        d.login = 'enabled'
        # 'port' is only configurable on v12 TMOS, so only reset it there.
        if pytest.config.getoption('--release') in V12_SUPPORTED:
            d.port = 22
        d.update()
    request.addfinalizer(teardown)
    d = mgmt_root.tm.sys.sshd.load()
    return d
@pytest.mark.skipif(pytest.config.getoption('--release') not in V11_SUPPORTED,
                    reason='Needs v11 TMOS to pass')
class TestSshd11(object):
    """Functional tests of the sshd resource on v11 TMOS.

    Pattern used throughout: load two independent handles to the same
    resource, update through one, and verify the other only observes the
    change after refresh().
    """
    def test_load(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        assert ssh1.allow == ssh2.allow
        assert ssh1.banner == ssh2.banner
        assert ssh1.inactivityTimeout == ssh2.inactivityTimeout
        assert ssh1.logLevel == ssh2.logLevel
        assert ssh1.login == ssh2.login
    def test_update_allow(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.allow = ['192.168.1.1']
        ssh1.update()
        assert ['192.168.1.1'] == ssh1.allow
        assert ['192.168.1.1'] != ssh2.allow
        # Refresh
        ssh2.refresh()
        assert ['192.168.1.1'] == ssh2.allow
    def test_update_banner(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        banners = ['enabled', 'disabled']
        for banner in banners:
            ssh1.banner = banner
            ssh1.update()
            assert banner == ssh1.banner
            assert banner != ssh2.banner
            # Refresh
            ssh2.refresh()
            assert banner == ssh2.banner
    def test_update_bannerText(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.bannerText = 'foo banner'
        ssh1.update()
        assert 'foo banner' == ssh1.bannerText
        # bannerText is absent (not merely empty) until one has been set.
        assert not hasattr(ssh2, 'bannerText')
        # Refresh
        ssh2.refresh()
        assert 'foo banner' == ssh2.bannerText
    def test_update_inactivityTimeout(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.inactivityTimeout = 10
        ssh1.update()
        assert 10 == ssh1.inactivityTimeout
        assert 10 != ssh2.inactivityTimeout
        # Refresh
        ssh2.refresh()
        assert 10 == ssh2.inactivityTimeout
    def test_update_logLevel(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        levels = ['debug', 'debug1', 'debug2', 'debug3', 'error', 'fatal',
                  'info', 'quiet', 'verbose']
        for level in levels:
            ssh1.logLevel = level
            ssh1.update()
            assert level == ssh1.logLevel
            assert level != ssh2.logLevel
            # Refresh
            ssh2.refresh()
            assert level == ssh2.logLevel
    def test_update_login(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        logins = ['disabled', 'enabled']
        for login in logins:
            ssh1.login = login
            ssh1.update()
            assert login == ssh1.login
            assert login != ssh2.login
            # Refresh
            ssh2.refresh()
            assert login == ssh2.login
@pytest.mark.skipif(pytest.config.getoption('--release') not in V12_SUPPORTED,
                    reason='Needs v12 TMOS to pass')
class TestSshd12(object):
    """Functional tests of the sshd resource on v12 TMOS.

    Mirrors TestSshd11 and additionally covers the 'port' attribute,
    which is only configurable on v12.
    """
    def test_load(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        assert ssh1.allow == ssh2.allow
        assert ssh1.banner == ssh2.banner
        assert ssh1.inactivityTimeout == ssh2.inactivityTimeout
        assert ssh1.logLevel == ssh2.logLevel
        assert ssh1.login == ssh2.login
        assert ssh1.port == ssh2.port
    def test_update_allow(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.allow = ['192.168.1.1']
        ssh1.update()
        assert ['192.168.1.1'] == ssh1.allow
        assert ['192.168.1.1'] != ssh2.allow
        # Refresh
        ssh2.refresh()
        assert ['192.168.1.1'] == ssh2.allow
    def test_update_banner(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        banners = ['enabled', 'disabled']
        for banner in banners:
            ssh1.banner = banner
            ssh1.update()
            assert banner == ssh1.banner
            assert banner != ssh2.banner
            # Refresh
            ssh2.refresh()
            assert banner == ssh2.banner
    def test_update_bannerText(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.bannerText = 'foo banner'
        ssh1.update()
        assert 'foo banner' == ssh1.bannerText
        # bannerText is absent (not merely empty) until one has been set.
        assert not hasattr(ssh2, 'bannerText')
        # Refresh
        ssh2.refresh()
        assert 'foo banner' == ssh2.bannerText
    def test_update_inactivityTimeout(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.inactivityTimeout = 10
        ssh1.update()
        assert 10 == ssh1.inactivityTimeout
        assert 10 != ssh2.inactivityTimeout
        # Refresh
        ssh2.refresh()
        assert 10 == ssh2.inactivityTimeout
    def test_update_logLevel(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        levels = ['debug', 'debug1', 'debug2', 'debug3', 'error', 'fatal',
                  'info', 'quiet', 'verbose']
        for level in levels:
            ssh1.logLevel = level
            ssh1.update()
            assert level == ssh1.logLevel
            assert level != ssh2.logLevel
            # Refresh
            ssh2.refresh()
            assert level == ssh2.logLevel
    def test_update_login(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        logins = ['disabled', 'enabled']
        for login in logins:
            ssh1.login = login
            ssh1.update()
            assert login == ssh1.login
            assert login != ssh2.login
            # Refresh
            ssh2.refresh()
            assert login == ssh2.login
    def test_update_port(self, request, mgmt_root):
        ssh1 = setup_sshd_test(request, mgmt_root)
        ssh2 = setup_sshd_test(request, mgmt_root)
        ssh1.port = 1234
        ssh1.update()
        assert 1234 == ssh1.port
        assert 1234 != ssh2.port
        # Refresh
        ssh2.refresh()
        assert 1234 == ssh2.port
| 30.925781
| 78
| 0.604269
| 937
| 7,917
| 4.954109
| 0.148346
| 0.081
| 0.148643
| 0.133563
| 0.800086
| 0.800086
| 0.791254
| 0.791254
| 0.791254
| 0.770142
| 0
| 0.051135
| 0.293546
| 7,917
| 255
| 79
| 31.047059
| 0.778831
| 0.083996
| 0
| 0.830409
| 0
| 0
| 0.062985
| 0
| 0
| 0
| 0
| 0
| 0.292398
| 1
| 0.099415
| false
| 0.011696
| 0.005848
| 0
| 0.122807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2fb358dca530bfbfe18c28092d303af4feccf04
| 1,948
|
py
|
Python
|
stage/configuration/test_whole_file_transformer_processor.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | null | null | null |
stage/configuration/test_whole_file_transformer_processor.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | 1
|
2019-04-24T11:06:38.000Z
|
2019-04-24T11:06:38.000Z
|
stage/configuration/test_whole_file_transformer_processor.py
|
anubandhan/datacollector-tests
|
301c024c66d68353735256b262b681dd05ba16cc
|
[
"Apache-2.0"
] | 2
|
2019-05-24T06:34:37.000Z
|
2020-03-30T11:48:18.000Z
|
import pytest
from streamsets.testframework.decorators import stub
# NOTE(review): every test below is decorated with @stub — they are generated
# placeholders, one per configuration option of the Whole File Transformer
# processor, and are no-ops until a real implementation replaces `pass`.
@stub
def test_buffer_size_in_bytes(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_compression_codec(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_data_time_zone(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_dictionary_page_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_files_prefix(sdc_builder, sdc_executor):
    pass
@stub
def test_files_suffix(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_job_type(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_max_padding_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
                                              {'on_record_error': 'STOP_PIPELINE'},
                                              {'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_page_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
    pass
@stub
def test_rate_per_second(sdc_builder, sdc_executor):
    pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'job_type': 'AVRO_PARQUET'}])
def test_row_group_size(sdc_builder, sdc_executor, stage_attributes):
    pass
@stub
def test_temporary_file_directory(sdc_builder, sdc_executor):
    pass
| 21.88764
| 83
| 0.735113
| 253
| 1,948
| 5.256917
| 0.217391
| 0.078947
| 0.146617
| 0.236842
| 0.767669
| 0.741353
| 0.741353
| 0.741353
| 0.66015
| 0.657143
| 0
| 0
| 0.152464
| 1,948
| 88
| 84
| 22.136364
| 0.805572
| 0
| 0
| 0.642857
| 0
| 0
| 0.156651
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.267857
| false
| 0.267857
| 0.035714
| 0
| 0.303571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0c10101c55bd359171fef25c27ba44020f6c537a
| 105
|
py
|
Python
|
app/routes/home/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | 7
|
2020-03-28T03:50:07.000Z
|
2020-09-30T02:49:10.000Z
|
app/routes/home/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | 2
|
2020-03-28T03:27:18.000Z
|
2020-11-03T01:37:39.000Z
|
app/routes/home/__init__.py
|
ygorazambuja/covid-scrapper
|
6c714a47c00773fe0391d040dcfed57258d98c07
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Blueprint for the "home" section; __name__ makes templates/static resolve
# relative to this package.
home_blueprint = Blueprint('home_blueprint', __name__)
# Imported last, for its side effects: the views module attaches its routes to
# home_blueprint, so the blueprint must exist before this import runs (the
# usual Flask pattern for avoiding a circular import).
from . import views
| 17.5
| 54
| 0.8
| 13
| 105
| 6
| 0.538462
| 0.333333
| 0.564103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 105
| 5
| 55
| 21
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
0c13b039839890e59396c0741741582990554409
| 550
|
py
|
Python
|
eval_ricord1a_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_ricord1a_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_ricord1a_timm-regnetx_002_GridDistortion.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Run the evaluation config for each of the 5 cross-validation folds.
# The commands differ only in the fold index embedded in the yml filename.
CMD_TEMPLATE = ("python main.py --configs configs/"
                "eval_ricord1a_unetplusplus_timm-regnetx_002_{}_GridDistortion.yml")

for fold in range(5):
    os.system(CMD_TEMPLATE.format(fold))
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.887872
| 0.887872
| 0.887872
| 0.887872
| 0.887872
| 0.887872
| 0
| 0.04817
| 0.056364
| 550
| 11
| 105
| 50
| 0.793834
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0c50fe5848023222200aae75863d1784ac4c0c43
| 129
|
py
|
Python
|
www/src/Lib/zlib.py
|
stefanhoelzl/brython
|
433d272e7bb0e3c0994392f8f265bc39e87854f7
|
[
"BSD-3-Clause"
] | 52
|
2016-08-08T15:08:19.000Z
|
2022-03-23T09:48:53.000Z
|
www/src/Lib/zlib.py
|
SungBeom/BBAM_Brython
|
107036ad20a94af1d43e5ce5bd7c73e6a470d687
|
[
"BSD-3-Clause"
] | 6
|
2016-10-09T19:50:49.000Z
|
2019-08-17T15:34:21.000Z
|
www/src/Lib/zlib.py
|
SungBeom/BBAM_Brython
|
107036ad20a94af1d43e5ce5bd7c73e6a470d687
|
[
"BSD-3-Clause"
] | 15
|
2017-02-03T03:08:57.000Z
|
2021-08-04T06:11:15.000Z
|
# FIXME: placeholder implementation — for now just pass the data back unchanged.
def compress(data):
    """Stub: return *data* unmodified (no actual compression is performed)."""
    return data
def decompress(data):
    """Stub: return *data* unmodified (no actual decompression is performed)."""
    return data
| 16.125
| 52
| 0.689922
| 21
| 129
| 4.238095
| 0.714286
| 0.224719
| 0.314607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224806
| 129
| 7
| 53
| 18.428571
| 0.89
| 0.395349
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0c54285841faa7cb2e81a05237bc25df210bdb6b
| 6,620
|
py
|
Python
|
pyaz/cognitiveservices/account/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/cognitiveservices/account/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/cognitiveservices/account/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage Azure Cognitive Services accounts.
'''
from ... pyaz_utils import _call_az
from . import commitment_plan, deployment, identity, keys, network_rule
def create(kind, location, name, resource_group, sku, api_properties=None, assign_identity=None, custom_domain=None, encryption=None, storage=None, tags=None, yes=None):
    '''
    Manage Azure Cognitive Services accounts.
    Required Parameters:
    - kind -- the API name of cognitive services account
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - sku -- Name of the Sku of cognitive services account
    Optional Parameters:
    - api_properties -- Api properties in JSON format or a=b c=d format. Some cognitive services (i.e. QnA Maker) require extra api properties to create the account.
    - assign_identity -- Generate and assign an Azure Active Directory Identity for this account.
    - custom_domain -- User domain assigned to the account. Name is the CNAME source.
    - encryption -- The encryption properties for this resource, in JSON format.
    - storage -- The storage accounts for this resource, in JSON array format.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    - yes -- Do not prompt for terms confirmation
    '''
    # locals() forwards every bound parameter (including None defaults) to the
    # CLI shim; _call_az turns them into `az` command-line flags.  This pattern
    # is repeated by every wrapper in this module.
    return _call_az("az cognitiveservices account create", locals())
def delete(name, resource_group):
    '''
    Manage Azure Cognitive Services accounts.
    Required Parameters:
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account delete", locals())
def show(name, resource_group):
    '''
    Manage Azure Cognitive Services accounts.
    Required Parameters:
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account show", locals())
def update(name, resource_group, api_properties=None, custom_domain=None, encryption=None, sku=None, storage=None, tags=None):
    '''
    Manage Azure Cognitive Services accounts.
    Required Parameters:
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - api_properties -- Api properties in JSON format or a=b c=d format. Some cognitive services (i.e. QnA Maker) require extra api properties to create the account.
    - custom_domain -- User domain assigned to the account. Name is the CNAME source.
    - encryption -- The encryption properties for this resource, in JSON format.
    - sku -- Name of the Sku of cognitive services account
    - storage -- The storage accounts for this resource, in JSON array format.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    '''
    return _call_az("az cognitiveservices account update", locals())
# NOTE: `list` intentionally shadows the builtin to mirror the CLI verb; the
# generated public API cannot rename it without breaking callers.
def list(resource_group=None):
    '''
    Manage Azure Cognitive Services accounts.
    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account list", locals())
def show_deleted(location, name, resource_group):
    '''
    Show a soft-deleted Azure Cognitive Services account.
    Required Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account show-deleted", locals())
def list_deleted():
    '''
    List soft-deleted Azure Cognitive Services accounts.
    '''
    return _call_az("az cognitiveservices account list-deleted", locals())
def purge(location, name, resource_group):
    '''
    Purge a soft-deleted Azure Cognitive Services account.
    Required Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account purge", locals())
def recover(location, name, resource_group):
    '''
    Recover a soft-deleted Azure Cognitive Services account.
    Required Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account recover", locals())
def list_skus(kind=None, location=None, name=None, resource_group=None):
    '''
    Manage Azure Cognitive Services accounts.
    Optional Parameters:
    - kind -- the API name of cognitive services account
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account list-skus", locals())
def list_usage(name, resource_group):
    '''
    Required Parameters:
    - name -- cognitive service account name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    return _call_az("az cognitiveservices account list-usage", locals())
def list_kinds():
    '''List the kinds available for Cognitive Services accounts.'''
    # No parameters, so locals() is an empty mapping here.
    return _call_az("az cognitiveservices account list-kinds", locals())
| 43.552632
| 169
| 0.71994
| 837
| 6,620
| 5.617682
| 0.126643
| 0.082943
| 0.061463
| 0.057422
| 0.854743
| 0.831986
| 0.801361
| 0.783496
| 0.772437
| 0.754998
| 0
| 0
| 0.187462
| 6,620
| 151
| 170
| 43.84106
| 0.87414
| 0.682779
| 0
| 0
| 0
| 0
| 0.255828
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.461538
| false
| 0
| 0.076923
| 0.038462
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a74aeb3bc0e73cde2e1a6ce31c0f5616db3024d0
| 3,268
|
py
|
Python
|
test/test_dephead.py
|
ulf1/nlptasks
|
07d36448b517a18f76088f5d9cfb853e7602b079
|
[
"Apache-2.0"
] | 2
|
2020-12-30T13:11:09.000Z
|
2021-11-04T19:40:31.000Z
|
test/test_dephead.py
|
ulf1/nlptasks
|
07d36448b517a18f76088f5d9cfb853e7602b079
|
[
"Apache-2.0"
] | 99
|
2020-11-02T14:58:04.000Z
|
2021-04-09T18:01:34.000Z
|
test/test_dephead.py
|
ulf1/nlptasks
|
07d36448b517a18f76088f5d9cfb853e7602b079
|
[
"Apache-2.0"
] | null | null | null |
import nlptasks as nt
import nlptasks.dephead
def test_01():
    """spacy-de backend: factory output matches the expected mask pairs."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    # Expected (id, token-index) pairs; presumably head and dependency-relation
    # IDs per token — TODO confirm against nlptasks.dephead's encoding.
    target = [
        (46, 0), (46, 1), (47, 2), (47, 3), (47, 4), (47, 5), (49, 6), (47, 7),
        (19, 0), (31, 1), (36, 2), (42, 3), (21, 4), (17, 5), (19, 6), (32, 7)]
    maskseqs, seqlens = nt.dephead.factory("spacy-de")(seqs_token)
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
def test_02():  # check if calling pad_dephead causes an error
    """spacy-de backend: padding/truncation kwargs are accepted and applied."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    target = [
        (46, 0), (46, 1), (47, 2), (47, 3), (47, 4), (47, 5), (49, 6), (47, 7),
        (19, 0), (31, 1), (36, 2), (42, 3), (21, 4), (17, 5), (19, 6), (32, 7)]
    maskseqs, seqlens = nt.dephead.factory("spacy-de")(
        seqs_token, maxlen=6, padding='post', truncating='post')
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
    # maxlen=6 with truncating='post' must cap every token index below 6.
    for pair in maskseqs[0]:
        assert pair[1] < 6
def test_03():  # preload model
    """spacy-de backend: a preloaded model can be passed via model=."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    target = [
        (46, 0), (46, 1), (47, 2), (47, 3), (47, 4), (47, 5), (49, 6), (47, 7),
        (19, 0), (31, 1), (36, 2), (42, 3), (21, 4), (17, 5), (19, 6), (32, 7)]
    identifier = "spacy-de"
    model = nt.dephead.get_model(identifier)
    fn = nt.dephead.factory(identifier)
    maskseqs, seqlens = fn(seqs_token, model=model)
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
def test_11():
    """stanza-de backend: factory output matches the expected mask pairs."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    # Same sentence as the spacy tests but with stanza's ID scheme.
    target = [
        (64, 0), (66, 1), (64, 2), (62, 3), (66, 4), (69, 5), (67, 6), (66, 7),
        (24, 0), (45, 1), (35, 2), (58, 3), (49, 4), (9, 5), (42, 6), (56, 7)]
    maskseqs, seqlens = nt.dephead.factory("stanza-de")(seqs_token)
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
def test_12():  # check if calling pad_dephead causes an error
    """stanza-de backend: padding/truncation kwargs are accepted and applied."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    target = [
        (64, 0), (66, 1), (64, 2), (62, 3), (66, 4), (69, 5), (67, 6), (66, 7),
        (24, 0), (45, 1), (35, 2), (58, 3), (49, 4), (9, 5), (42, 6), (56, 7)]
    maskseqs, seqlens = nt.dephead.factory("stanza-de")(
        seqs_token, maxlen=6, padding='post', truncating='post')
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
    # maxlen=6 with truncating='post' must cap every token index below 6.
    for pair in maskseqs[0]:
        assert pair[1] < 6
def test_13():  # preload model
    """stanza-de backend: a preloaded model can be passed via model=."""
    seqs_token = [[
        "Der", "Helmut", "Kohl", "speist", "Schweinshaxe", "mit", "Kohl", "."]]
    target = [
        (64, 0), (66, 1), (64, 2), (62, 3), (66, 4), (69, 5), (67, 6), (66, 7),
        (24, 0), (45, 1), (35, 2), (58, 3), (49, 4), (9, 5), (42, 6), (56, 7)]
    identifier = "stanza-de"
    model = nt.dephead.get_model(identifier)
    fn = nt.dephead.factory(identifier)
    maskseqs, seqlens = fn(seqs_token, model=model)
    assert seqlens == [8]
    for pair in maskseqs[0]:
        assert pair in target
| 33.010101
| 79
| 0.507038
| 477
| 3,268
| 3.427673
| 0.163522
| 0.051376
| 0.044037
| 0.08318
| 0.945566
| 0.945566
| 0.945566
| 0.945566
| 0.945566
| 0.945566
| 0
| 0.131645
| 0.263158
| 3,268
| 98
| 80
| 33.346939
| 0.547342
| 0.035802
| 0
| 0.810811
| 0
| 0
| 0.095677
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 1
| 0.081081
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7572a13aa2cca70d43fe14fba16b0babd12f69c
| 71
|
py
|
Python
|
test/test_ui.py
|
OpenMindInnovation/timeflux_ui
|
15abc47f68538cb32e8b64a1d6a065a802fc292d
|
[
"MIT"
] | 7
|
2019-09-22T13:12:29.000Z
|
2021-06-28T12:19:34.000Z
|
test/test_ui.py
|
OpenMindInnovation/timeflux_ui
|
15abc47f68538cb32e8b64a1d6a065a802fc292d
|
[
"MIT"
] | 10
|
2019-04-23T22:44:33.000Z
|
2021-08-17T16:01:01.000Z
|
test/test_ui.py
|
OpenMindInnovation/timeflux_ui
|
15abc47f68538cb32e8b64a1d6a065a802fc292d
|
[
"MIT"
] | 2
|
2019-04-03T09:16:36.000Z
|
2020-01-23T13:13:29.000Z
|
"""Tests for ui.py"""
import pytest
def test_none():
    """Trivial smoke test: passes as long as the module imports cleanly.

    pytest warns on (and newer versions reject) test functions that return
    a non-None value, so assert instead of ``return True``.
    """
    assert True
| 10.142857
| 21
| 0.647887
| 11
| 71
| 4.090909
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211268
| 71
| 6
| 22
| 11.833333
| 0.803571
| 0.211268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a7d4795815102faaed41eba358a696aca659f1c4
| 1,581
|
py
|
Python
|
simple_mooc/core/tests/test_views.py
|
nikolasvargas/simple-mooc
|
626f495e90326441488669c2b833cda200a4d75c
|
[
"MIT"
] | null | null | null |
simple_mooc/core/tests/test_views.py
|
nikolasvargas/simple-mooc
|
626f495e90326441488669c2b833cda200a4d75c
|
[
"MIT"
] | null | null | null |
simple_mooc/core/tests/test_views.py
|
nikolasvargas/simple-mooc
|
626f495e90326441488669c2b833cda200a4d75c
|
[
"MIT"
] | null | null | null |
from django.test import SimpleTestCase
from django.urls import reverse
class HomePageTests(SimpleTestCase):
    """Smoke tests for the core index (home) page."""

    def test_home_page_status_code(self):
        # fetching the root URL directly must succeed
        self.assertEqual(self.client.get('/').status_code, 200)

    def test_view_url_by_name(self):
        # the named route must resolve and respond successfully
        self.assertEqual(
            self.client.get(reverse('core:index')).status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('core:index'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'home.html')

    def test_view_does_not_contains_404(self):
        response = self.client.get('/')
        self.assertNotEqual(response.status_code, 404)
        self.assertNotContains(response, '<span>(404)</span>')
class ContactPageTests(SimpleTestCase):
    """Smoke tests for the core contact page."""
    # NOTE(review): one test hits '/contact' and another '/contact/' —
    # presumably both resolve (e.g. via APPEND_SLASH); confirm against
    # the project URLconf.

    def test_contact_page_status_code(self):
        self.assertEqual(self.client.get('/contact').status_code, 200)

    def test_view_url_by_name(self):
        self.assertEqual(
            self.client.get(reverse('core:contact')).status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('core:contact'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'contact.html')

    def test_view_does_not_contains_404(self):
        response = self.client.get('/contact/')
        self.assertNotEqual(response.status_code, 404)
        self.assertNotContains(response, '<span>(404)</span>')
| 35.133333
| 62
| 0.705882
| 190
| 1,581
| 5.652632
| 0.210526
| 0.09311
| 0.119181
| 0.163873
| 0.8473
| 0.8473
| 0.8473
| 0.840782
| 0.768156
| 0.712291
| 0
| 0.027671
| 0.177103
| 1,581
| 44
| 63
| 35.931818
| 0.797848
| 0
| 0
| 0.6875
| 0
| 0
| 0.075901
| 0
| 0
| 0
| 0
| 0
| 0.375
| 1
| 0.25
| false
| 0
| 0.0625
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
38f76c4c328bd92a458c1243331c5232167268ba
| 11,491
|
py
|
Python
|
src/datasets/dataset_vqa.py
|
Steve-Tod/ClipBERT
|
3543fc7efa7b4848be437c95a55d323618f442ff
|
[
"MIT"
] | null | null | null |
src/datasets/dataset_vqa.py
|
Steve-Tod/ClipBERT
|
3543fc7efa7b4848be437c95a55d323618f442ff
|
[
"MIT"
] | null | null | null |
src/datasets/dataset_vqa.py
|
Steve-Tod/ClipBERT
|
3543fc7efa7b4848be437c95a55d323618f442ff
|
[
"MIT"
] | null | null | null |
import torch
import numpy as np
from torch.utils.data.dataloader import default_collate
from src.utils.basic_utils import flat_list_of_lists
from src.datasets.dataset_base import ClipBertBaseDataset, img_collate
class ClipBertVQATagDataset(ClipBertBaseDataset):
    """ This should work for both train and test (where labels are not available).
    datalist: list(tuples) each tuple is (img_id, list(dicts)),
        each dict {
            "type": "image",
            "filepath": "/abs/path/to/COCO_val2014_000000401092.jpg",
            "text": "A plate of food and a beverage are on a table.",
            "labels": {"down": 1, "at table": 0.3, "skateboard": 0.3, "table": 0.3}
            "answer_type": "other"
            "question_id": 262148000
            }
    tokenizer:
    max_img_size: int,
    max_txt_len: int, max text sequence length, including special tokens.
    itm_neg_prob: float [0, 1] set to 0 will disable itm.
    """

    def __init__(self, datalist, tokenizer, img_lmdb_dir, fps=3,
                 max_img_size=1000, max_txt_len=20, ans2label=None):
        super(ClipBertVQATagDataset, self).__init__(
            datalist, tokenizer, img_lmdb_dir, fps=fps,
            max_img_size=max_img_size, max_txt_len=max_txt_len,
        )  # init its parent class
        self.ans2label = ans2label
        self.num_labels = len(ans2label)
        # inverse mapping: label index -> raw answer string
        self.label2ans = {v: k for k, v in ans2label.items()}
        # flat lookup: question_id -> its example dict, across all images
        self.qid2data = {
            d["question_id"]: d for group in datalist for d in group[1]}

    def __len__(self):
        return len(self.datalist)

    def __getitem__(self, index):
        img_id, examples = self.datalist[index]  # one image with multiple examples
        img_array = self._load_img(img_id)  # tensor
        examples = [self._get_single_example(e) for e in examples]
        return dict(
            img=img_array,
            examples=examples,
            n_examples=len(examples)  # used to create image feature copies.
        )

    def _get_single_example(self, data):
        """Extract text / tags / soft targets for one question example."""
        return dict(
            text_str=data["txt"],
            question_id=data["question_id"],
            object_tags=data["object_tags"],
            labels=self._get_vqa_targets(
                data["labels"]) if "labels" in data else None
        )

    def _get_vqa_targets(self, ans2score_dict):
        """
        Args:
            ans2score_dict: {"table": 0.9, "picnic table": 1,
                             "skateboard": 0.3}

        Returns:
            A 1D tensor
        """
        targets = torch.zeros(self.num_labels)
        raw_answers = list(ans2score_dict.keys())
        scores = [ans2score_dict[k] for k in raw_answers]
        labels = [self.ans2label[ans] for ans in raw_answers]
        # scatter each answer's soft score into its label slot
        targets.scatter_(
            0, torch.tensor(labels).long(),
            torch.tensor(scores).float())
        return targets

    def evaluate_vqa(self, results):
        """
        Args:
            results: list(dict), in accordance with VQA online submission format
                each dict is
                    {
                        "question_id": int,
                        "answer": str
                    }

        Returns:
            VQA score
        """
        scores = []
        answer_types = []
        answer_type2idx = {"yes/no": 0, "number": 1, "other": 2}
        for d in results:
            qid = d["question_id"]
            ans = d["answer"]
            raw_data = self.qid2data[qid]
            labels = raw_data["labels"]
            # soft VQA accuracy: credit equals the annotator agreement score
            scores.append(labels[ans] if ans in labels else 0.)
            answer_types.append(answer_type2idx[raw_data["answer_type"]])
        metrics = dict()
        scores = np.array(scores)
        n_total = len(scores)
        # guard empty `results`: np.mean([]) is nan (with a RuntimeWarning)
        # and the ratio computation below would divide by zero
        metrics["overall_acc"] = float(np.mean(scores)) if n_total else 0.
        answer_types = np.array(answer_types)
        ratios = dict()
        for ans_type, ans_type_idx in answer_type2idx.items():
            answer_type_mask = answer_types == ans_type_idx
            answer_type_scores = scores[answer_type_mask]
            n_type = len(answer_type_scores)
            # an answer type absent from `results` scores 0, not nan
            metrics[f"{ans_type}_acc"] = (
                float(np.mean(answer_type_scores)) if n_type else 0.)
            ratios[f"{ans_type}_ratio"] = [
                1. * n_type / n_total if n_total else 0.,
                n_type]
        metrics["ratios"] = ratios
        return metrics
class VQATagCollator(object):
    """Collates ClipBertVQATagDataset items into model-ready batches,
    appending the object tags to the question text via a [SEP] token."""

    def __init__(self, tokenizer, max_length=120):
        self.tokenizer = tokenizer
        self.max_length = max_length

    def collate_batch(self, batch):
        """Merge a list of dataset items into a single batch dict."""
        # tensors collate with the default collator, raw images otherwise
        if isinstance(batch[0]["img"], torch.Tensor):
            collate_imgs = default_collate
        else:
            collate_imgs = img_collate
        visual_inputs = collate_imgs(
            [item["img"] for item in batch])  # (B, #frm=1 or T, 3, H, W)
        # flatten per-image example lists into one list of text examples
        text_examples = flat_list_of_lists([item["examples"] for item in batch])
        n_examples_list = [item["n_examples"] for item in batch]  # (B, )
        # tokenize "question [SEP] object tags" for every example
        enc = self.tokenizer.batch_encode_plus(
            [ex["text_str"] + " [SEP] " + ex["object_tags"]
             for ex in text_examples],
            max_length=self.max_length,
            pad_to_max_length=True,
            return_tensors="pt"
        )
        if text_examples[0]["labels"] is None:
            labels = None
        else:
            labels = default_collate(
                [ex["labels"] for ex in text_examples])  # (B, #ans)
        question_ids = [ex["question_id"] for ex in text_examples]
        return dict(
            visual_inputs=visual_inputs,  # (B, #frm=1 or T, H, W, C)
            text_input_ids=enc.input_ids,  # (B, L)
            text_input_mask=enc.attention_mask,  # (B, L)
            question_ids=question_ids,
            labels=labels,
            n_examples_list=n_examples_list  # used to create image feature copies.
        )
class ClipBertVQADataset(ClipBertBaseDataset):
    """ This should work for both train and test (where labels are not available).
    datalist: list(tuples) each tuple is (img_id, list(dicts)),
        each dict {
            "type": "image",
            "filepath": "/abs/path/to/COCO_val2014_000000401092.jpg",
            "text": "A plate of food and a beverage are on a table.",
            "labels": {"down": 1, "at table": 0.3, "skateboard": 0.3, "table": 0.3}
            "answer_type": "other"
            "question_id": 262148000
            }
    tokenizer:
    max_img_size: int,
    max_txt_len: int, max text sequence length, including special tokens.
    itm_neg_prob: float [0, 1] set to 0 will disable itm.
    """

    def __init__(self, datalist, tokenizer, img_lmdb_dir, fps=3,
                 max_img_size=1000, max_txt_len=20, ans2label=None):
        super(ClipBertVQADataset, self).__init__(
            datalist, tokenizer, img_lmdb_dir, fps=fps,
            max_img_size=max_img_size, max_txt_len=max_txt_len,
        )  # init its parent class
        self.ans2label = ans2label
        self.num_labels = len(ans2label)
        # inverse mapping: label index -> raw answer string
        self.label2ans = {v: k for k, v in ans2label.items()}
        # flat lookup: question_id -> its example dict, across all images
        self.qid2data = {
            d["question_id"]: d for group in datalist for d in group[1]}

    def __len__(self):
        return len(self.datalist)

    def __getitem__(self, index):
        img_id, examples = self.datalist[index]  # one image with multiple examples
        img_array = self._load_img(img_id)  # tensor
        examples = [self._get_single_example(e) for e in examples]
        return dict(
            img=img_array,
            examples=examples,
            n_examples=len(examples)  # used to create image feature copies.
        )

    def _get_single_example(self, data):
        """Extract text and soft targets for one question example."""
        return dict(
            text_str=data["txt"],
            question_id=data["question_id"],
            labels=self._get_vqa_targets(
                data["labels"]) if "labels" in data else None
        )

    def _get_vqa_targets(self, ans2score_dict):
        """
        Args:
            ans2score_dict: {"table": 0.9, "picnic table": 1,
                             "skateboard": 0.3}

        Returns:
            A 1D tensor
        """
        targets = torch.zeros(self.num_labels)
        raw_answers = list(ans2score_dict.keys())
        scores = [ans2score_dict[k] for k in raw_answers]
        labels = [self.ans2label[ans] for ans in raw_answers]
        # scatter each answer's soft score into its label slot
        targets.scatter_(
            0, torch.tensor(labels).long(),
            torch.tensor(scores).float())
        return targets

    def evaluate_vqa(self, results):
        """
        Args:
            results: list(dict), in accordance with VQA online submission format
                each dict is
                    {
                        "question_id": int,
                        "answer": str
                    }

        Returns:
            VQA score
        """
        scores = []
        answer_types = []
        answer_type2idx = {"yes/no": 0, "number": 1, "other": 2}
        for d in results:
            qid = d["question_id"]
            ans = d["answer"]
            raw_data = self.qid2data[qid]
            labels = raw_data["labels"]
            # soft VQA accuracy: credit equals the annotator agreement score
            scores.append(labels[ans] if ans in labels else 0.)
            answer_types.append(answer_type2idx[raw_data["answer_type"]])
        metrics = dict()
        scores = np.array(scores)
        n_total = len(scores)
        # guard empty `results`: np.mean([]) is nan (with a RuntimeWarning)
        # and the ratio computation below would divide by zero
        metrics["overall_acc"] = float(np.mean(scores)) if n_total else 0.
        answer_types = np.array(answer_types)
        ratios = dict()
        for ans_type, ans_type_idx in answer_type2idx.items():
            answer_type_mask = answer_types == ans_type_idx
            answer_type_scores = scores[answer_type_mask]
            n_type = len(answer_type_scores)
            # an answer type absent from `results` scores 0, not nan
            metrics[f"{ans_type}_acc"] = (
                float(np.mean(answer_type_scores)) if n_type else 0.)
            ratios[f"{ans_type}_ratio"] = [
                1. * n_type / n_total if n_total else 0.,
                n_type]
        metrics["ratios"] = ratios
        return metrics
class VQACollator(object):
    """Collates ClipBertVQADataset items into model-ready batches."""

    def __init__(self, tokenizer, max_length=20):
        self.tokenizer = tokenizer
        self.max_length = max_length

    def collate_batch(self, batch):
        """Merge a list of dataset items into a single batch dict."""
        # tensors collate with the default collator, raw images otherwise
        if isinstance(batch[0]["img"], torch.Tensor):
            collate_imgs = default_collate
        else:
            collate_imgs = img_collate
        visual_inputs = collate_imgs(
            [item["img"] for item in batch])  # (B, #frm=1 or T, 3, H, W)
        # flatten per-image example lists into one list of text examples
        text_examples = flat_list_of_lists([item["examples"] for item in batch])
        n_examples_list = [item["n_examples"] for item in batch]  # (B, )
        # tokenize the question text for every example
        enc = self.tokenizer.batch_encode_plus(
            [ex["text_str"] for ex in text_examples],
            max_length=self.max_length,
            pad_to_max_length=True,
            return_tensors="pt"
        )
        if text_examples[0]["labels"] is None:
            labels = None
        else:
            labels = default_collate(
                [ex["labels"] for ex in text_examples])  # (B, #ans)
        question_ids = [ex["question_id"] for ex in text_examples]
        return dict(
            visual_inputs=visual_inputs,  # (B, #frm=1 or T, H, W, C)
            text_input_ids=enc.input_ids,  # (B, L)
            text_input_mask=enc.attention_mask,  # (B, L)
            question_ids=question_ids,
            labels=labels,
            n_examples_list=n_examples_list  # used to create image feature copies.
        )
| 38.820946
| 89
| 0.579062
| 1,432
| 11,491
| 4.395251
| 0.142458
| 0.025421
| 0.015253
| 0.010486
| 0.946934
| 0.946934
| 0.946934
| 0.935812
| 0.935812
| 0.935812
| 0
| 0.019975
| 0.315986
| 11,491
| 295
| 90
| 38.952542
| 0.780789
| 0.206596
| 0
| 0.871287
| 0
| 0
| 0.047608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.079208
| false
| 0
| 0.024752
| 0.019802
| 0.183168
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac130c22e628000bf167f1273e936dc063867f36
| 175
|
py
|
Python
|
ccal/unmount_volume.py
|
kberkey/ccal
|
92aa8372997dccec2908928f71a11b6c8327d7aa
|
[
"MIT"
] | null | null | null |
ccal/unmount_volume.py
|
kberkey/ccal
|
92aa8372997dccec2908928f71a11b6c8327d7aa
|
[
"MIT"
] | null | null | null |
ccal/unmount_volume.py
|
kberkey/ccal
|
92aa8372997dccec2908928f71a11b6c8327d7aa
|
[
"MIT"
] | null | null | null |
from .run_command import run_command
def unmount_volume(volume_name_or_mount_directory_path):
    """Unmount a volume with ``sudo umount``.

    :param volume_name_or_mount_directory_path: volume name or mount
        directory path to pass to ``umount``
    """
    # Quote the path so spaces or shell metacharacters in a mount
    # directory name cannot be re-interpreted by the shell.
    from shlex import quote

    run_command("sudo umount {}".format(
        quote(volume_name_or_mount_directory_path)))
| 25
| 77
| 0.834286
| 26
| 175
| 5.076923
| 0.576923
| 0.227273
| 0.181818
| 0.257576
| 0.454545
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091429
| 175
| 6
| 78
| 29.166667
| 0.830189
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ac6d52fca469a68d8ab2732f55016976f6d11b69
| 112,665
|
py
|
Python
|
metal_python/api/machine_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | 7
|
2020-12-21T05:24:24.000Z
|
2022-02-12T20:55:32.000Z
|
metal_python/api/machine_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | 6
|
2020-09-16T07:23:34.000Z
|
2022-01-18T12:05:30.000Z
|
metal_python/api/machine_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
metal-api
API to manage and control plane resources like machines, switches, operating system images, machine sizes, networks, IP addresses and more # noqa: E501
OpenAPI spec version: v0.15.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal_python.api_client import ApiClient
class MachineApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def abort_reinstall_machine(self, id, body, **kwargs): # noqa: E501
"""abort reinstall this machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.abort_reinstall_machine(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineAbortReinstallRequest body: (required)
:return: V1BootInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.abort_reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.abort_reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
return data
    def abort_reinstall_machine_with_http_info(self, id, body, **kwargs):  # noqa: E501
        """abort reinstall this machine  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.abort_reinstall_machine_with_http_info(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: identifier of the machine (required)
        :param V1MachineAbortReinstallRequest body: (required)
        :return: V1BootInfo
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots id/body plus the kwargs dict; validated kwargs
        # are folded into `params` and the raw dict is dropped below
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method abort_reinstall_machine" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `abort_reinstall_machine`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `abort_reinstall_machine`")  # noqa: E501

        collection_formats = {}

        # {id} in the URL template is substituted from path_params
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['HMAC', 'jwt']  # noqa: E501

        return self.api_client.call_api(
            '/v1/machine/{id}/abort-reinstall', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1BootInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def add_provisioning_event(self, id, body, **kwargs): # noqa: E501
"""adds a machine provisioning event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_provisioning_event(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineProvisioningEvent body: (required)
:return: V1MachineRecentProvisioningEvents
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_provisioning_event_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.add_provisioning_event_with_http_info(id, body, **kwargs) # noqa: E501
return data
    def add_provisioning_event_with_http_info(self, id, body, **kwargs):  # noqa: E501
        """adds a machine provisioning event  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_provisioning_event_with_http_info(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: identifier of the machine (required)
        :param V1MachineProvisioningEvent body: (required)
        :return: V1MachineRecentProvisioningEvents
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots id/body plus the kwargs dict; validated kwargs
        # are folded into `params` and the raw dict is dropped below
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_provisioning_event" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `add_provisioning_event`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `add_provisioning_event`")  # noqa: E501

        collection_formats = {}

        # {id} in the URL template is substituted from path_params
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['HMAC', 'jwt']  # noqa: E501

        return self.api_client.call_api(
            '/v1/machine/{id}/event', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1MachineRecentProvisioningEvents',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def allocate_machine(self, body, **kwargs): # noqa: E501
"""allocate a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.allocate_machine(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineAllocateRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.allocate_machine_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.allocate_machine_with_http_info(body, **kwargs) # noqa: E501
return data
    def allocate_machine_with_http_info(self, body, **kwargs):  # noqa: E501
        """allocate a machine  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.allocate_machine_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param V1MachineAllocateRequest body: (required)
        :return: V1MachineResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots body plus the kwargs dict; validated kwargs
        # are folded into `params` and the raw dict is dropped below
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method allocate_machine" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `allocate_machine`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['HMAC', 'jwt']  # noqa: E501

        return self.api_client.call_api(
            '/v1/machine/allocate', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1MachineResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def chassis_identify_led_off(self, id, body, **kwargs): # noqa: E501
"""sends a power-off to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_off(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:param str description: reason why the chassis identify LED has been turned off
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.chassis_identify_led_off_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.chassis_identify_led_off_with_http_info(id, body, **kwargs) # noqa: E501
return data
    def chassis_identify_led_off_with_http_info(self, id, body, **kwargs):  # noqa: E501
        """sends a power-off to the chassis identify LED  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.chassis_identify_led_off_with_http_info(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: identifier of the machine (required)
        :param V1EmptyBody body: (required)
        :param str description: reason why the chassis identify LED has been turned off
        :return: V1MachineResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body', 'description']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots id/body plus the kwargs dict; validated kwargs
        # are folded into `params` and the raw dict is dropped below
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method chassis_identify_led_off" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `chassis_identify_led_off`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `chassis_identify_led_off`")  # noqa: E501

        collection_formats = {}

        # {id} in the URL template is substituted from path_params
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        # optional `description` travels as a query parameter
        query_params = []
        if 'description' in params:
            query_params.append(('description', params['description']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['HMAC', 'jwt']  # noqa: E501

        return self.api_client.call_api(
            '/v1/machine/{id}/power/chassis-identify-led-off', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1MachineResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def chassis_identify_led_on(self, id, body, **kwargs): # noqa: E501
"""sends a power-on to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_on(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:param str description: identifier of the machine
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.chassis_identify_led_on_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.chassis_identify_led_on_with_http_info(id, body, **kwargs) # noqa: E501
return data
    def chassis_identify_led_on_with_http_info(self, id, body, **kwargs):  # noqa: E501
        """sends a power-on to the chassis identify LED  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.chassis_identify_led_on_with_http_info(id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: identifier of the machine (required)
        :param V1EmptyBody body: (required)
        :param str description: identifier of the machine
        :return: V1MachineResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body', 'description']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots id/body plus the kwargs dict; validated kwargs
        # are folded into `params` and the raw dict is dropped below
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method chassis_identify_led_on" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `chassis_identify_led_on`")  # noqa: E501
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `chassis_identify_led_on`")  # noqa: E501

        collection_formats = {}

        # {id} in the URL template is substituted from path_params
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        # optional `description` travels as a query parameter
        query_params = []
        if 'description' in params:
            query_params.append(('description', params['description']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['HMAC', 'jwt']  # noqa: E501

        return self.api_client.call_api(
            '/v1/machine/{id}/power/chassis-identify-led-on', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1MachineResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_machine(self, id, **kwargs):  # noqa: E501
    """deletes a machine from the database  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_machine(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use delete_machine_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.delete_machine_with_http_info(id, **kwargs)  # noqa: E501
def delete_machine_with_http_info(self, id, **kwargs):  # noqa: E501
    """deletes a machine from the database  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_machine_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_machine" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `delete_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}', 'DELETE',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def finalize_allocation(self, id, body, **kwargs):  # noqa: E501
    """finalize the allocation of the machine by reconfiguring the switch, sent on successful image installation  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.finalize_allocation(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1MachineFinalizeAllocationRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use finalize_allocation_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.finalize_allocation_with_http_info(id, body, **kwargs)  # noqa: E501
def finalize_allocation_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """finalize the allocation of the machine by reconfiguring the switch, sent on successful image installation  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.finalize_allocation_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1MachineFinalizeAllocationRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method finalize_allocation" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `finalize_allocation`")  # noqa: E501
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `finalize_allocation`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/finalize-allocation', 'POST',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def find_ipmi_machine(self, id, **kwargs):  # noqa: E501
    """returns a machine including the ipmi connection data  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_ipmi_machine(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineIPMIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use find_ipmi_machine_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.find_ipmi_machine_with_http_info(id, **kwargs)  # noqa: E501
def find_ipmi_machine_with_http_info(self, id, **kwargs):  # noqa: E501
    """returns a machine including the ipmi connection data  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_ipmi_machine_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineIPMIResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_ipmi_machine" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `find_ipmi_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/ipmi', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1MachineIPMIResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def find_ipmi_machines(self, body, **kwargs):  # noqa: E501
    """returns machines including the ipmi connection data  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_ipmi_machines(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineFindRequest body: (required)
    :return: list[V1MachineIPMIResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use find_ipmi_machines_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.find_ipmi_machines_with_http_info(body, **kwargs)  # noqa: E501
def find_ipmi_machines_with_http_info(self, body, **kwargs):  # noqa: E501
    """returns machines including the ipmi connection data  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_ipmi_machines_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineFindRequest body: (required)
    :return: list[V1MachineIPMIResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_ipmi_machines" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `find_ipmi_machines`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/ipmi/find', 'POST',
        {},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='list[V1MachineIPMIResponse]',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def find_machine(self, id, **kwargs):  # noqa: E501
    """get machine by id  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_machine(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use find_machine_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.find_machine_with_http_info(id, **kwargs)  # noqa: E501
def find_machine_with_http_info(self, id, **kwargs):  # noqa: E501
    """get machine by id  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_machine_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_machine" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `find_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def find_machines(self, body, **kwargs):  # noqa: E501
    """find machines by multiple criteria  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_machines(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineFindRequest body: (required)
    :return: list[V1MachineResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use find_machines_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.find_machines_with_http_info(body, **kwargs)  # noqa: E501
def find_machines_with_http_info(self, body, **kwargs):  # noqa: E501
    """find machines by multiple criteria  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_machines_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineFindRequest body: (required)
    :return: list[V1MachineResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_machines" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `find_machines`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/find', 'POST',
        {},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='list[V1MachineResponse]',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def free_machine(self, id, **kwargs):  # noqa: E501
    """free a machine  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.free_machine(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use free_machine_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.free_machine_with_http_info(id, **kwargs)  # noqa: E501
def free_machine_with_http_info(self, id, **kwargs):  # noqa: E501
    """free a machine  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.free_machine_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method free_machine" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `free_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/free', 'DELETE',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_machine_console_password(self, body, **kwargs):  # noqa: E501
    """get consolepassword for machine by id  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_machine_console_password(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineConsolePasswordRequest body: (required)
    :return: V1MachineConsolePasswordResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data; use
    # get_machine_console_password_with_http_info for status and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.get_machine_console_password_with_http_info(body, **kwargs)  # noqa: E501
def get_machine_console_password_with_http_info(self, body, **kwargs):  # noqa: E501
    """get consolepassword for machine by id  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_machine_console_password_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineConsolePasswordRequest body: (required)
    :return: V1MachineConsolePasswordResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_machine_console_password" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `get_machine_console_password`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    # NOTE: the generated spec sends a request body with a GET here;
    # kept as-is to match the server's API definition.
    return self.api_client.call_api(
        '/v1/machine/consolepassword', 'GET',
        {},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineConsolePasswordResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_provisioning_event_container(self, id, **kwargs):  # noqa: E501
    """get the current machine provisioning event container  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_provisioning_event_container(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineRecentProvisioningEvents
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data; use
    # get_provisioning_event_container_with_http_info for status/headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.get_provisioning_event_container_with_http_info(id, **kwargs)  # noqa: E501
def get_provisioning_event_container_with_http_info(self, id, **kwargs):  # noqa: E501
    """get the current machine provisioning event container  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_provisioning_event_container_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :return: V1MachineRecentProvisioningEvents
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_provisioning_event_container" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_provisioning_event_container`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/event', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1MachineRecentProvisioningEvents',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def ipmi_report(self, body, **kwargs):  # noqa: E501
    """reports IPMI ip addresses leased by a management server for machines  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.ipmi_report(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineIpmiReports body: (required)
    :return: V1MachineIpmiReportResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use ipmi_report_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.ipmi_report_with_http_info(body, **kwargs)  # noqa: E501
def ipmi_report_with_http_info(self, body, **kwargs):  # noqa: E501
    """reports IPMI ip addresses leased by a management server for machines  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.ipmi_report_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineIpmiReports body: (required)
    :return: V1MachineIpmiReportResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted in addition to the named parameters.
    recognized = ('body', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ipmi_report" % key
            )
        params[key] = val
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `ipmi_report`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/ipmi', 'POST',
        {},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineIpmiReportResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_machines(self, **kwargs):  # noqa: E501
    """get all known machines  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_machines(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[V1MachineResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps to the response data;
    # use list_machines_with_http_info for status code and headers.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the inner call returns the request thread,
    # otherwise the deserialized data -- either way just pass it through.
    return self.list_machines_with_http_info(**kwargs)  # noqa: E501
def list_machines_with_http_info(self, **kwargs):  # noqa: E501
    """get all known machines  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_machines_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[V1MachineResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the generic transport options are accepted as kwargs.
    recognized = ('async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_machines" % key
            )
        params[key] = val

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine', 'GET',
        {},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[V1MachineResponse]',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_bios(self, id, body, **kwargs):  # noqa: E501
    """boots machine into BIOS  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_bios(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_bios_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_bios_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """boots machine into BIOS  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_bios_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_bios" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_bios`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_bios`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/bios', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_cycle(self, id, body, **kwargs):  # noqa: E501
    """sends a power cycle to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_cycle(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_cycle_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_cycle_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """sends a power cycle to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_cycle_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_cycle" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_cycle`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_cycle`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/cycle', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_disk(self, id, body, **kwargs):  # noqa: E501
    """boots machine from disk  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_disk(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_disk_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_disk_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """boots machine from disk  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_disk_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_disk" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_disk`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_disk`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/disk', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_off(self, id, body, **kwargs):  # noqa: E501
    """sends a power-off to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_off(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_off_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_off_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """sends a power-off to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_off_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_off" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_off`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_off`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/off', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_on(self, id, body, **kwargs):  # noqa: E501
    """sends a power-on to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_on(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_on_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_on_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """sends a power-on to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_on_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_on" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_on`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_on`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/on', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_pxe(self, id, body, **kwargs):  # noqa: E501
    """boots machine from PXE  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_pxe(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_pxe_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_pxe_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """boots machine from PXE  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_pxe_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_pxe" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_pxe`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_pxe`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/pxe', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def machine_reset(self, id, body, **kwargs):  # noqa: E501
    """sends a reset to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_reset(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.machine_reset_with_http_info(id, body, **kwargs)  # noqa: E501
def machine_reset_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """sends a reset to the machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.machine_reset_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1EmptyBody body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method machine_reset" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `machine_reset`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `machine_reset`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/power/reset', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def register_machine(self, body, **kwargs):  # noqa: E501
    """register a machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.register_machine(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineRegisterRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.register_machine_with_http_info(body, **kwargs)  # noqa: E501
def register_machine_with_http_info(self, body, **kwargs):  # noqa: E501
    """register a machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.register_machine_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1MachineRegisterRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method register_machine" % key
            )
        params[key] = val
    # The request body is mandatory for this endpoint.
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `register_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/register', 'POST',
        {},  # path params
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def reinstall_machine(self, id, body, **kwargs):  # noqa: E501
    """reinstall this machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.reinstall_machine(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1MachineReinstallRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.reinstall_machine_with_http_info(id, body, **kwargs)  # noqa: E501
def reinstall_machine_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """reinstall this machine  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.reinstall_machine_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1MachineReinstallRequest body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reinstall_machine" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `reinstall_machine`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `reinstall_machine`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/reinstall', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_chassis_identify_led_state(self, id, body, **kwargs):  # noqa: E501
    """set the state of a chassis identify LED  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.set_chassis_identify_led_state(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1ChassisIdentifyLEDState body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields the deserialized response
    # only (or the request thread when async_req is set) — both branches
    # of the generated code return the same call result.
    kwargs['_return_http_data_only'] = True
    return self.set_chassis_identify_led_state_with_http_info(id, body, **kwargs)  # noqa: E501
def set_chassis_identify_led_state_with_http_info(self, id, body, **kwargs):  # noqa: E501
    """set the state of a chassis identify LED  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.set_chassis_identify_led_state_with_http_info(id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the machine (required)
    :param V1ChassisIdentifyLEDState body: (required)
    :return: V1MachineResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the generic client request options.
    allowed = ['id', 'body', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'id': id, 'body': body}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_chassis_identify_led_state" % key
            )
        params[key] = val
    # Both positional parameters are mandatory for this endpoint.
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `set_chassis_identify_led_state`")  # noqa: E501
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `set_chassis_identify_led_state`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/machine/{id}/chassis-identify-led-state', 'POST',
        {'id': params['id']},  # path params
        [],                    # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1MachineResponse',  # noqa: E501
        auth_settings=['HMAC', 'jwt'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_machine_state(self, id, body, **kwargs): # noqa: E501
"""set the state of a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_machine_state(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_machine_state_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.set_machine_state_with_http_info(id, body, **kwargs) # noqa: E501
return data
def set_machine_state_with_http_info(self, id, body, **kwargs): # noqa: E501
"""set the state of a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_machine_state_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_machine_state" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_machine_state`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `set_machine_state`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/state', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware(self, id, body, **kwargs): # noqa: E501
"""sends a firmware command to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineUpdateFirmwareRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_firmware_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_firmware_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_firmware_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a firmware command to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineUpdateFirmwareRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_firmware`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_firmware`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/update-firmware/{id}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 39.13338
| 156
| 0.599973
| 12,771
| 112,665
| 5.063503
| 0.019027
| 0.049856
| 0.024248
| 0.031176
| 0.983531
| 0.980175
| 0.976541
| 0.973541
| 0.971685
| 0.966953
| 0
| 0.017589
| 0.306155
| 112,665
| 2,878
| 157
| 39.146977
| 0.809638
| 0.312085
| 0
| 0.84125
| 0
| 0
| 0.185479
| 0.044843
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035625
| false
| 0.005
| 0.0025
| 0
| 0.09125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac70c91ebfff3b57245ec74c41cfaacbb3d728cc
| 45
|
py
|
Python
|
address.py
|
asumit499/Python-BootCamp
|
0b99f9cb862189d13ad291eac12a8be6c46357f5
|
[
"MIT"
] | 4
|
2022-03-20T10:59:53.000Z
|
2022-03-25T18:28:04.000Z
|
address.py
|
asumit499/Python-BootCamp
|
0b99f9cb862189d13ad291eac12a8be6c46357f5
|
[
"MIT"
] | null | null | null |
address.py
|
asumit499/Python-BootCamp
|
0b99f9cb862189d13ad291eac12a8be6c46357f5
|
[
"MIT"
] | 15
|
2022-03-12T11:49:10.000Z
|
2022-03-15T06:22:55.000Z
|
print("Address-\nMuzaffarpur\nBihar\nIndia")
| 22.5
| 44
| 0.8
| 5
| 45
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 45
| 1
| 45
| 45
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ac77518a49d6d3f5b71c1c09d98d2c9ef8a3c2c4
| 248,169
|
py
|
Python
|
wflow/openda_bmi/openda/bmi/thrift/BMIService.py
|
quanpands/wflow
|
b454a55e4a63556eaac3fbabd97f8a0b80901e5a
|
[
"MIT"
] | null | null | null |
wflow/openda_bmi/openda/bmi/thrift/BMIService.py
|
quanpands/wflow
|
b454a55e4a63556eaac3fbabd97f8a0b80901e5a
|
[
"MIT"
] | null | null | null |
wflow/openda_bmi/openda/bmi/thrift/BMIService.py
|
quanpands/wflow
|
b454a55e4a63556eaac3fbabd97f8a0b80901e5a
|
[
"MIT"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(object):
def initialize(self, file):
"""
Parameters:
- file
"""
def update(self,):
pass
def update_until(self, time):
"""
Parameters:
- time
"""
def update_frac(self, frac):
"""
Parameters:
- frac
"""
def finalize_model(self,):
pass
def get_component_name(self,):
pass
def get_input_var_names(self,):
pass
def get_output_var_names(self,):
pass
def get_var_type(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_var_units(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_var_rank(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_var_size(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_var_nbytes(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_start_time(self,):
pass
def get_current_time(self,):
pass
def get_end_time(self,):
pass
def get_time_step(self,):
pass
def get_time_units(self,):
pass
def get_value(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_value_at_indices(self, long_var_name, inds):
"""
Parameters:
- long_var_name
- inds
"""
def set_value(self, long_var_name, src):
"""
Parameters:
- long_var_name
- src
"""
def set_value_at_indices(self, long_var_name, inds, src):
"""
Parameters:
- long_var_name
- inds
- src
"""
def get_grid_type(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_shape(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_spacing(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_origin(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_x(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_y(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_z(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_connectivity(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def get_grid_offset(self, long_var_name):
"""
Parameters:
- long_var_name
"""
def initialize_config(self, file):
"""
Parameters:
- file
"""
def initialize_model(self,):
pass
def set_start_time(self, start_time):
"""
Parameters:
- start_time
"""
def set_end_time(self, end_time):
"""
Parameters:
- end_time
"""
def get_attribute_names(self,):
pass
def get_attribute_value(self, attribute_name):
"""
Parameters:
- attribute_name
"""
def set_attribute_value(self, attribute_name, attribute_value):
"""
Parameters:
- attribute_name
- attribute_value
"""
def save_state(self, destination_directory):
"""
Parameters:
- destination_directory
"""
def load_state(self, source_directory):
"""
Parameters:
- source_directory
"""
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def initialize(self, file):
"""
Parameters:
- file
"""
self.send_initialize(file)
self.recv_initialize()
def send_initialize(self, file):
self._oprot.writeMessageBegin("initialize", TMessageType.CALL, self._seqid)
args = initialize_args()
args.file = file
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_initialize(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = initialize_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def update(self,):
self.send_update()
self.recv_update()
def send_update(self,):
self._oprot.writeMessageBegin("update", TMessageType.CALL, self._seqid)
args = update_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_update(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = update_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def update_until(self, time):
"""
Parameters:
- time
"""
self.send_update_until(time)
self.recv_update_until()
def send_update_until(self, time):
self._oprot.writeMessageBegin("update_until", TMessageType.CALL, self._seqid)
args = update_until_args()
args.time = time
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_update_until(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = update_until_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def update_frac(self, frac):
"""
Parameters:
- frac
"""
self.send_update_frac(frac)
self.recv_update_frac()
def send_update_frac(self, frac):
self._oprot.writeMessageBegin("update_frac", TMessageType.CALL, self._seqid)
args = update_frac_args()
args.frac = frac
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_update_frac(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = update_frac_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def finalize_model(self,):
self.send_finalize_model()
self.recv_finalize_model()
def send_finalize_model(self,):
self._oprot.writeMessageBegin("finalize_model", TMessageType.CALL, self._seqid)
args = finalize_model_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_finalize_model(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = finalize_model_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def get_component_name(self,):
self.send_get_component_name()
return self.recv_get_component_name()
def send_get_component_name(self,):
self._oprot.writeMessageBegin(
"get_component_name", TMessageType.CALL, self._seqid
)
args = get_component_name_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_component_name(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_component_name_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_component_name failed: unknown result",
)
def get_input_var_names(self,):
self.send_get_input_var_names()
return self.recv_get_input_var_names()
def send_get_input_var_names(self,):
self._oprot.writeMessageBegin(
"get_input_var_names", TMessageType.CALL, self._seqid
)
args = get_input_var_names_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_input_var_names(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_input_var_names_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_input_var_names failed: unknown result",
)
def get_output_var_names(self,):
self.send_get_output_var_names()
return self.recv_get_output_var_names()
def send_get_output_var_names(self,):
self._oprot.writeMessageBegin(
"get_output_var_names", TMessageType.CALL, self._seqid
)
args = get_output_var_names_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_output_var_names(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_output_var_names_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_output_var_names failed: unknown result",
)
def get_var_type(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_var_type(long_var_name)
return self.recv_get_var_type()
def send_get_var_type(self, long_var_name):
self._oprot.writeMessageBegin("get_var_type", TMessageType.CALL, self._seqid)
args = get_var_type_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_var_type(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_var_type_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_var_type failed: unknown result"
)
def get_var_units(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_var_units(long_var_name)
return self.recv_get_var_units()
def send_get_var_units(self, long_var_name):
self._oprot.writeMessageBegin("get_var_units", TMessageType.CALL, self._seqid)
args = get_var_units_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_var_units(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_var_units_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_var_units failed: unknown result"
)
def get_var_rank(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_var_rank(long_var_name)
return self.recv_get_var_rank()
def send_get_var_rank(self, long_var_name):
self._oprot.writeMessageBegin("get_var_rank", TMessageType.CALL, self._seqid)
args = get_var_rank_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_var_rank(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_var_rank_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_var_rank failed: unknown result"
)
def get_var_size(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_var_size(long_var_name)
return self.recv_get_var_size()
def send_get_var_size(self, long_var_name):
self._oprot.writeMessageBegin("get_var_size", TMessageType.CALL, self._seqid)
args = get_var_size_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_var_size(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_var_size_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_var_size failed: unknown result"
)
def get_var_nbytes(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_var_nbytes(long_var_name)
return self.recv_get_var_nbytes()
def send_get_var_nbytes(self, long_var_name):
self._oprot.writeMessageBegin("get_var_nbytes", TMessageType.CALL, self._seqid)
args = get_var_nbytes_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_var_nbytes(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_var_nbytes_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_var_nbytes failed: unknown result",
)
def get_start_time(self,):
self.send_get_start_time()
return self.recv_get_start_time()
def send_get_start_time(self,):
self._oprot.writeMessageBegin("get_start_time", TMessageType.CALL, self._seqid)
args = get_start_time_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_start_time(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_start_time_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_start_time failed: unknown result",
)
def get_current_time(self,):
self.send_get_current_time()
return self.recv_get_current_time()
def send_get_current_time(self,):
self._oprot.writeMessageBegin(
"get_current_time", TMessageType.CALL, self._seqid
)
args = get_current_time_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_current_time(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_current_time_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_current_time failed: unknown result",
)
def get_end_time(self,):
self.send_get_end_time()
return self.recv_get_end_time()
def send_get_end_time(self,):
self._oprot.writeMessageBegin("get_end_time", TMessageType.CALL, self._seqid)
args = get_end_time_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_end_time(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_end_time_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_end_time failed: unknown result"
)
def get_time_step(self,):
self.send_get_time_step()
return self.recv_get_time_step()
def send_get_time_step(self,):
self._oprot.writeMessageBegin("get_time_step", TMessageType.CALL, self._seqid)
args = get_time_step_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_time_step(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_time_step_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_time_step failed: unknown result"
)
def get_time_units(self,):
self.send_get_time_units()
return self.recv_get_time_units()
def send_get_time_units(self,):
self._oprot.writeMessageBegin("get_time_units", TMessageType.CALL, self._seqid)
args = get_time_units_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_time_units(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_time_units_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_time_units failed: unknown result",
)
def get_value(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_value(long_var_name)
return self.recv_get_value()
def send_get_value(self, long_var_name):
self._oprot.writeMessageBegin("get_value", TMessageType.CALL, self._seqid)
args = get_value_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_value(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_value_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_value failed: unknown result"
)
def get_value_at_indices(self, long_var_name, inds):
"""
Parameters:
- long_var_name
- inds
"""
self.send_get_value_at_indices(long_var_name, inds)
return self.recv_get_value_at_indices()
def send_get_value_at_indices(self, long_var_name, inds):
self._oprot.writeMessageBegin(
"get_value_at_indices", TMessageType.CALL, self._seqid
)
args = get_value_at_indices_args()
args.long_var_name = long_var_name
args.inds = inds
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_value_at_indices(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_value_at_indices_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_value_at_indices failed: unknown result",
)
def set_value(self, long_var_name, src):
"""
Parameters:
- long_var_name
- src
"""
self.send_set_value(long_var_name, src)
self.recv_set_value()
def send_set_value(self, long_var_name, src):
self._oprot.writeMessageBegin("set_value", TMessageType.CALL, self._seqid)
args = set_value_args()
args.long_var_name = long_var_name
args.src = src
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_set_value(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = set_value_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def set_value_at_indices(self, long_var_name, inds, src):
"""
Parameters:
- long_var_name
- inds
- src
"""
self.send_set_value_at_indices(long_var_name, inds, src)
self.recv_set_value_at_indices()
def send_set_value_at_indices(self, long_var_name, inds, src):
self._oprot.writeMessageBegin(
"set_value_at_indices", TMessageType.CALL, self._seqid
)
args = set_value_at_indices_args()
args.long_var_name = long_var_name
args.inds = inds
args.src = src
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_set_value_at_indices(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = set_value_at_indices_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def get_grid_type(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_grid_type(long_var_name)
return self.recv_get_grid_type()
def send_get_grid_type(self, long_var_name):
self._oprot.writeMessageBegin("get_grid_type", TMessageType.CALL, self._seqid)
args = get_grid_type_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_grid_type(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_grid_type_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(
TApplicationException.MISSING_RESULT, "get_grid_type failed: unknown result"
)
def get_grid_shape(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_grid_shape(long_var_name)
return self.recv_get_grid_shape()
def send_get_grid_shape(self, long_var_name):
self._oprot.writeMessageBegin("get_grid_shape", TMessageType.CALL, self._seqid)
args = get_grid_shape_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_grid_shape(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_grid_shape_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_grid_shape failed: unknown result",
)
def get_grid_spacing(self, long_var_name):
"""
Parameters:
- long_var_name
"""
self.send_get_grid_spacing(long_var_name)
return self.recv_get_grid_spacing()
def send_get_grid_spacing(self, long_var_name):
self._oprot.writeMessageBegin(
"get_grid_spacing", TMessageType.CALL, self._seqid
)
args = get_grid_spacing_args()
args.long_var_name = long_var_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_grid_spacing(self,):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_grid_spacing_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(
TApplicationException.MISSING_RESULT,
"get_grid_spacing failed: unknown result",
)
    def get_grid_origin(self, long_var_name):
        """Thrift client stub: fetch the grid origin for a model variable.

        Sends a ``get_grid_origin`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_origin(long_var_name)
        return self.recv_get_grid_origin()
    def send_get_grid_origin(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("get_grid_origin", TMessageType.CALL, self._seqid)
        args = get_grid_origin_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_origin(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_origin_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "get_grid_origin failed: unknown result",
        )
    def get_grid_x(self, long_var_name):
        """Thrift client stub: fetch the grid x-coordinates for a model variable.

        Sends a ``get_grid_x`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_x(long_var_name)
        return self.recv_get_grid_x()
    def send_get_grid_x(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("get_grid_x", TMessageType.CALL, self._seqid)
        args = get_grid_x_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_x(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_x_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT, "get_grid_x failed: unknown result"
        )
    def get_grid_y(self, long_var_name):
        """Thrift client stub: fetch the grid y-coordinates for a model variable.

        Sends a ``get_grid_y`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_y(long_var_name)
        return self.recv_get_grid_y()
    def send_get_grid_y(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("get_grid_y", TMessageType.CALL, self._seqid)
        args = get_grid_y_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_y(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_y_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT, "get_grid_y failed: unknown result"
        )
    def get_grid_z(self, long_var_name):
        """Thrift client stub: fetch the grid z-coordinates for a model variable.

        Sends a ``get_grid_z`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_z(long_var_name)
        return self.recv_get_grid_z()
    def send_get_grid_z(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("get_grid_z", TMessageType.CALL, self._seqid)
        args = get_grid_z_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_z(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_z_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT, "get_grid_z failed: unknown result"
        )
    def get_grid_connectivity(self, long_var_name):
        """Thrift client stub: fetch the grid connectivity for a model variable.

        Sends a ``get_grid_connectivity`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_connectivity(long_var_name)
        return self.recv_get_grid_connectivity()
    def send_get_grid_connectivity(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin(
            "get_grid_connectivity", TMessageType.CALL, self._seqid
        )
        args = get_grid_connectivity_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_connectivity(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_connectivity_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "get_grid_connectivity failed: unknown result",
        )
    def get_grid_offset(self, long_var_name):
        """Thrift client stub: fetch the grid offsets for a model variable.

        Sends a ``get_grid_offset`` call and blocks on the reply.

        Parameters:
         - long_var_name
        """
        self.send_get_grid_offset(long_var_name)
        return self.recv_get_grid_offset()
    def send_get_grid_offset(self, long_var_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("get_grid_offset", TMessageType.CALL, self._seqid)
        args = get_grid_offset_args()
        args.long_var_name = long_var_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_grid_offset(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_grid_offset_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "get_grid_offset failed: unknown result",
        )
    def initialize_config(self, file):
        """Thrift client stub: initialize the model's configuration from *file*.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - file
        """
        self.send_initialize_config(file)
        self.recv_initialize_config()
    def send_initialize_config(self, file):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin(
            "initialize_config", TMessageType.CALL, self._seqid
        )
        args = initialize_config_args()
        args.file = file
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_initialize_config(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = initialize_config_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def initialize_model(self,):
        """Thrift client stub: initialize the model (no arguments).

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.
        """
        self.send_initialize_model()
        self.recv_initialize_model()
    def send_initialize_model(self,):
        # Serialize the call: message header, empty args struct, then flush.
        self._oprot.writeMessageBegin(
            "initialize_model", TMessageType.CALL, self._seqid
        )
        args = initialize_model_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_initialize_model(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = initialize_model_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def set_start_time(self, start_time):
        """Thrift client stub: set the model's start time.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - start_time
        """
        self.send_set_start_time(start_time)
        self.recv_set_start_time()
    def send_set_start_time(self, start_time):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("set_start_time", TMessageType.CALL, self._seqid)
        args = set_start_time_args()
        args.start_time = start_time
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_set_start_time(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = set_start_time_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def set_end_time(self, end_time):
        """Thrift client stub: set the model's end time.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - end_time
        """
        self.send_set_end_time(end_time)
        self.recv_set_end_time()
    def send_set_end_time(self, end_time):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("set_end_time", TMessageType.CALL, self._seqid)
        args = set_end_time_args()
        args.end_time = end_time
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_set_end_time(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = set_end_time_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def get_attribute_names(self,):
        """Thrift client stub: fetch the model's attribute names (no arguments).

        Sends a ``get_attribute_names`` call and blocks on the reply.
        """
        self.send_get_attribute_names()
        return self.recv_get_attribute_names()
    def send_get_attribute_names(self,):
        # Serialize the call: message header, empty args struct, then flush.
        self._oprot.writeMessageBegin(
            "get_attribute_names", TMessageType.CALL, self._seqid
        )
        args = get_attribute_names_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_attribute_names(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_attribute_names_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # A reply without a success value means the server produced no result.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "get_attribute_names failed: unknown result",
        )
    def get_attribute_value(self, attribute_name):
        """Thrift client stub: fetch the value of a named model attribute.

        Sends a ``get_attribute_value`` call and blocks on the reply; raises
        the server-returned ModelException, if any.

        Parameters:
         - attribute_name
        """
        self.send_get_attribute_value(attribute_name)
        return self.recv_get_attribute_value()
    def send_get_attribute_value(self, attribute_name):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin(
            "get_attribute_value", TMessageType.CALL, self._seqid
        )
        args = get_attribute_value_args()
        args.attribute_name = attribute_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_attribute_value(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_attribute_value_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        # Neither success nor error set: the server produced no result at all.
        raise TApplicationException(
            TApplicationException.MISSING_RESULT,
            "get_attribute_value failed: unknown result",
        )
    def set_attribute_value(self, attribute_name, attribute_value):
        """Thrift client stub: set the value of a named model attribute.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - attribute_name
         - attribute_value
        """
        self.send_set_attribute_value(attribute_name, attribute_value)
        self.recv_set_attribute_value()
    def send_set_attribute_value(self, attribute_name, attribute_value):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin(
            "set_attribute_value", TMessageType.CALL, self._seqid
        )
        args = set_attribute_value_args()
        args.attribute_name = attribute_name
        args.attribute_value = attribute_value
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_set_attribute_value(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = set_attribute_value_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def save_state(self, destination_directory):
        """Thrift client stub: ask the model to save its state to a directory.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - destination_directory
        """
        self.send_save_state(destination_directory)
        self.recv_save_state()
    def send_save_state(self, destination_directory):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("save_state", TMessageType.CALL, self._seqid)
        args = save_state_args()
        args.destination_directory = destination_directory
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_save_state(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = save_state_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
    def load_state(self, source_directory):
        """Thrift client stub: ask the model to load its state from a directory.

        Void RPC — blocks until the server replies; raises the returned
        ModelException, if any.

        Parameters:
         - source_directory
        """
        self.send_load_state(source_directory)
        self.recv_load_state()
    def send_load_state(self, source_directory):
        # Serialize the call: message header, args struct, then flush the transport.
        self._oprot.writeMessageBegin("load_state", TMessageType.CALL, self._seqid)
        args = load_state_args()
        args.source_directory = source_directory
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_load_state(self,):
        # Read the reply; a server-side TApplicationException is re-raised here.
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = load_state_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        # Re-raise the application-level exception the server packed into the result.
        if result.error is not None:
            raise result.error
        return
class Processor(Iface, TProcessor):
    """Server-side dispatcher (Thrift-generated) for the model service.

    Maps incoming RPC names to ``process_*`` handler methods, each of which
    decodes the request args, invokes the wrapped ``handler``, and writes a
    REPLY message back on the output protocol.
    """
    def __init__(self, handler):
        # handler: the user-supplied Iface implementation the RPCs delegate to.
        self._handler = handler
        # RPC name -> unbound Processor method; consulted by process() below.
        self._processMap = {}
        self._processMap["initialize"] = Processor.process_initialize
        self._processMap["update"] = Processor.process_update
        self._processMap["update_until"] = Processor.process_update_until
        self._processMap["update_frac"] = Processor.process_update_frac
        self._processMap["finalize_model"] = Processor.process_finalize_model
        self._processMap["get_component_name"] = Processor.process_get_component_name
        self._processMap["get_input_var_names"] = Processor.process_get_input_var_names
        self._processMap[
            "get_output_var_names"
        ] = Processor.process_get_output_var_names
        self._processMap["get_var_type"] = Processor.process_get_var_type
        self._processMap["get_var_units"] = Processor.process_get_var_units
        self._processMap["get_var_rank"] = Processor.process_get_var_rank
        self._processMap["get_var_size"] = Processor.process_get_var_size
        self._processMap["get_var_nbytes"] = Processor.process_get_var_nbytes
        self._processMap["get_start_time"] = Processor.process_get_start_time
        self._processMap["get_current_time"] = Processor.process_get_current_time
        self._processMap["get_end_time"] = Processor.process_get_end_time
        self._processMap["get_time_step"] = Processor.process_get_time_step
        self._processMap["get_time_units"] = Processor.process_get_time_units
        self._processMap["get_value"] = Processor.process_get_value
        self._processMap[
            "get_value_at_indices"
        ] = Processor.process_get_value_at_indices
        self._processMap["set_value"] = Processor.process_set_value
        self._processMap[
            "set_value_at_indices"
        ] = Processor.process_set_value_at_indices
        self._processMap["get_grid_type"] = Processor.process_get_grid_type
        self._processMap["get_grid_shape"] = Processor.process_get_grid_shape
        self._processMap["get_grid_spacing"] = Processor.process_get_grid_spacing
        self._processMap["get_grid_origin"] = Processor.process_get_grid_origin
        self._processMap["get_grid_x"] = Processor.process_get_grid_x
        self._processMap["get_grid_y"] = Processor.process_get_grid_y
        self._processMap["get_grid_z"] = Processor.process_get_grid_z
        self._processMap[
            "get_grid_connectivity"
        ] = Processor.process_get_grid_connectivity
        self._processMap["get_grid_offset"] = Processor.process_get_grid_offset
        self._processMap["initialize_config"] = Processor.process_initialize_config
        self._processMap["initialize_model"] = Processor.process_initialize_model
        self._processMap["set_start_time"] = Processor.process_set_start_time
        self._processMap["set_end_time"] = Processor.process_set_end_time
        self._processMap["get_attribute_names"] = Processor.process_get_attribute_names
        self._processMap["get_attribute_value"] = Processor.process_get_attribute_value
        self._processMap["set_attribute_value"] = Processor.process_set_attribute_value
        self._processMap["save_state"] = Processor.process_save_state
        self._processMap["load_state"] = Processor.process_load_state
    def process(self, iprot, oprot):
        """Read one message from *iprot* and dispatch it to its handler.

        Unknown method names are answered with an UNKNOWN_METHOD exception
        message (and return None, per generated-code convention); known
        methods are dispatched and True is returned.
        """
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Drain the unknown request body before replying with an exception.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(
                TApplicationException.UNKNOWN_METHOD, "Unknown function %s" % (name)
            )
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True
    def process_initialize(self, seqid, iprot, oprot):
        # Handler pattern (throwing methods): decode args, invoke the handler,
        # and ship a ModelException back inside the result struct instead of
        # letting it propagate.
        args = initialize_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = initialize_result()
        try:
            self._handler.initialize(args.file)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("initialize", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_update(self, seqid, iprot, oprot):
        args = update_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = update_result()
        try:
            self._handler.update()
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("update", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_update_until(self, seqid, iprot, oprot):
        args = update_until_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = update_until_result()
        try:
            self._handler.update_until(args.time)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("update_until", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_update_frac(self, seqid, iprot, oprot):
        args = update_frac_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = update_frac_result()
        try:
            self._handler.update_frac(args.frac)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("update_frac", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_finalize_model(self, seqid, iprot, oprot):
        args = finalize_model_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = finalize_model_result()
        try:
            self._handler.finalize_model()
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("finalize_model", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_component_name(self, seqid, iprot, oprot):
        args = get_component_name_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_component_name_result()
        try:
            result.success = self._handler.get_component_name()
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("get_component_name", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_input_var_names(self, seqid, iprot, oprot):
        # Handler pattern (non-throwing methods): no except clause, because the
        # service definition declares no exceptions for this method.
        args = get_input_var_names_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_input_var_names_result()
        result.success = self._handler.get_input_var_names()
        oprot.writeMessageBegin("get_input_var_names", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_output_var_names(self, seqid, iprot, oprot):
        args = get_output_var_names_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_output_var_names_result()
        result.success = self._handler.get_output_var_names()
        oprot.writeMessageBegin("get_output_var_names", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_var_type(self, seqid, iprot, oprot):
        args = get_var_type_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_var_type_result()
        result.success = self._handler.get_var_type(args.long_var_name)
        oprot.writeMessageBegin("get_var_type", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_var_units(self, seqid, iprot, oprot):
        args = get_var_units_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_var_units_result()
        result.success = self._handler.get_var_units(args.long_var_name)
        oprot.writeMessageBegin("get_var_units", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_var_rank(self, seqid, iprot, oprot):
        args = get_var_rank_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_var_rank_result()
        result.success = self._handler.get_var_rank(args.long_var_name)
        oprot.writeMessageBegin("get_var_rank", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_var_size(self, seqid, iprot, oprot):
        args = get_var_size_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_var_size_result()
        result.success = self._handler.get_var_size(args.long_var_name)
        oprot.writeMessageBegin("get_var_size", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_var_nbytes(self, seqid, iprot, oprot):
        args = get_var_nbytes_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_var_nbytes_result()
        result.success = self._handler.get_var_nbytes(args.long_var_name)
        oprot.writeMessageBegin("get_var_nbytes", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_start_time(self, seqid, iprot, oprot):
        args = get_start_time_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_start_time_result()
        result.success = self._handler.get_start_time()
        oprot.writeMessageBegin("get_start_time", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_current_time(self, seqid, iprot, oprot):
        args = get_current_time_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_current_time_result()
        result.success = self._handler.get_current_time()
        oprot.writeMessageBegin("get_current_time", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_end_time(self, seqid, iprot, oprot):
        args = get_end_time_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_end_time_result()
        result.success = self._handler.get_end_time()
        oprot.writeMessageBegin("get_end_time", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_time_step(self, seqid, iprot, oprot):
        args = get_time_step_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_time_step_result()
        result.success = self._handler.get_time_step()
        oprot.writeMessageBegin("get_time_step", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_time_units(self, seqid, iprot, oprot):
        args = get_time_units_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_time_units_result()
        result.success = self._handler.get_time_units()
        oprot.writeMessageBegin("get_time_units", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_value(self, seqid, iprot, oprot):
        args = get_value_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_value_result()
        try:
            result.success = self._handler.get_value(args.long_var_name)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("get_value", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_value_at_indices(self, seqid, iprot, oprot):
        args = get_value_at_indices_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_value_at_indices_result()
        try:
            result.success = self._handler.get_value_at_indices(
                args.long_var_name, args.inds
            )
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("get_value_at_indices", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_value(self, seqid, iprot, oprot):
        args = set_value_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_value_result()
        try:
            self._handler.set_value(args.long_var_name, args.src)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("set_value", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_value_at_indices(self, seqid, iprot, oprot):
        args = set_value_at_indices_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_value_at_indices_result()
        try:
            self._handler.set_value_at_indices(args.long_var_name, args.inds, args.src)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("set_value_at_indices", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_type(self, seqid, iprot, oprot):
        args = get_grid_type_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_type_result()
        try:
            result.success = self._handler.get_grid_type(args.long_var_name)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("get_grid_type", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_shape(self, seqid, iprot, oprot):
        args = get_grid_shape_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_shape_result()
        result.success = self._handler.get_grid_shape(args.long_var_name)
        oprot.writeMessageBegin("get_grid_shape", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_spacing(self, seqid, iprot, oprot):
        args = get_grid_spacing_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_spacing_result()
        result.success = self._handler.get_grid_spacing(args.long_var_name)
        oprot.writeMessageBegin("get_grid_spacing", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_origin(self, seqid, iprot, oprot):
        args = get_grid_origin_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_origin_result()
        result.success = self._handler.get_grid_origin(args.long_var_name)
        oprot.writeMessageBegin("get_grid_origin", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_x(self, seqid, iprot, oprot):
        args = get_grid_x_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_x_result()
        result.success = self._handler.get_grid_x(args.long_var_name)
        oprot.writeMessageBegin("get_grid_x", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_y(self, seqid, iprot, oprot):
        args = get_grid_y_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_y_result()
        result.success = self._handler.get_grid_y(args.long_var_name)
        oprot.writeMessageBegin("get_grid_y", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_z(self, seqid, iprot, oprot):
        args = get_grid_z_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_z_result()
        result.success = self._handler.get_grid_z(args.long_var_name)
        oprot.writeMessageBegin("get_grid_z", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_connectivity(self, seqid, iprot, oprot):
        args = get_grid_connectivity_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_connectivity_result()
        result.success = self._handler.get_grid_connectivity(args.long_var_name)
        oprot.writeMessageBegin("get_grid_connectivity", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_grid_offset(self, seqid, iprot, oprot):
        args = get_grid_offset_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_grid_offset_result()
        result.success = self._handler.get_grid_offset(args.long_var_name)
        oprot.writeMessageBegin("get_grid_offset", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_initialize_config(self, seqid, iprot, oprot):
        args = initialize_config_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = initialize_config_result()
        try:
            self._handler.initialize_config(args.file)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("initialize_config", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_initialize_model(self, seqid, iprot, oprot):
        args = initialize_model_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = initialize_model_result()
        try:
            self._handler.initialize_model()
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("initialize_model", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_start_time(self, seqid, iprot, oprot):
        args = set_start_time_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_start_time_result()
        try:
            self._handler.set_start_time(args.start_time)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("set_start_time", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_end_time(self, seqid, iprot, oprot):
        args = set_end_time_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_end_time_result()
        try:
            self._handler.set_end_time(args.end_time)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("set_end_time", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_attribute_names(self, seqid, iprot, oprot):
        args = get_attribute_names_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_attribute_names_result()
        result.success = self._handler.get_attribute_names()
        oprot.writeMessageBegin("get_attribute_names", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_attribute_value(self, seqid, iprot, oprot):
        args = get_attribute_value_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_attribute_value_result()
        try:
            result.success = self._handler.get_attribute_value(args.attribute_name)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("get_attribute_value", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_attribute_value(self, seqid, iprot, oprot):
        args = set_attribute_value_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_attribute_value_result()
        try:
            self._handler.set_attribute_value(args.attribute_name, args.attribute_value)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("set_attribute_value", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_save_state(self, seqid, iprot, oprot):
        args = save_state_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = save_state_result()
        try:
            self._handler.save_state(args.destination_directory)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("save_state", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_load_state(self, seqid, iprot, oprot):
        args = load_state_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = load_state_result()
        try:
            self._handler.load_state(args.source_directory)
        except ModelException as error:
            result.error = error
        oprot.writeMessageBegin("load_state", TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class initialize_args(object):
    """Thrift-generated argument struct for the ``initialize`` RPC.

    Attributes:
     - file
    """
    # Field spec consumed by the C-accelerated fastbinary codec; index 0 is
    # unused, index 1 describes the STRING field "file".
    thrift_spec = (None, (1, TType.STRING, "file", None, None))  # 0  # 1
    def __init__(self, file=None):
        self.file = file
    def read(self, iprot):
        # Fast path: decode the whole struct in C when the accelerated binary
        # protocol, a C-readable transport, and the fastbinary module are all
        # available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode, skipping unknown fields for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.file = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path mirrors read(): C-level encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("initialize_args")
        # Unset (None) fields are simply omitted from the wire format.
        if self.file is not None:
            oprot.writeFieldBegin("file", TType.STRING, 1)
            oprot.writeString(self.file)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to check for this struct.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class initialize_result(object):
    """Thrift result struct for the ``initialize`` RPC.

    Attributes:
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        None,  # field id 0 unused (void return)
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, error=None):
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("initialize_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_args(object):
    """Thrift argument struct for the ``update`` RPC (no parameters)."""

    thrift_spec = ()

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: no declared fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            _, ftype, _ = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_result(object):
    """Thrift result struct for the ``update`` RPC.

    Attributes:
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        None,  # field id 0 unused (void return)
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, error=None):
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_until_args(object):
    """Thrift argument struct for the ``update_until`` RPC.

    Attributes:
    - time: double passed to the handler (presumably a model time —
      confirm against the service definition).
    """

    thrift_spec = (
        None,  # field id 0 unused
        (1, TType.DOUBLE, "time", None, None),  # field id 1
    )

    def __init__(self, time=None):
        self.time = time

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.DOUBLE:
                self.time = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_until_args")
        if self.time is not None:
            oprot.writeFieldBegin("time", TType.DOUBLE, 1)
            oprot.writeDouble(self.time)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_until_result(object):
    """Thrift result struct for the ``update_until`` RPC.

    Attributes:
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        None,  # field id 0 unused (void return)
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, error=None):
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_until_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_frac_args(object):
    """Thrift argument struct for the ``update_frac`` RPC.

    Attributes:
    - frac: double passed to the handler (presumably a fractional time
      step — confirm against the service definition).
    """

    thrift_spec = (
        None,  # field id 0 unused
        (1, TType.DOUBLE, "frac", None, None),  # field id 1
    )

    def __init__(self, frac=None):
        self.frac = frac

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.DOUBLE:
                self.frac = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_frac_args")
        if self.frac is not None:
            oprot.writeFieldBegin("frac", TType.DOUBLE, 1)
            oprot.writeDouble(self.frac)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class update_frac_result(object):
    """Thrift result struct for the ``update_frac`` RPC.

    Attributes:
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        None,  # field id 0 unused (void return)
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, error=None):
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("update_frac_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class finalize_model_args(object):
    """Thrift argument struct for the ``finalize_model`` RPC (no parameters)."""

    thrift_spec = ()

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: no declared fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            _, ftype, _ = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("finalize_model_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class finalize_model_result(object):
    """Thrift result struct for the ``finalize_model`` RPC.

    Attributes:
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        None,  # field id 0 unused (void return)
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, error=None):
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("finalize_model_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_component_name_args(object):
    """Thrift argument struct for the ``get_component_name`` RPC (no parameters)."""

    thrift_spec = ()

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: no declared fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            _, ftype, _ = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_component_name_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_component_name_result(object):
    """Thrift result struct for the ``get_component_name`` RPC.

    Attributes:
    - success: string return value of the call.
    - error: ModelException raised by the handler, if any.
    """

    thrift_spec = (
        (0, TType.STRING, "success", None, None),  # field id 0
        (1, TType.STRUCT, "error", (ModelException, ModelException.thrift_spec), None),  # field id 1
    )

    def __init__(self, success=None, error=None):
        self.success = success
        self.error = error

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRING:
                self.success = iprot.readString()
            elif fid == 1 and ftype == TType.STRUCT:
                self.error = ModelException()
                self.error.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_component_name_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_input_var_names_args(object):
    """Thrift argument struct for the ``get_input_var_names`` RPC (no parameters)."""

    thrift_spec = ()

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: no declared fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            _, ftype, _ = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_input_var_names_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_input_var_names_result(object):
    """Thrift result struct for the ``get_input_var_names`` RPC.

    Attributes:
    - success: list of strings returned by the call.
    """

    thrift_spec = ((0, TType.LIST, "success", (TType.STRING, None), None),)  # field id 0

    def __init__(self, success=None):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.LIST:
                self.success = []
                _elem_type, length = iprot.readListBegin()
                for _ in range(length):
                    self.success.append(iprot.readString())
                iprot.readListEnd()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_input_var_names_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for name in self.success:
                oprot.writeString(name)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_output_var_names_args(object):
    """Thrift argument struct for the ``get_output_var_names`` RPC (no parameters)."""

    thrift_spec = ()

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: no declared fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            _, ftype, _ = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_output_var_names_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_output_var_names_result(object):
    """Thrift result struct for the ``get_output_var_names`` RPC.

    Attributes:
    - success: list of strings returned by the call.
    """

    thrift_spec = ((0, TType.LIST, "success", (TType.STRING, None), None),)  # field id 0

    def __init__(self, success=None):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.LIST:
                self.success = []
                _elem_type, length = iprot.readListBegin()
                for _ in range(length):
                    self.success.append(iprot.readString())
                iprot.readListEnd()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_output_var_names_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for name in self.success:
                oprot.writeString(name)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_type_args(object):
    """Thrift argument struct for the ``get_var_type`` RPC.

    Attributes:
    - long_var_name: string naming the variable to query.
    """

    thrift_spec = (
        None,  # field id 0 unused
        (1, TType.STRING, "long_var_name", None, None),  # field id 1
    )

    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.long_var_name = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_type_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_type_result(object):
    """Thrift result struct for the ``get_var_type`` RPC.

    Attributes:
    - success: string return value of the call.
    """

    thrift_spec = ((0, TType.STRING, "success", None, None),)  # field id 0

    def __init__(self, success=None):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRING:
                self.success = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_type_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_units_args(object):
    """Thrift argument struct for the ``get_var_units`` RPC.

    Attributes:
    - long_var_name: string naming the variable to query.
    """

    thrift_spec = (
        None,  # field id 0 unused
        (1, TType.STRING, "long_var_name", None, None),  # field id 1
    )

    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.long_var_name = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_units_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_units_result(object):
    """Thrift result struct for the ``get_var_units`` RPC.

    Attributes:
    - success: string return value of the call.
    """

    thrift_spec = ((0, TType.STRING, "success", None, None),)  # field id 0

    def __init__(self, success=None):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRING:
                self.success = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_units_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_rank_args(object):
    """Thrift argument struct for the ``get_var_rank`` RPC.

    Attributes:
    - long_var_name: string naming the variable to query.
    """

    thrift_spec = (
        None,  # field id 0 unused
        (1, TType.STRING, "long_var_name", None, None),  # field id 1
    )

    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.long_var_name = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_rank_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_rank_result(object):
    """Thrift result struct for the ``get_var_rank`` RPC.

    Attributes:
    - success: i32 return value of the call.
    """

    thrift_spec = ((0, TType.I32, "success", None, None),)  # field id 0

    def __init__(self, success=None):
        self.success = success

    def read(self, iprot):
        # Fast path: C-accelerated binary decoding when available.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            _, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.I32:
                self.success = iprot.readI32()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin("get_var_rank_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        state = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, state)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_var_size_args(object):
    """
    Thrift-generated argument struct for the get_var_size RPC call.

    Attributes:
     - long_var_name: string identifying the variable (field id 1)
    """
    # Index 0 is unused (field ids start at 1 for args).
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None))  # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_var_size_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_var_size_result(object):
    """
    Thrift-generated result struct for the get_var_size RPC call.

    Attributes:
     - success: i32 return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.I32, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_var_size_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_var_nbytes_args(object):
    """
    Thrift-generated argument struct for the get_var_nbytes RPC call.

    Attributes:
     - long_var_name: string identifying the variable (field id 1)
    """
    # Index 0 is unused (field ids start at 1 for args).
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None))  # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_var_nbytes_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_var_nbytes_result(object):
    """
    Thrift-generated result struct for the get_var_nbytes RPC call.

    Attributes:
     - success: i32 return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.I32, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_var_nbytes_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_start_time_args(object):
    """Thrift-generated argument struct for the no-argument get_start_time RPC call."""
    # Empty spec: this call takes no arguments.
    thrift_spec = ()
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: no known fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_start_time_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_start_time_result(object):
    """
    Thrift-generated result struct for the get_start_time RPC call.

    Attributes:
     - success: double return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.DOUBLE, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.DOUBLE:
                    self.success = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_start_time_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.DOUBLE, 0)
            oprot.writeDouble(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_current_time_args(object):
    """Thrift-generated argument struct for the no-argument get_current_time RPC call."""
    # Empty spec: this call takes no arguments.
    thrift_spec = ()
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: no known fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_current_time_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_current_time_result(object):
    """
    Thrift-generated result struct for the get_current_time RPC call.

    Attributes:
     - success: double return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.DOUBLE, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.DOUBLE:
                    self.success = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_current_time_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.DOUBLE, 0)
            oprot.writeDouble(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_end_time_args(object):
    """Thrift-generated argument struct for the no-argument get_end_time RPC call."""
    # Empty spec: this call takes no arguments.
    thrift_spec = ()
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: no known fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_end_time_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_end_time_result(object):
    """
    Thrift-generated result struct for the get_end_time RPC call.

    Attributes:
     - success: double return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.DOUBLE, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.DOUBLE:
                    self.success = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_end_time_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.DOUBLE, 0)
            oprot.writeDouble(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_time_step_args(object):
    """Thrift-generated argument struct for the no-argument get_time_step RPC call."""
    # Empty spec: this call takes no arguments.
    thrift_spec = ()
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: no known fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_time_step_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_time_step_result(object):
    """
    Thrift-generated result struct for the get_time_step RPC call.

    Attributes:
     - success: double return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.DOUBLE, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.DOUBLE:
                    self.success = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_time_step_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.DOUBLE, 0)
            oprot.writeDouble(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_time_units_args(object):
    """Thrift-generated argument struct for the no-argument get_time_units RPC call."""
    # Empty spec: this call takes no arguments.
    thrift_spec = ()
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: no known fields, so skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_time_units_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_time_units_result(object):
    """
    Thrift-generated result struct for the get_time_units RPC call.

    Attributes:
     - success: string return value of the call
    """
    # (field-id, TType, name, type-args, default) tuples consumed by fastbinary.
    thrift_spec = ((0, TType.STRING, "success", None, None),)  # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_time_units_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_value_args(object):
    """
    Thrift-generated argument struct for the get_value RPC call.

    Attributes:
     - long_var_name: string identifying the variable (field id 1)
    """
    # Index 0 is unused (field ids start at 1 for args).
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None))  # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_value_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_value_result(object):
    """
    Thrift-generated result struct for the get_value RPC call.

    Attributes:
     - success: string return value of the call (field id 0)
     - error: ModelException raised by the server, if any (field id 1)
    """
    thrift_spec = (
        (0, TType.STRING, "success", None, None),  # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ),  # 1
    )
    def __init__(self, success=None, error=None):
        self.success = success
        self.error = error
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    # Nested struct: delegate decoding to ModelException.read.
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_value_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_value_at_indices_args(object):
    """
    Thrift-generated argument struct for the get_value_at_indices RPC call.

    Attributes:
     - long_var_name: string identifying the variable (field id 1)
     - inds: list of i32 indices to read (field id 2)
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, "long_var_name", None, None),  # 1
        (2, TType.LIST, "inds", (TType.I32, None), None),  # 2
    )
    def __init__(self, long_var_name=None, inds=None):
        self.long_var_name = long_var_name
        self.inds = inds
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    # Decode a list<i32>: element type and size come first.
                    self.inds = []
                    (_etype17, _size14) = iprot.readListBegin()
                    for _i18 in range(_size14):
                        _elem19 = iprot.readI32()
                        self.inds.append(_elem19)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_value_at_indices_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        if self.inds is not None:
            oprot.writeFieldBegin("inds", TType.LIST, 2)
            oprot.writeListBegin(TType.I32, len(self.inds))
            for iter20 in self.inds:
                oprot.writeI32(iter20)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_value_at_indices_result(object):
    """
    Thrift-generated result struct for the get_value_at_indices RPC call.

    Attributes:
     - success: string return value of the call (field id 0)
     - error: ModelException raised by the server, if any (field id 1)
    """
    thrift_spec = (
        (0, TType.STRING, "success", None, None),  # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ),  # 1
    )
    def __init__(self, success=None, error=None):
        self.success = success
        self.error = error
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    # Nested struct: delegate decoding to ModelException.read.
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_value_at_indices_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_value_args(object):
    """
    Thrift-generated argument struct for the set_value RPC call.

    Attributes:
     - long_var_name: string identifying the variable (field id 1)
     - src: string payload carrying the new value (field id 2)
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, "long_var_name", None, None),  # 1
        (2, TType.STRING, "src", None, None),  # 2
    )
    def __init__(self, long_var_name=None, src=None):
        self.long_var_name = long_var_name
        self.src = src
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.src = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_value_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        if self.src is not None:
            oprot.writeFieldBegin("src", TType.STRING, 2)
            oprot.writeString(self.src)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_value_result(object):
    """
    Thrift-generated result struct for the set_value RPC call (void return).

    Attributes:
     - error: ModelException raised by the server, if any (field id 1)
    """
    thrift_spec = (
        None,  # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ),  # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        # Fast path: C-accelerated binary decode when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic path: read fields by id until STOP, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    # Nested struct: delegate decoding to ModelException.read.
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated binary encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_value_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields to enforce.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_value_at_indices_args(object):
    """Thrift argument struct for the ``set_value_at_indices`` RPC.

    Attributes:
     - long_var_name
     - inds
     - src
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, "long_var_name", None, None), # 1
        (2, TType.LIST, "inds", (TType.I32, None), None), # 2
        (3, TType.STRING, "src", None, None), # 3
    )
    def __init__(self, long_var_name=None, inds=None, src=None):
        self.long_var_name = long_var_name
        self.inds = inds
        self.src = src
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.inds = []
                    (_etype24, _size21) = iprot.readListBegin()
                    for _i25 in range(_size21):
                        _elem26 = iprot.readI32()
                        self.inds.append(_elem26)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.src = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_value_at_indices_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        if self.inds is not None:
            oprot.writeFieldBegin("inds", TType.LIST, 2)
            oprot.writeListBegin(TType.I32, len(self.inds))
            for iter27 in self.inds:
                oprot.writeI32(iter27)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.src is not None:
            oprot.writeFieldBegin("src", TType.STRING, 3)
            oprot.writeString(self.src)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_value_at_indices_result(object):
    """Thrift result struct for the ``set_value_at_indices`` RPC; the call
    returns void, so only a possible ``ModelException`` is carried back.

    Attributes:
     - error
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_value_at_indices_result")
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_type_args(object):
    """Thrift argument struct for the ``get_grid_type`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_type_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_type_result(object):
    """Thrift result struct for the ``get_grid_type`` RPC: an i32
    ``success`` value (field id 0) or a ``ModelException``.

    Attributes:
     - success
     - error
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = (
        (0, TType.I32, "success", None, None), # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, success=None, error=None):
        self.success = success
        self.error = error
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_type_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_shape_args(object):
    """Thrift argument struct for the ``get_grid_shape`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_shape_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_shape_result(object):
    """Thrift result struct for the ``get_grid_shape`` RPC: a list<i32>
    ``success`` value in field id 0 (no exception field declared).

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.I32, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype31, _size28) = iprot.readListBegin()
                    for _i32 in range(_size28):
                        _elem33 = iprot.readI32()
                        self.success.append(_elem33)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_shape_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.I32, len(self.success))
            for iter34 in self.success:
                oprot.writeI32(iter34)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_spacing_args(object):
    """Thrift argument struct for the ``get_grid_spacing`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_spacing_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_spacing_result(object):
    """Thrift result struct for the ``get_grid_spacing`` RPC: a
    list<double> ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.DOUBLE, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype38, _size35) = iprot.readListBegin()
                    for _i39 in range(_size35):
                        _elem40 = iprot.readDouble()
                        self.success.append(_elem40)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_spacing_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.DOUBLE, len(self.success))
            for iter41 in self.success:
                oprot.writeDouble(iter41)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_origin_args(object):
    """Thrift argument struct for the ``get_grid_origin`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_origin_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_origin_result(object):
    """Thrift result struct for the ``get_grid_origin`` RPC: a
    list<double> ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.DOUBLE, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype45, _size42) = iprot.readListBegin()
                    for _i46 in range(_size42):
                        _elem47 = iprot.readDouble()
                        self.success.append(_elem47)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_origin_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.DOUBLE, len(self.success))
            for iter48 in self.success:
                oprot.writeDouble(iter48)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_x_args(object):
    """Thrift argument struct for the ``get_grid_x`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_x_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_x_result(object):
    """Thrift result struct for the ``get_grid_x`` RPC: a list<double>
    ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.DOUBLE, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype52, _size49) = iprot.readListBegin()
                    for _i53 in range(_size49):
                        _elem54 = iprot.readDouble()
                        self.success.append(_elem54)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_x_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.DOUBLE, len(self.success))
            for iter55 in self.success:
                oprot.writeDouble(iter55)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_y_args(object):
    """Thrift argument struct for the ``get_grid_y`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_y_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_y_result(object):
    """Thrift result struct for the ``get_grid_y`` RPC: a list<double>
    ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.DOUBLE, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype59, _size56) = iprot.readListBegin()
                    for _i60 in range(_size56):
                        _elem61 = iprot.readDouble()
                        self.success.append(_elem61)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_y_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.DOUBLE, len(self.success))
            for iter62 in self.success:
                oprot.writeDouble(iter62)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_z_args(object):
    """Thrift argument struct for the ``get_grid_z`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_z_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_z_result(object):
    """Thrift result struct for the ``get_grid_z`` RPC: a list<double>
    ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.DOUBLE, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype66, _size63) = iprot.readListBegin()
                    for _i67 in range(_size63):
                        _elem68 = iprot.readDouble()
                        self.success.append(_elem68)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_z_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.DOUBLE, len(self.success))
            for iter69 in self.success:
                oprot.writeDouble(iter69)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_connectivity_args(object):
    """Thrift argument struct for the ``get_grid_connectivity`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_connectivity_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_connectivity_result(object):
    """Thrift result struct for the ``get_grid_connectivity`` RPC: a
    list<i32> ``success`` value in field id 0.

    Attributes:
     - success
    """
    # Field spec consumed by the fastbinary C codec; field id 0 is the
    # conventional Thrift slot for the return value.
    thrift_spec = ((0, TType.LIST, "success", (TType.I32, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype73, _size70) = iprot.readListBegin()
                    for _i74 in range(_size70):
                        _elem75 = iprot.readI32()
                        self.success.append(_elem75)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_connectivity_result")
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.I32, len(self.success))
            for iter76 in self.success:
                oprot.writeI32(iter76)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_offset_args(object):
    """Thrift argument struct for the ``get_grid_offset`` RPC.

    Attributes:
     - long_var_name
    """
    # Field spec consumed by the fastbinary C codec; slot 0 is unused
    # because Thrift field ids are 1-based.
    thrift_spec = (None, (1, TType.STRING, "long_var_name", None, None)) # 0 # 1
    def __init__(self, long_var_name=None):
        self.long_var_name = long_var_name
    def read(self, iprot):
        # Fast path: C-accelerated decode when protocol/transport/spec allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Generic per-field decode; unknown fields are skipped for
        # forward compatibility.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.long_var_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        # Fast path: C-accelerated encode when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_offset_args")
        if self.long_var_name is not None:
            oprot.writeFieldBegin("long_var_name", TType.STRING, 1)
            oprot.writeString(self.long_var_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # Generated structs perform no validation.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_grid_offset_result(object):
    """
    Thrift-generated result struct for the get_grid_offset() service call.
    Attributes:
     - success
    """
    thrift_spec = ((0, TType.LIST, "success", (TType.I32, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype80, _size77) = iprot.readListBegin()
                    for _i81 in range(_size77):
                        _elem82 = iprot.readI32()
                        self.success.append(_elem82)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_grid_offset_result")
        # None fields count as unset and are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.I32, len(self.success))
            for iter83 in self.success:
                oprot.writeI32(iter83)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class initialize_config_args(object):
    """
    Thrift-generated argument struct for the initialize_config() service call.
    Attributes:
     - file
    """
    thrift_spec = (None, (1, TType.STRING, "file", None, None)) # 0 # 1
    def __init__(self, file=None):
        self.file = file
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.file = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("initialize_config_args")
        # None fields count as unset and are omitted from the wire.
        if self.file is not None:
            oprot.writeFieldBegin("file", TType.STRING, 1)
            oprot.writeString(self.file)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class initialize_config_result(object):
    """
    Thrift-generated result struct for the initialize_config() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("initialize_config_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class initialize_model_args(object):
    """Thrift argument struct for initialize_model; the call takes no parameters."""
    thrift_spec = ()
    def read(self, iprot):
        """Deserialize this (empty) struct from *iprot*, skipping every field."""
        can_accelerate = (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        )
        if can_accelerate:
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: whatever arrives is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        can_accelerate = (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        )
        if can_accelerate:
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("initialize_model_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields, so validation always succeeds."""
        return
    def __repr__(self):
        rendered = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, rendered)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class initialize_model_result(object):
    """
    Thrift-generated result struct for the initialize_model() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("initialize_model_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_start_time_args(object):
    """
    Thrift-generated argument struct for the set_start_time() service call.
    Attributes:
     - start_time
    """
    thrift_spec = (None, (1, TType.DOUBLE, "start_time", None, None)) # 0 # 1
    def __init__(self, start_time=None):
        self.start_time = start_time
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.DOUBLE:
                    self.start_time = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_start_time_args")
        # None fields count as unset and are omitted from the wire.
        if self.start_time is not None:
            oprot.writeFieldBegin("start_time", TType.DOUBLE, 1)
            oprot.writeDouble(self.start_time)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_start_time_result(object):
    """
    Thrift-generated result struct for the set_start_time() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_start_time_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_end_time_args(object):
    """
    Thrift-generated argument struct for the set_end_time() service call.
    Attributes:
     - end_time
    """
    thrift_spec = (None, (1, TType.DOUBLE, "end_time", None, None)) # 0 # 1
    def __init__(self, end_time=None):
        self.end_time = end_time
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.DOUBLE:
                    self.end_time = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_end_time_args")
        # None fields count as unset and are omitted from the wire.
        if self.end_time is not None:
            oprot.writeFieldBegin("end_time", TType.DOUBLE, 1)
            oprot.writeDouble(self.end_time)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_end_time_result(object):
    """
    Thrift-generated result struct for the set_end_time() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_end_time_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_attribute_names_args(object):
    """Thrift argument struct for get_attribute_names; the call takes no parameters."""
    thrift_spec = ()
    def read(self, iprot):
        """Deserialize this (empty) struct from *iprot*, skipping every field."""
        can_accelerate = (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        )
        if can_accelerate:
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # No declared fields: whatever arrives is skipped.
            iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        can_accelerate = (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        )
        if can_accelerate:
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_attribute_names_args")
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields, so validation always succeeds."""
        return
    def __repr__(self):
        rendered = ", ".join("%s=%r" % item for item in self.__dict__.items())
        return "%s(%s)" % (self.__class__.__name__, rendered)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_attribute_names_result(object):
    """
    Thrift-generated result struct for the get_attribute_names() service call.
    Attributes:
     - success
    """
    thrift_spec = ((0, TType.LIST, "success", (TType.STRING, None), None),) # 0
    def __init__(self, success=None):
        self.success = success
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype87, _size84) = iprot.readListBegin()
                    for _i88 in range(_size84):
                        _elem89 = iprot.readString()
                        self.success.append(_elem89)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_attribute_names_result")
        # None fields count as unset and are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for iter90 in self.success:
                oprot.writeString(iter90)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_attribute_value_args(object):
    """
    Thrift-generated argument struct for the get_attribute_value() service call.
    Attributes:
     - attribute_name
    """
    thrift_spec = (None, (1, TType.STRING, "attribute_name", None, None)) # 0 # 1
    def __init__(self, attribute_name=None):
        self.attribute_name = attribute_name
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.attribute_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_attribute_value_args")
        # None fields count as unset and are omitted from the wire.
        if self.attribute_name is not None:
            oprot.writeFieldBegin("attribute_name", TType.STRING, 1)
            oprot.writeString(self.attribute_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_attribute_value_result(object):
    """
    Thrift-generated result struct for the get_attribute_value() service call.
    Attributes:
     - success
     - error
    """
    thrift_spec = (
        (0, TType.STRING, "success", None, None), # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, success=None, error=None):
        self.success = success
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("get_attribute_value_result")
        # None fields count as unset and are omitted from the wire.
        if self.success is not None:
            oprot.writeFieldBegin("success", TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_attribute_value_args(object):
    """
    Thrift-generated argument struct for the set_attribute_value() service call.
    Attributes:
     - attribute_name
     - attribute_value
    """
    thrift_spec = (
        None, # 0
        (1, TType.STRING, "attribute_name", None, None), # 1
        (2, TType.STRING, "attribute_value", None, None), # 2
    )
    def __init__(self, attribute_name=None, attribute_value=None):
        self.attribute_name = attribute_name
        self.attribute_value = attribute_value
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.attribute_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.attribute_value = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_attribute_value_args")
        # None fields count as unset and are omitted from the wire.
        if self.attribute_name is not None:
            oprot.writeFieldBegin("attribute_name", TType.STRING, 1)
            oprot.writeString(self.attribute_name)
            oprot.writeFieldEnd()
        if self.attribute_value is not None:
            oprot.writeFieldBegin("attribute_value", TType.STRING, 2)
            oprot.writeString(self.attribute_value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_attribute_value_result(object):
    """
    Thrift-generated result struct for the set_attribute_value() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("set_attribute_value_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class save_state_args(object):
    """
    Thrift-generated argument struct for the save_state() service call.
    Attributes:
     - destination_directory
    """
    thrift_spec = (
        None, # 0
        (1, TType.STRING, "destination_directory", None, None), # 1
    )
    def __init__(self, destination_directory=None):
        self.destination_directory = destination_directory
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.destination_directory = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("save_state_args")
        # None fields count as unset and are omitted from the wire.
        if self.destination_directory is not None:
            oprot.writeFieldBegin("destination_directory", TType.STRING, 1)
            oprot.writeString(self.destination_directory)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class save_state_result(object):
    """
    Thrift-generated result struct for the save_state() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("save_state_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class load_state_args(object):
    """
    Thrift-generated argument struct for the load_state() service call.
    Attributes:
     - source_directory
    """
    thrift_spec = (None, (1, TType.STRING, "source_directory", None, None)) # 0 # 1
    def __init__(self, source_directory=None):
        self.source_directory = source_directory
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.source_directory = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("load_state_args")
        # None fields count as unset and are omitted from the wire.
        if self.source_directory is not None:
            oprot.writeFieldBegin("source_directory", TType.STRING, 1)
            oprot.writeString(self.source_directory)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class load_state_result(object):
    """
    Thrift-generated result struct for the load_state() service call.
    Attributes:
     - error
    """
    thrift_spec = (
        None, # 0
        (
            1,
            TType.STRUCT,
            "error",
            (ModelException, ModelException.thrift_spec),
            None,
        ), # 1
    )
    def __init__(self, error=None):
        self.error = error
    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            fastbinary.decode_binary(
                self, iprot.trans, (self.__class__, self.thrift_spec)
            )
            return
        # Slow path: field-by-field decode; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.error = ModelException()
                    self.error.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if (
            oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and self.thrift_spec is not None
            and fastbinary is not None
        ):
            oprot.trans.write(
                fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))
            )
            return
        oprot.writeStructBegin("load_state_result")
        # None fields count as unset and are omitted from the wire.
        if self.error is not None:
            oprot.writeFieldBegin("error", TType.STRUCT, 1)
            self.error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        # No required fields, so nothing to check.
        return
    def __repr__(self):
        L = ["%s=%r" % (key, value) for key, value in self.__dict__.items()]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
| 31.751407
| 88
| 0.573396
| 25,386
| 248,169
| 5.273694
| 0.010321
| 0.016582
| 0.029848
| 0.058561
| 0.956557
| 0.93084
| 0.905757
| 0.875439
| 0.859013
| 0.83975
| 0
| 0.003822
| 0.329469
| 248,169
| 7,815
| 89
| 31.75547
| 0.800709
| 0.015639
| 0
| 0.84586
| 1
| 0
| 0.027235
| 0.003849
| 0
| 0
| 0
| 0
| 0
| 1
| 0.119339
| false
| 0.001907
| 0.000953
| 0.038138
| 0.233275
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ce1e06785177b9fec3afeac2e713b18012fa7158
| 14,661
|
py
|
Python
|
tests/digraph/test_isomorphic.py
|
enavarro51/retworkx
|
71e34d111623d1de2e4870a8227eddacfb3ade4c
|
[
"Apache-2.0"
] | null | null | null |
tests/digraph/test_isomorphic.py
|
enavarro51/retworkx
|
71e34d111623d1de2e4870a8227eddacfb3ade4c
|
[
"Apache-2.0"
] | null | null | null |
tests/digraph/test_isomorphic.py
|
enavarro51/retworkx
|
71e34d111623d1de2e4870a8227eddacfb3ade4c
|
[
"Apache-2.0"
] | 1
|
2022-03-24T05:00:30.000Z
|
2022-03-24T05:00:30.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import unittest
import retworkx
class TestIsomorphic(unittest.TestCase):
    """Tests for retworkx directed-graph isomorphism and VF2 mapping.

    Covers ``is_isomorphic`` with and without node/edge matchers
    (``id_order`` in both modes) and ``digraph_vf2_mapping``.
    """

    def test_empty_isomorphic(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_empty_isomorphic_compare_nodes(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_isomorphic_mismatch_node_data(self):
        # Without a node matcher only the structure is compared, so graphs
        # with different node payloads are still isomorphic.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_isomorphic_compare_nodes_mismatch_node_data(self):
        # With a node matcher the differing payloads break the isomorphism.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertFalse(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_is_isomorphic_nodes_compare_raises(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")

        def compare_nodes(a, b):
            raise TypeError("Failure")

        # Bug fix: previously the arguments were wrapped in ONE tuple
        # (`assertRaises(TypeError, retworkx.is_isomorphic, (dag_a, dag_b,
        # compare_nodes))`), so is_isomorphic raised TypeError for a bad
        # signature and compare_nodes was never invoked — the test passed
        # vacuously. Pass the graphs and matcher as separate arguments so
        # the TypeError genuinely propagates from the comparator.
        with self.assertRaises(TypeError):
            retworkx.is_isomorphic(dag_a, dag_b, compare_nodes)

    def test_isomorphic_compare_nodes_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_compare_edges_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(
                        dag_a,
                        dag_b,
                        edge_matcher=lambda x, y: x == y,
                        id_order=id_order,
                    )
                )

    def test_isomorphic_compare_nodes_with_removals(self):
        # Build a DAG shaped like a small quantum circuit, remove an
        # interior node, rewire, and check it still matches an equivalent
        # graph built directly without the removal.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        qr_0_in = dag_a.add_node("qr[0]")
        qr_1_in = dag_a.add_node("qr[1]")
        cr_0_in = dag_a.add_node("cr[0]")
        qr_0_out = dag_a.add_node("qr[0]")
        qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("qr[0]")
        cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
        dag_a.add_edge(qr_1_in, cu1, "qr[1]")
        measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
        dag_a.add_edge(cu1, measure_0, "qr[0]")
        measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
        dag_a.add_edge(measure_0, measure_1, "cr[0]")
        dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
        dag_a.remove_node(cu1)
        dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
        dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
        qr_0_in = dag_b.add_node("qr[0]")
        qr_1_in = dag_b.add_node("qr[1]")
        cr_0_in = dag_b.add_node("cr[0]")
        qr_0_out = dag_b.add_node("qr[0]")
        qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("qr[0]")
        measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
        dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
        measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
        dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_b.add_edge(measure_0, measure_1, "cr[0]")
        dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_compare_nodes_with_removals_deepcopy(self):
        # Same scenario as above but exercised through deepcopied graphs,
        # guarding against index-compaction bugs in the copy path.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        qr_0_in = dag_a.add_node("qr[0]")
        qr_1_in = dag_a.add_node("qr[1]")
        cr_0_in = dag_a.add_node("cr[0]")
        qr_0_out = dag_a.add_node("qr[0]")
        qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("qr[0]")
        cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
        dag_a.add_edge(qr_1_in, cu1, "qr[1]")
        measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
        dag_a.add_edge(cu1, measure_0, "qr[0]")
        measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
        dag_a.add_edge(measure_0, measure_1, "cr[0]")
        dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
        dag_a.remove_node(cu1)
        dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
        dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
        qr_0_in = dag_b.add_node("qr[0]")
        qr_1_in = dag_b.add_node("qr[1]")
        cr_0_in = dag_b.add_node("cr[0]")
        qr_0_out = dag_b.add_node("qr[0]")
        qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("qr[0]")
        measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
        dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
        measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
        dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_b.add_edge(measure_0, measure_1, "cr[0]")
        dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(
                        copy.deepcopy(dag_a),
                        copy.deepcopy(dag_b),
                        lambda x, y: x == y,
                        id_order=id_order,
                    )
                )

    def test_digraph_isomorphic_parallel_edges_with_edge_matcher(self):
        graph = retworkx.PyDiGraph()
        graph.extend_from_weighted_edge_list([(0, 1, "a"), (0, 1, "b"), (1, 2, "c")])
        self.assertTrue(retworkx.is_isomorphic(graph, graph, edge_matcher=lambda x, y: x == y))

    def test_digraph_isomorphic_self_loop(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0])
        graph.add_edges_from([(0, 0, "a")])
        self.assertTrue(retworkx.is_isomorphic(graph, graph))

    def test_digraph_non_isomorphic_edge_mismatch_self_loop(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0])
        graph.add_edges_from([(0, 0, "a")])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0])
        second_graph.add_edges_from([(0, 0, "b")])
        self.assertFalse(
            retworkx.is_isomorphic(graph, second_graph, edge_matcher=lambda x, y: x == y)
        )

    def test_digraph_non_isomorphic_rule_out_incoming(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(0, 1), (0, 2), (2, 1)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(0, 1), (0, 2), (3, 1)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_digraph_non_isomorphic_rule_ins_outgoing(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 2)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 3)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_digraph_non_isomorphic_rule_ins_incoming(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(1, 0), (2, 0), (2, 1)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(1, 0), (2, 0), (3, 1)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_isomorphic_parallel_edges(self):
        first = retworkx.PyDiGraph()
        first.extend_from_edge_list([(0, 1), (0, 1), (1, 2), (2, 3)])
        second = retworkx.PyDiGraph()
        second.extend_from_edge_list([(0, 1), (1, 2), (1, 2), (2, 3)])
        self.assertFalse(retworkx.is_isomorphic(first, second))

    def test_digraph_isomorphic_insufficient_call_limit(self):
        graph = retworkx.generators.directed_path_graph(5)
        self.assertFalse(retworkx.is_isomorphic(graph, graph, call_limit=2))

    def test_digraph_vf2_mapping_identical(self):
        graph = retworkx.generators.directed_grid_graph(2, 2)
        second_graph = retworkx.generators.directed_grid_graph(2, 2)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})

    def test_digraph_vf2_mapping_identical_removals(self):
        graph = retworkx.generators.directed_path_graph(2)
        second_graph = retworkx.generators.directed_path_graph(4)
        second_graph.remove_nodes_from([1, 2])
        second_graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual({0: 0, 1: 3}, next(mapping))

    def test_digraph_vf2_mapping_identical_removals_first(self):
        second_graph = retworkx.generators.directed_path_graph(2)
        graph = retworkx.generators.directed_path_graph(4)
        graph.remove_nodes_from([1, 2])
        graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual({0: 0, 3: 1}, next(mapping))

    def test_digraph_vf2_mapping_identical_vf2pp(self):
        graph = retworkx.generators.directed_grid_graph(2, 2)
        second_graph = retworkx.generators.directed_grid_graph(2, 2)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})

    def test_digraph_vf2_mapping_identical_removals_vf2pp(self):
        graph = retworkx.generators.directed_path_graph(2)
        second_graph = retworkx.generators.directed_path_graph(4)
        second_graph.remove_nodes_from([1, 2])
        second_graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual({0: 0, 1: 3}, next(mapping))

    def test_digraph_vf2_mapping_identical_removals_first_vf2pp(self):
        second_graph = retworkx.generators.directed_path_graph(2)
        graph = retworkx.generators.directed_path_graph(4)
        graph.remove_nodes_from([1, 2])
        graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual({0: 0, 3: 1}, next(mapping))

    def test_digraph_vf2_number_of_valid_mappings(self):
        # A directed 3-mesh is vertex-transitive: all 3! orderings map.
        graph = retworkx.generators.directed_mesh_graph(3)
        mapping = retworkx.digraph_vf2_mapping(graph, graph, id_order=True)
        total = sum(1 for _ in mapping)
        self.assertEqual(total, 6)

    def test_empty_digraph_vf2_mapping(self):
        g_a = retworkx.PyDiGraph()
        g_b = retworkx.PyDiGraph()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                mapping = retworkx.digraph_vf2_mapping(g_a, g_b, id_order=id_order, subgraph=False)
                self.assertEqual({}, next(mapping))
| 41.769231
| 99
| 0.621035
| 2,290
| 14,661
| 3.612227
| 0.065502
| 0.035783
| 0.044004
| 0.033849
| 0.863032
| 0.836194
| 0.807302
| 0.787234
| 0.779739
| 0.778288
| 0
| 0.036664
| 0.248414
| 14,661
| 350
| 100
| 41.888571
| 0.714039
| 0.035536
| 0
| 0.732639
| 0
| 0
| 0.039284
| 0
| 0
| 0
| 0
| 0
| 0.090278
| 1
| 0.09375
| false
| 0
| 0.010417
| 0
| 0.107639
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce2c2deed125fd5f34b84566cb994a3c58c087ed
| 6,482
|
py
|
Python
|
LOSO_Split.py
|
wzhlearning/fNIRS-Transformer
|
12ebc37384ef3ebd1d23d1bb1de8818af2f75399
|
[
"MIT"
] | null | null | null |
LOSO_Split.py
|
wzhlearning/fNIRS-Transformer
|
12ebc37384ef3ebd1d23d1bb1de8818af2f75399
|
[
"MIT"
] | 1
|
2022-02-08T09:49:27.000Z
|
2022-02-08T13:20:22.000Z
|
LOSO_Split.py
|
wzhlearning/fNIRS-Transformer
|
12ebc37384ef3ebd1d23d1bb1de8818af2f75399
|
[
"MIT"
] | null | null | null |
import numpy as np
def Split_Dataset_A(sub, feature, label, channels):
    """
    LOSO-CV split for Dataset A.

    Trials are stored contiguously per subject: subjects 1-3 contribute
    36 trials each, subjects 4-8 contribute 48 trials each.

    Args:
        sub: leave one subject out (1..8).
        feature: input fNIRS signals, shape (n_trials, ...).
        label: input fNIRS labels, aligned with `feature`.
        channels: fNIRS channels; signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_train: training set.
        y_train: labels for training set.
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..8 (the original branching
            silently produced an empty test set in that case).
    """
    if not 1 <= sub <= 8:
        raise ValueError("sub must be in 1..8 for Dataset A, got %r" % (sub,))
    # Cumulative trial offsets for subjects 1..8 (36,36,36,48,48,48,48 ...).
    bounds = [0, 36, 72, 108, 156, 204, 252, 300]
    start = bounds[sub - 1]
    # The last subject's block runs to the end of the data, matching the
    # original open-ended slice feature[300:].
    end = bounds[sub] if sub < 8 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_train = np.append(feature[:start], feature[end:], axis=0)
    y_train = np.append(label[:start], label[end:], axis=0)
    X_train = X_train.reshape((X_train.shape[0], 2, channels, -1))
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_train, y_train, X_test, y_test
def Split_Dataset_A_Res(sub, feature, label, channels):
    """
    Split one subject's data to evaluate the results of LOSO-CV on Dataset A.

    Args:
        sub: leave one subject out (1..8).
        feature: input fNIRS signals.
        label: input fNIRS labels.
        channels: fNIRS channels; test signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..8 (previously this silently
            returned an empty test set).
    """
    if not 1 <= sub <= 8:
        raise ValueError("sub must be in 1..8 for Dataset A, got %r" % (sub,))
    # Cumulative trial offsets: subjects 1-3 have 36 trials, 4-8 have 48.
    bounds = [0, 36, 72, 108, 156, 204, 252, 300]
    start = bounds[sub - 1]
    # Last subject's block extends to the end of the data.
    end = bounds[sub] if sub < 8 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_test, y_test
def Split_Dataset_B(sub, feature, label, channels):
    """
    LOSO-CV split for Dataset B (29 subjects, 60 trials each, contiguous).

    Args:
        sub: leave one subject out (1..29).
        feature: input fNIRS signals.
        label: input fNIRS labels.
        channels: fNIRS channels; signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_train: training set.
        y_train: labels for training set.
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..29 (previously the first/last
            special cases made other values behave inconsistently).
    """
    if not 1 <= sub <= 29:
        raise ValueError("sub must be in 1..29 for Dataset B, got %r" % (sub,))
    start = 60 * (sub - 1)
    # The last subject's block runs to the end of the data, matching the
    # original open-ended slice feature[60 * 28:].
    end = 60 * sub if sub < 29 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_train = np.append(feature[:start], feature[end:], axis=0)
    y_train = np.append(label[:start], label[end:], axis=0)
    X_train = X_train.reshape((X_train.shape[0], 2, channels, -1))
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_train, y_train, X_test, y_test
def Split_Dataset_B_Res(sub, feature, label, channels):
    """
    Split one subject's data to evaluate the results of LOSO-CV on Dataset B.

    Args:
        sub: leave one subject out (1..29).
        feature: input fNIRS signals.
        label: input fNIRS labels.
        channels: fNIRS channels; test signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..29.
    """
    if not 1 <= sub <= 29:
        raise ValueError("sub must be in 1..29 for Dataset B, got %r" % (sub,))
    start = 60 * (sub - 1)
    # Last subject's block extends to the end of the data.
    end = 60 * sub if sub < 29 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_test, y_test
def Split_Dataset_C(sub, feature, label, channels):
    """
    LOSO-CV split for Dataset C (30 subjects, 75 trials each, contiguous).

    (Docstring fixed: it previously said "Dataset A", a copy-paste error.)

    Args:
        sub: leave one subject out (1..30).
        feature: input fNIRS signals.
        label: input fNIRS labels.
        channels: fNIRS channels; signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_train: training set.
        y_train: labels for training set.
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..30.
    """
    if not 1 <= sub <= 30:
        raise ValueError("sub must be in 1..30 for Dataset C, got %r" % (sub,))
    start = 75 * (sub - 1)
    # The last subject's block runs to the end of the data, matching the
    # original open-ended slice feature[75 * 29:].
    end = 75 * sub if sub < 30 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_train = np.append(feature[:start], feature[end:], axis=0)
    y_train = np.append(label[:start], label[end:], axis=0)
    X_train = X_train.reshape((X_train.shape[0], 2, channels, -1))
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_train, y_train, X_test, y_test
def Split_Dataset_C_Res(sub, feature, label, channels):
    """
    Split one subject's data to evaluate the results of LOSO-CV on Dataset C.

    Args:
        sub: leave one subject out (1..30).
        feature: input fNIRS signals.
        label: input fNIRS labels.
        channels: fNIRS channels; test signals are reshaped to
            (n_trials, 2, channels, -1).
    Returns:
        X_test: test set.
        y_test: labels for test set.
    Raises:
        ValueError: if `sub` is outside 1..30.
    """
    if not 1 <= sub <= 30:
        raise ValueError("sub must be in 1..30 for Dataset C, got %r" % (sub,))
    start = 75 * (sub - 1)
    # Last subject's block extends to the end of the data.
    end = 75 * sub if sub < 30 else len(feature)
    X_test = feature[start:end]
    y_test = label[start:end]
    X_test = X_test.reshape((X_test.shape[0], 2, channels, -1))
    return X_test, y_test
| 27.939655
| 77
| 0.551527
| 955
| 6,482
| 3.554974
| 0.074346
| 0.070692
| 0.063623
| 0.039764
| 0.923417
| 0.902798
| 0.879234
| 0.879234
| 0.86863
| 0.86863
| 0
| 0.065608
| 0.329836
| 6,482
| 232
| 78
| 27.939655
| 0.71593
| 0.258562
| 0
| 0.741667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.008333
| 0
| 0.108333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce4392728050f8c0657bd361172926102d958912
| 319
|
py
|
Python
|
math_dioproject/basicfunctions/simples.py
|
marcocrippa/Package_DioProject_math
|
c15f122140f1efd372bb00f08b7ebae2d39d42b8
|
[
"Unlicense"
] | null | null | null |
math_dioproject/basicfunctions/simples.py
|
marcocrippa/Package_DioProject_math
|
c15f122140f1efd372bb00f08b7ebae2d39d42b8
|
[
"Unlicense"
] | null | null | null |
math_dioproject/basicfunctions/simples.py
|
marcocrippa/Package_DioProject_math
|
c15f122140f1efd372bb00f08b7ebae2d39d42b8
|
[
"Unlicense"
] | null | null | null |
def soma(num1, num2):
    """Print the sum of num1 and num2 (message in Portuguese)."""
    # Fixed garbled message text ("eh igual é" -> "é igual a").
    print(f'A soma dos números é igual a: {num1+num2}')
def sub(num1, num2):
    """Print the difference num1 - num2 (message in Portuguese)."""
    # Bug fix: the message previously said "A soma" (the sum) although
    # this function computes a subtraction.
    print(f'A subtração dos números é igual a: {num1-num2}')
def mult(num1, num2):
    """Print the product num1 * num2 (message in Portuguese)."""
    # Bug fix: the message previously said "A soma" (the sum) although
    # this function computes a multiplication.
    print(f'A multiplicação dos números é igual a: {num1*num2}')
def divi(num1, num2):
    """Print the quotient num1 / num2 (message in Portuguese).

    Raises ZeroDivisionError when num2 is 0 (unchanged behavior).
    """
    # Bug fix: the message previously said "A soma" (the sum) although
    # this function computes a division.
    print(f'A divisão dos números é igual a: {num1/num2}')
| 24.538462
| 56
| 0.648903
| 60
| 319
| 3.45
| 0.25
| 0.309179
| 0.251208
| 0.270531
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0
| 0.062745
| 0.200627
| 319
| 12
| 57
| 26.583333
| 0.74902
| 0
| 0
| 0
| 0
| 0
| 0.528302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
cbeecea1d08fbb840d82eeba6b2d3849f72ea5de
| 35,505
|
py
|
Python
|
whoville/cloudbreak/apis/v1util_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 30
|
2017-06-12T13:05:24.000Z
|
2021-08-03T09:00:48.000Z
|
whoville/cloudbreak/apis/v1util_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 6
|
2017-12-27T23:12:45.000Z
|
2019-03-07T22:14:24.000Z
|
whoville/cloudbreak/apis/v1util_api.py
|
balazsgaspar/whoville
|
0d26853bf5cfd3485067b0c23f886e2b4ab742f8
|
[
"Apache-2.0"
] | 31
|
2017-06-12T13:05:28.000Z
|
2019-09-20T01:50:29.000Z
|
# coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class V1utilApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper.

    :param api_client: explicit ApiClient to use; when falsy, the
        client cached on the shared Configuration is reused (and
        created on first access).
    """
    config = Configuration()
    # Prefer an explicitly supplied client; otherwise fall back to the
    # configuration's cached client, creating it on demand.
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def check_client_version(self, version, **kwargs):
    """
    checks the client version
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.check_client_version(version, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str version: (required)
    :return: VersionCheckResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted, never the full
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # In callback (async) mode the delegate returns the request thread;
    # otherwise it returns the response data directly — either way the
    # result is simply passed through to the caller.
    return self.check_client_version_with_http_info(version, **kwargs)
def check_client_version_with_http_info(self, version, **kwargs):
    """
    checks the client version
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.check_client_version_with_http_info(version, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str version: (required)
    :return: VersionCheckResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of argument names accepted through **kwargs.
    all_params = ['version']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold the kwargs into the locals() dict so every accepted argument
    # can be looked up uniformly via params[...] below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method check_client_version" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'version' is set
    if ('version' not in params) or (params['version'] is None):
        raise ValueError("Missing the required parameter `version` when calling `check_client_version`")

    collection_formats = {}

    # 'version' is substituted into the URL path template below.
    path_params = {}
    if 'version' in params:
        path_params['version'] = params['version']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['tokenAuth']

    # Delegate the actual HTTP round trip (and response deserialization
    # into VersionCheckResult) to the shared ApiClient.
    return self.api_client.call_api('/v1/util/client/{version}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='VersionCheckResult',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_rds_database_util(self, **kwargs):
    """
    create a database for the service in the RDS if the connection could be created
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_rds_database_util(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param RDSBuildRequest body:
    :param list[str] target:
    :return: RdsBuildResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted, never the full
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # In callback (async) mode the delegate returns the request thread;
    # otherwise it returns the response data — pass it straight through.
    return self.create_rds_database_util_with_http_info(**kwargs)
def create_rds_database_util_with_http_info(self, **kwargs):
    """
    create a database for the service in the RDS if the connection could be created
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_rds_database_util_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param RDSBuildRequest body:
    :param list[str] target:
    :return: RdsBuildResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of argument names accepted through **kwargs.
    all_params = ['body', 'target']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold the kwargs into the locals() dict so every accepted argument
    # can be looked up uniformly via params[...] below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_rds_database_util" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'target' in params:
        query_params.append(('target', params['target']))
        # Repeat the query key once per list element (?target=a&target=b).
        collection_formats['target'] = 'multi'

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional request body (RDSBuildRequest).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['tokenAuth']

    # Delegate the actual HTTP round trip (and response deserialization
    # into RdsBuildResult) to the shared ApiClient.
    return self.api_client.call_api('/v1/util/rds-database', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RdsBuildResult',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_cloud_storage_matrix(self, **kwargs):
    """
    returns supported cloud storage for stack version
    Define stack version at least at patch level eg. 2.6.0
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cloud_storage_matrix(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str stack_version:
    :return: list[CloudStorageSupportedResponse]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted, never the full
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # In callback (async) mode the delegate returns the request thread;
    # otherwise it returns the response data — pass it straight through.
    return self.get_cloud_storage_matrix_with_http_info(**kwargs)
def get_cloud_storage_matrix_with_http_info(self, **kwargs):
    """
    returns supported cloud storage for stack version
    Define stack version at least at patch level eg. 2.6.0
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cloud_storage_matrix_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str stack_version:
    :return: list[CloudStorageSupportedResponse]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of argument names accepted through **kwargs.
    all_params = ['stack_version']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold the kwargs into the locals() dict so every accepted argument
    # can be looked up uniformly via params[...] below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cloud_storage_matrix" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Python snake_case name maps to the service's camelCase query key.
    query_params = []
    if 'stack_version' in params:
        query_params.append(('stackVersion', params['stack_version']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['tokenAuth']

    # Delegate the actual HTTP round trip (and response deserialization)
    # to the shared ApiClient.
    return self.api_client.call_api('/v1/util/cloudstoragematrix', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[CloudStorageSupportedResponse]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def get_custom_parameters(self, **kwargs):
    """
    returns custom parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous, in which case the request thread is returned instead of
    the deserialized response.
    >>> thread = api.get_custom_parameters(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ParametersQueryRequest body:
    :return: ParametersQueryResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Sync and async paths both delegate to the *_with_http_info variant;
    # with a callback that call returns the request thread directly.
    kwargs['_return_http_data_only'] = True
    return self.get_custom_parameters_with_http_info(**kwargs)
def get_custom_parameters_with_http_info(self, **kwargs):
    """
    returns custom parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous.
    >>> thread = api.get_custom_parameters_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ParametersQueryRequest body:
    :return: ParametersQueryResponse
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ['body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_custom_parameters" % name
            )
        params[name] = value

    # Both request and response bodies are JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/v1/util/custom-parameters', 'POST',
        {},    # no path parameters
        [],    # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='ParametersQueryResponse',
        auth_settings=['tokenAuth'],    # token based authentication
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_file_system_parameters(self, **kwargs):
    """
    returns filesystem parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous, in which case the request thread is returned instead of
    the deserialized response.
    >>> thread = api.get_file_system_parameters(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param StructuredParametersQueryRequest body:
    :return: StructuredParameterQueriesResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Sync and async paths both delegate to the *_with_http_info variant;
    # with a callback that call returns the request thread directly.
    kwargs['_return_http_data_only'] = True
    return self.get_file_system_parameters_with_http_info(**kwargs)
def get_file_system_parameters_with_http_info(self, **kwargs):
    """
    returns filesystem parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous.
    >>> thread = api.get_file_system_parameters_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param StructuredParametersQueryRequest body:
    :return: StructuredParameterQueriesResponse
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ['body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_file_system_parameters" % name
            )
        params[name] = value

    # Both request and response bodies are JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/v1/util/filesystem-parameters', 'POST',
        {},    # no path parameters
        [],    # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='StructuredParameterQueriesResponse',
        auth_settings=['tokenAuth'],    # token based authentication
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_knox_services(self, blueprint_name, **kwargs):
    """
    returns supported knox services

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous, in which case the request thread is returned instead of
    the deserialized response.
    >>> thread = api.get_knox_services(blueprint_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str blueprint_name: (required)
    :return: list[ExposedServiceResponse]
        If the method is called asynchronously, returns the request thread.
    """
    # Sync and async paths both delegate to the *_with_http_info variant;
    # with a callback that call returns the request thread directly.
    kwargs['_return_http_data_only'] = True
    return self.get_knox_services_with_http_info(blueprint_name, **kwargs)
def get_knox_services_with_http_info(self, blueprint_name, **kwargs):
    """
    returns supported knox services

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous.
    >>> thread = api.get_knox_services_with_http_info(blueprint_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str blueprint_name: (required)
    :return: list[ExposedServiceResponse]
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ['blueprint_name', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'blueprint_name': blueprint_name}
    for name, value in iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_knox_services" % name
            )
        params[name] = value

    # verify the required parameter 'blueprint_name' is set
    if params['blueprint_name'] is None:
        raise ValueError("Missing the required parameter `blueprint_name` when calling `get_knox_services`")

    path_params = {'blueprintName': params['blueprint_name']}

    # Both request and response bodies are JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/v1/util/knoxservices/{blueprintName}', 'GET',
        path_params,
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ExposedServiceResponse]',
        auth_settings=['tokenAuth'],    # token based authentication
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_stack_matrix_util(self, **kwargs):
    """
    returns default ambari details for distinct HDP and HDF

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous, in which case the request thread is returned instead of
    the deserialized response.
    >>> thread = api.get_stack_matrix_util(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: StackMatrix
        If the method is called asynchronously, returns the request thread.
    """
    # Sync and async paths both delegate to the *_with_http_info variant;
    # with a callback that call returns the request thread directly.
    kwargs['_return_http_data_only'] = True
    return self.get_stack_matrix_util_with_http_info(**kwargs)
def get_stack_matrix_util_with_http_info(self, **kwargs):
    """
    returns default ambari details for distinct HDP and HDF

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous.
    >>> thread = api.get_stack_matrix_util_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: StackMatrix
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ['callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_stack_matrix_util" % name
            )
        params[name] = value

    # Both request and response bodies are JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/v1/util/stackmatrix', 'GET',
        {},    # no path parameters
        [],    # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='StackMatrix',
        auth_settings=['tokenAuth'],    # token based authentication
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def test_ambari_database_util(self, **kwargs):
    """
    tests a database connection parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous, in which case the request thread is returned instead of
    the deserialized response.
    >>> thread = api.test_ambari_database_util(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AmbariDatabaseDetails body:
    :return: AmbariDatabaseTestResult
        If the method is called asynchronously, returns the request thread.
    """
    # Sync and async paths both delegate to the *_with_http_info variant;
    # with a callback that call returns the request thread directly.
    kwargs['_return_http_data_only'] = True
    return self.test_ambari_database_util_with_http_info(**kwargs)
def test_ambari_database_util_with_http_info(self, **kwargs):
    """
    tests a database connection parameters

    Synchronous by default; supply a `callback` kwarg to make the request
    asynchronous.
    >>> thread = api.test_ambari_database_util_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AmbariDatabaseDetails body:
    :return: AmbariDatabaseTestResult
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ['body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {}
    for name, value in iteritems(kwargs):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method test_ambari_database_util" % name
            )
        params[name] = value

    # Both request and response bodies are JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/v1/util/ambari-database', 'POST',
        {},    # no path parameters
        [],    # no query parameters
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='AmbariDatabaseTestResult',
        auth_settings=['tokenAuth'],    # token based authentication
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 40.716743
| 984
| 0.568709
| 3,457
| 35,505
| 5.591553
| 0.080995
| 0.066218
| 0.023176
| 0.029798
| 0.898448
| 0.877496
| 0.874237
| 0.860476
| 0.851785
| 0.818469
| 0
| 0.000917
| 0.355161
| 35,505
| 871
| 985
| 40.76349
| 0.843372
| 0.331249
| 0
| 0.769596
| 0
| 0
| 0.149647
| 0.047666
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04038
| false
| 0
| 0.016627
| 0
| 0.11639
| 0.023753
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0212f0b64596d68ae31bc81761b6680fb86b5d5f
| 1,264
|
py
|
Python
|
util/TurtleHare.py
|
arsonite/codescout
|
60068596357545c9583f8e0fcdfc9a80b2db15b7
|
[
"MIT"
] | null | null | null |
util/TurtleHare.py
|
arsonite/codescout
|
60068596357545c9583f8e0fcdfc9a80b2db15b7
|
[
"MIT"
] | 2
|
2020-10-20T14:50:52.000Z
|
2020-10-20T14:51:08.000Z
|
util/TurtleHare.py
|
arsonite/codescout
|
60068596357545c9583f8e0fcdfc9a80b2db15b7
|
[
"MIT"
] | null | null | null |
import time
class TurtleHare:
    """Simple wall-clock timer that runs a function and prints its duration.

    A default function may be supplied at construction time via the
    ``function`` keyword; :meth:`measure` uses it as a fallback when no
    explicit function is passed.
    """

    def __init__(self, *args, **kwargs):
        # Optional default callable; used by measure() when its `function`
        # argument is None.
        self.function = kwargs.get('function')

    def measure(self, function, *args):
        """Run *function* with *args*, print its duration and return the
        elapsed seconds.

        Bug fix: the original ignored the *function* parameter and always
        called ``self.function``. The explicit argument now takes
        precedence; ``self.function`` is used only when *function* is None,
        which preserves the old behavior for callers that relied on the
        constructor-supplied callable.
        """
        target = function if function is not None else self.function
        return TurtleHare.static.measure(target, *args)

    class static:
        @staticmethod
        def measure(function, *args):
            """Run *function* with *args*, print its duration and return
            the elapsed seconds."""
            print('\nMeasuring execution time with TurtleHare ...')
            print('Print statements of executed function below:')
            print('--------------------------------------------\n')
            start = time.time()
            function(*args)
            end = time.time()
            print('\n---------------------------------------------')
            print('End of print statements of executed function.')
            print('Final execution measurement:')
            print(f'    In seconds: {end - start}')
            return end - start
| 39.5
| 68
| 0.484177
| 113
| 1,264
| 5.380531
| 0.265487
| 0.078947
| 0.111842
| 0.164474
| 0.792763
| 0.792763
| 0.792763
| 0.792763
| 0.792763
| 0.792763
| 0
| 0
| 0.268987
| 1,264
| 31
| 69
| 40.774194
| 0.658009
| 0
| 0
| 0.642857
| 0
| 0
| 0.457278
| 0.147152
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0.035714
| 0
| 0.214286
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
023b183985e72889e2e1d6f3b72b7899c0cf01eb
| 24,847
|
py
|
Python
|
tests/test_result_faces.py
|
Data-Only-Greater/SNL-Delft3D-CEC-Verify
|
3aaab93b8fab6e3b6710b68d181b9946ae3ab74a
|
[
"MIT"
] | null | null | null |
tests/test_result_faces.py
|
Data-Only-Greater/SNL-Delft3D-CEC-Verify
|
3aaab93b8fab6e3b6710b68d181b9946ae3ab74a
|
[
"MIT"
] | 34
|
2021-12-20T14:51:36.000Z
|
2022-03-16T11:44:30.000Z
|
tests/test_result_faces.py
|
Data-Only-Greater/SNL-Delft3D-CEC-Verify
|
3aaab93b8fab6e3b6710b68d181b9946ae3ab74a
|
[
"MIT"
] | 1
|
2021-12-20T14:17:30.000Z
|
2021-12-20T14:17:30.000Z
|
# -*- coding: utf-8 -*-
import warnings
import numpy as np
import pandas as pd
import pytest
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
import xarray as xr
from snl_d3d_cec_verify.cases import CaseStudy
from snl_d3d_cec_verify.result.faces import (_check_case_study,
_faces_frame_to_slice,
_faces_frame_to_depth,
_map_to_faces_frame_with_tke,
_map_to_faces_frame,
_get_quadrilateral_centre,
_FMFaces,
_trim_to_faces_frame,
_StructuredFaces)
def test_check_case_study_error():
    """_check_case_study rejects a CaseStudy describing more than one case."""
    case = CaseStudy(dx=[1, 2, 3])

    with pytest.raises(ValueError) as excinfo:
        _check_case_study(case)

    assert "case study must have length one" in str(excinfo)


@pytest.fixture
def faces_frame_fm(data_dir):
    """Last-timestep rows of the stored FM faces-frame CSV."""
    csv_path = data_dir / "output" / "faces_frame_fm.csv"
    frame = pd.read_csv(csv_path, parse_dates=["time"])
    times = frame.time.unique()
    return frame[frame.time == times[-1]]


@pytest.fixture
def faces_frame_structured(data_dir):
    """Last-timestep rows of the stored structured faces-frame CSV."""
    csv_path = data_dir / "output" / "faces_frame_structured.csv"
    frame = pd.read_csv(csv_path, parse_dates=["time"])
    times = frame.time.unique()
    return frame[frame.time == times[-1]]
def test_faces_frame_to_slice_sigma(faces_frame_fm):
    """Slicing the FM frame at sigma=-0.5 yields a Dataset with the expected
    grid extents, and velocity values bounded by the source frame."""
    ts = pd.Timestamp("2001-01-01 01:00:00")
    sigma = -0.5

    ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)

    assert isinstance(ds, xr.Dataset)
    assert len(ds["$x$"]) == 18
    assert len(ds["$y$"]) == 4
    assert np.isclose(ds["$x$"].min(), 0.5)
    assert np.isclose(ds["$x$"].max(), 17.5)
    assert np.isclose(ds["$y$"].min(), 1.5)
    assert np.isclose(ds["$y$"].max(), 4.5)
    assert ds[r"$\sigma$"].values.take(0) == sigma
    assert ds.time.values.take(0) == ts
    assert ds["$z$"].min() > -1.0012
    assert ds["$z$"].max() < -1

    # Same bounds as the frame
    assert ds["$u$"].min() >= faces_frame_fm["u"].min()
    assert ds["$u$"].max() <= faces_frame_fm["u"].max()
    assert ds["$v$"].min() >= faces_frame_fm["v"].min()
    assert ds["$v$"].max() <= faces_frame_fm["v"].max()
    assert ds["$w$"].min() >= faces_frame_fm["w"].min()
    assert ds["$w$"].max() <= faces_frame_fm["w"].max()


def test_faces_frame_structured_to_slice_sigma(faces_frame_structured):
    """Slicing the structured frame at sigma=-0.75 yields the expected grid
    extents, with velocities and tke bounded by the source frame."""
    ts = pd.Timestamp("2001-01-01 01:00:00")
    sigma = -0.75

    ds = _faces_frame_to_slice(faces_frame_structured, ts, "sigma", sigma)

    assert isinstance(ds, xr.Dataset)
    assert len(ds["$x$"]) == 18
    assert len(ds["$y$"]) == 4
    assert np.isclose(ds["$x$"].min(), 0.5)
    assert np.isclose(ds["$x$"].max(), 17.5)
    assert np.isclose(ds["$y$"].min(), 1.5)
    assert np.isclose(ds["$y$"].max(), 4.5)
    assert ds[r"$\sigma$"].values.take(0) == sigma
    assert ds.time.values.take(0) == ts
    assert ds["$z$"].min() > -1.504
    assert ds["$z$"].max() < -1.5

    # Same bounds as the frame
    assert ds["$u$"].min() >= faces_frame_structured["u"].min()
    assert ds["$u$"].max() <= faces_frame_structured["u"].max()
    assert ds["$v$"].min() >= faces_frame_structured["v"].min()
    assert ds["$v$"].max() <= faces_frame_structured["v"].max()
    assert ds["$w$"].min() >= faces_frame_structured["w"].min()
    assert ds["$w$"].max() <= faces_frame_structured["w"].max()
    assert ds["$k$"].min() >= 0
    assert ds["$k$"].min() >= faces_frame_structured["tke"].min()
    assert ds["$k$"].max() <= faces_frame_structured["tke"].max()
def test_faces_frame_to_slice_sigma_extrapolate_forward(faces_frame_fm):
    """A sigma above the stored range (0.1) extrapolates z forward."""
    ts = pd.Timestamp("2001-01-01 01:00:00")
    sigma = 0.1
    ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)
    assert ds["$z$"].min() > 0.2
    assert ds["$z$"].max() < 0.2003


def test_faces_frame_to_slice_sigma_extrapolate_backward(faces_frame_fm):
    """A sigma below the stored range (-1.1) extrapolates z backward."""
    ts = pd.Timestamp("2001-01-01 01:00:00")
    sigma = -1.1
    ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)
    assert ds["$z$"].min() > -2.203
    assert ds["$z$"].max() < -2.2


def test_faces_frame_to_slice_z(faces_frame_fm):
    """Slicing the FM frame at fixed z=-1 yields the expected grid extents,
    with velocities bounded by the source frame."""
    ts = pd.Timestamp("2001-01-01 01:00:00")
    z = -1

    ds = _faces_frame_to_slice(faces_frame_fm, ts, "z", z)

    assert isinstance(ds, xr.Dataset)
    assert len(ds["$x$"]) == 18
    assert len(ds["$y$"]) == 4
    assert np.isclose(ds["$x$"].min(), 0.5)
    assert np.isclose(ds["$x$"].max(), 17.5)
    assert np.isclose(ds["$y$"].min(), 1.5)
    assert np.isclose(ds["$y$"].max(), 4.5)
    assert ds["$z$"].values.take(0) == z
    assert ds.time.values.take(0) == ts
    assert ds[r"$\sigma$"].values.min() >= -1
    assert ds["$z$"].max() < 1.002

    # Same bounds as the frame
    assert ds["$u$"].min() >= faces_frame_fm["u"].min()
    assert ds["$u$"].max() <= faces_frame_fm["u"].max()
    assert ds["$v$"].min() >= faces_frame_fm["v"].min()
    assert ds["$v$"].max() <= faces_frame_fm["v"].max()
    assert ds["$w$"].min() >= faces_frame_fm["w"].min()
    assert ds["$w$"].max() <= faces_frame_fm["w"].max()


def test_faces_frame_to_slice_error():
    """An unrecognized slice key raises RuntimeError."""
    with pytest.raises(RuntimeError) as excinfo:
        _faces_frame_to_slice("mock", "mock", "mock", "mock")
    assert "Given key is not valid" in str(excinfo)
def test_faces_frame_to_depth(faces_frame_fm):
    """Depth extraction from the FM frame returns a DataArray on the
    expected grid, bounded by the frame's depth column."""
    ts = pd.Timestamp("2001-01-01 01:00:00")

    da = _faces_frame_to_depth(faces_frame_fm, ts)

    assert isinstance(da, xr.DataArray)
    assert len(da["$x$"]) == 18
    assert len(da["$y$"]) == 4
    assert da.time.values.take(0) == ts

    # Same bounds as the frame
    assert da.min() >= faces_frame_fm["depth"].min()
    assert da.max() <= faces_frame_fm["depth"].max()


def test_faces_frame_structured_to_depth(faces_frame_structured):
    """Depth extraction from the structured frame returns a DataArray on
    the expected grid, bounded by the frame's depth column."""
    ts = pd.Timestamp("2001-01-01 01:00:00")

    da = _faces_frame_to_depth(faces_frame_structured, ts)

    assert isinstance(da, xr.DataArray)
    assert len(da["$x$"]) == 18
    assert len(da["$y$"]) == 4
    assert da.time.values.take(0) == ts

    # Same bounds as the frame
    assert da.min() >= faces_frame_structured["depth"].min()
    assert da.max() <= faces_frame_structured["depth"].max()
def test_faces_load_t_step_first(faces):
    """Loading the last time step fills the internal frame (18*4*7 rows)
    and records the resolved step's timestamp."""
    t_step = -1
    expected_t_step = faces._resolve_t_step(t_step)

    faces._load_t_step(t_step)

    assert len(faces._frame) == 18 * 4 * 7
    assert expected_t_step in faces._t_steps
    assert faces._t_steps[expected_t_step] == \
        pd.Timestamp('2001-01-01 01:00:00')


def test_faces_load_t_step_second(faces):
    """Loading a second, distinct time step appends rows and tracks both
    timestamps."""
    faces._load_t_step(-1)
    faces._load_t_step(0)

    assert len(faces._frame) == 18 * 4 * 7 * 2
    assert len(faces._t_steps) == 2
    assert set(faces._frame["time"]) == set([
        pd.Timestamp('2001-01-01 01:00:00'),
        pd.Timestamp('2001-01-01')])


def test_faces_load_t_step_no_repeat(faces):
    """Loading the same resolved time step twice (-1 and 1) does not
    duplicate rows."""
    faces._load_t_step(-1)
    faces._load_t_step(1)

    assert len(faces._frame) == 18 * 4 * 7
    assert len(faces._t_steps) == 1


def test_faces_extract_depth(mocker, faces):
    """extract_depth delegates to _faces_frame_to_depth."""
    mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
                        '_faces_frame_to_depth')
    faces.extract_depth(-1)
    mock.assert_called()


def test_faces_extract_sigma(mocker, faces):
    """extract_sigma delegates to _faces_frame_to_slice with key 'sigma'."""
    mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
                        '_faces_frame_to_slice')
    faces.extract_sigma(-1, 0)
    mock.assert_called()
    assert 'sigma' in mock.call_args.args[2]
def test_faces_extract_sigma_interp(faces):
    """Point interpolation at (x, y) on a sigma level returns the requested
    coordinates, an interpolated z, and velocities within frame bounds."""
    t_step = -1
    sigma = -0.5
    x = 1
    y = 3

    ds = faces.extract_sigma(t_step, sigma, x, y)

    t_step = faces._resolve_t_step(t_step)
    ts = faces._t_steps[t_step]

    assert isinstance(ds, xr.Dataset)
    assert ds[r"$\sigma$"].values.take(0) == sigma
    assert ds.time.values.take(0) == ts
    assert ds["$x$"].values.take(0) == x
    assert ds["$y$"].values.take(0) == y
    assert np.isclose(ds["$z$"].values, -1.00114767)

    # Same bounds as the frame
    assert (faces._frame["u"].min() <= ds["$u$"].values.take(0) <=
            faces._frame["u"].max())
    assert (faces._frame["v"].min() <= ds["$v$"].values.take(0) <=
            faces._frame["v"].max())
    assert (faces._frame["w"].min() <= ds["$w$"].values.take(0) <=
            faces._frame["w"].max())


def test_faces_extract_z(mocker, faces):
    """extract_z delegates to _faces_frame_to_slice with key 'z'."""
    mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
                        '_faces_frame_to_slice')
    faces.extract_z(-1, -1)
    mock.assert_called()
    assert 'z' in mock.call_args.args[2]


def test_faces_extract_z_interp(faces):
    """Point interpolation at (x, y) on a z level returns the requested
    coordinates, an interpolated sigma, and velocities within frame
    bounds."""
    t_step = -1
    z = -1
    x = 1
    y = 3

    ds = faces.extract_z(t_step, z, x, y)

    t_step = faces._resolve_t_step(t_step)
    ts = faces._t_steps[t_step]

    assert isinstance(ds, xr.Dataset)
    assert ds["$z$"].values.take(0) == z
    assert ds.time.values.take(0) == ts
    assert ds["$x$"].values.take(0) == x
    assert ds["$y$"].values.take(0) == y
    assert np.isclose(ds[r"$\sigma$"].values, -0.49942682)

    # Same bounds as the frame
    assert (faces._frame["u"].min() <= ds["$u$"].values.take(0) <=
            faces._frame["u"].max())
    assert (faces._frame["v"].min() <= ds["$v$"].values.take(0) <=
            faces._frame["v"].max())
    assert (faces._frame["w"].min() <= ds["$w$"].values.take(0) <=
            faces._frame["w"].max())


@pytest.mark.parametrize("x, y", [
    ("mock", None),
    (None, "mock")])
def test_faces_extract_interp_error(faces, x, y):
    """Supplying only one of x and y raises RuntimeError."""
    with pytest.raises(RuntimeError) as excinfo:
        faces.extract_z("mock", "mock", x, y)
    assert "x and y must both be set" in str(excinfo)
def test_faces_extract_turbine_z(mocker, faces):
    """extract_turbine_z calls extract_z at the turbine hub z plus offset."""
    case = CaseStudy()
    offset_z = 0.5
    t_step = -1
    mock = mocker.patch.object(faces, 'extract_z')

    faces.extract_turbine_z(t_step, case, offset_z)

    mock.assert_called_with(t_step, case.turb_pos_z + offset_z)


def test_faces_extract_turbine_centreline(mocker, faces):
    """extract_turbine_centreline samples extract_z along x from the offset
    turbine position to the domain edge, at fixed offset y and z."""
    case = CaseStudy()
    t_step = -1
    x_step = 0.5
    offset_x = 0.5
    offset_y = 0.5
    offset_z = 0.5
    mock = mocker.patch.object(faces, 'extract_z')

    faces.extract_turbine_centreline(t_step,
                                     case,
                                     x_step,
                                     offset_x,
                                     offset_y,
                                     offset_z)

    mock.assert_called()
    assert mock.call_args.args[0] == t_step
    assert mock.call_args.args[1] == case.turb_pos_z + offset_z

    x = mock.call_args.args[2]
    y = mock.call_args.args[3]

    assert min(x) == case.turb_pos_x + offset_x
    assert max(x) <= faces.xmax
    assert np.unique(np.diff(x)).take(0) == x_step
    assert set(y) == set([case.turb_pos_y + offset_y])


def test_faces_extract_turbine_centre(mocker, faces):
    """extract_turbine_centre calls extract_z at the single offset turbine
    position."""
    case = CaseStudy()
    t_step = -1
    offset_x = 0.5
    offset_y = 0.5
    offset_z = 0.5
    mock = mocker.patch.object(faces, 'extract_z')

    faces.extract_turbine_centre(t_step,
                                 case,
                                 offset_x,
                                 offset_y,
                                 offset_z)

    mock.assert_called()
    assert mock.call_args.args[0] == t_step
    assert mock.call_args.args[1] == case.turb_pos_z + offset_z

    x = mock.call_args.args[2]
    y = mock.call_args.args[3]

    assert len(x) == 1
    assert len(y) == 1
    assert x[0] == case.turb_pos_x + offset_x
    assert y[0] == case.turb_pos_y + offset_y
def test_map_to_faces_frame_with_tke(data_dir):
    """Reading the last time step of the FlowFM map with tke yields the
    expected columns, grid extents, sigma levels, timestamp, and bounded
    physical values; a sigma slice of the result has known mean z and k."""
    map_path = data_dir / "output" / "FlowFM_map.nc"

    faces_frame = _map_to_faces_frame_with_tke(map_path, -1)

    assert isinstance(faces_frame, pd.DataFrame)
    assert len(faces_frame) == 18 * 4 * 7
    assert faces_frame.columns.to_list() == ["x",
                                             "y",
                                             "z",
                                             "sigma",
                                             "time",
                                             "depth",
                                             "u",
                                             "v",
                                             "w",
                                             "tke"]
    assert np.isclose(faces_frame["x"].min(), 0.5)
    assert np.isclose(faces_frame["x"].max(), 17.5)
    assert np.isclose(faces_frame["y"].min(), 1.5)
    assert np.isclose(faces_frame["y"].max(), 4.5)
    assert -2.003 < faces_frame["z"].min() < -4 / 3
    assert -2 / 3 < faces_frame["z"].max() <= 0
    assert np.isclose(faces_frame["sigma"].unique(),
                      [-1.,
                       -0.83333333,
                       -0.66666667,
                       -0.5,
                       -0.33333333,
                       -0.16666667,
                       0.]).all()
    assert set(faces_frame["time"]) == set([
        pd.Timestamp('2001-01-01 01:00:00')])
    assert faces_frame["depth"].min() > 2
    assert faces_frame["depth"].max() < 2.003
    assert faces_frame["u"].min() > 0.57
    assert faces_frame["u"].max() < 0.9
    assert faces_frame["v"].min() > -1e-15
    assert faces_frame["v"].max() < 1e-15
    assert faces_frame["w"].min() > -0.02
    assert faces_frame["w"].max() < 0.02
    assert faces_frame["tke"].min() > 0
    assert faces_frame["tke"].max() < 0.0089

    sigma_slice = _faces_frame_to_slice(faces_frame,
                                        pd.Timestamp('2001-01-01 01:00:00'),
                                        "sigma",
                                        -0.75)

    assert np.isclose(sigma_slice["$z$"].values.mean(), -1.5009617997833038)
    assert round(sigma_slice["$k$"].values.mean(), 5) == 0.00627


def test_map_to_faces_frame_with_tke_none(data_dir):
    """With no time step given, all time steps are read (twice the rows and
    two timestamps), with the same columns, extents, and value bounds."""
    map_path = data_dir / "output" / "FlowFM_map.nc"

    faces_frame = _map_to_faces_frame_with_tke(map_path)

    assert isinstance(faces_frame, pd.DataFrame)
    assert len(faces_frame) == 18 * 4 * 7 * 2
    assert faces_frame.columns.to_list() == ["x",
                                             "y",
                                             "z",
                                             "sigma",
                                             "time",
                                             "depth",
                                             "u",
                                             "v",
                                             "w",
                                             "tke"]
    assert np.isclose(faces_frame["x"].min(), 0.5)
    assert np.isclose(faces_frame["x"].max(), 17.5)
    assert np.isclose(faces_frame["y"].min(), 1.5)
    assert np.isclose(faces_frame["y"].max(), 4.5)
    assert -2.003 < faces_frame["z"].min() < -4 / 3
    assert -2 / 3 < faces_frame["z"].max() <= 0
    assert np.isclose(faces_frame["sigma"].unique(),
                      [-1.,
                       -0.83333333,
                       -0.66666667,
                       -0.5,
                       -0.33333333,
                       -0.16666667,
                       0.]).all()
    assert set(faces_frame["time"]) == set([
        pd.Timestamp('2001-01-01 00:00:00'),
        pd.Timestamp('2001-01-01 01:00:00')])
    assert faces_frame["depth"].min() > 1.998
    assert faces_frame["depth"].max() < 2.003
    assert faces_frame["u"].min() >= 0
    assert faces_frame["u"].max() < 0.9
    assert faces_frame["v"].min() > -1e-15
    assert faces_frame["v"].max() < 1e-15
    assert faces_frame["w"].min() > -0.02
    assert faces_frame["w"].max() < 0.02
    assert faces_frame["tke"].min() > 0
    assert faces_frame["tke"].max() < 0.0089
def test_map_to_faces_frame(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame(map_path, -1)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 216
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert (faces_frame["sigma"].unique() == (-0.8333333333333334,
-0.5,
-0.16666666666666669)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() > 2
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() > 0.6
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
sigma_slice = _faces_frame_to_slice(faces_frame,
pd.Timestamp('2001-01-01 01:00:00'),
"sigma",
-0.75)
assert np.isclose(sigma_slice["$z$"].values.mean(), -1.5009617997833038)
def test_map_to_faces_frame_none(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame(map_path)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 432
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert (faces_frame["sigma"].unique() == (-0.8333333333333334,
-0.5,
-0.16666666666666669)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 00:00:00'),
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() >= 2
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() >= 0.
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
def test_get_quadrilateral_centre():
    # Two zero corners and two unit corners must yield a centre value of 0.5.
    corner_values = np.array([0, 0, 1, 1])
    assert _get_quadrilateral_centre(corner_values) == 0.5
def test_FMFaces(mocker):
    # _FMFaces must delegate frame loading to _map_to_faces_frame_with_tke,
    # forwarding its path and the requested time step unchanged.
    patched = mocker.patch(
        'snl_d3d_cec_verify.result.faces._map_to_faces_frame_with_tke',
        autospec=True)
    
    faces = _FMFaces("mock", 2, 18)
    faces._get_faces_frame(0)
    
    patched.assert_called_with("mock", 0)
def test_trim_to_faces_frame(data_dir):
    # Requesting the final time step (-1) yields a single-step frame of
    # 216 rows with the structured-grid (D3D) column set, including tke.
    trim_path = data_dir / "output" / "trim-D3D.nc"
    frame = _trim_to_faces_frame(trim_path, -1)
    
    assert isinstance(frame, pd.DataFrame)
    assert len(frame) == 216
    assert frame.columns.to_list() == [
        "x", "y", "z", "sigma", "time", "depth", "u", "v", "w", "tke"]
    
    # Horizontal face-centre coordinates span the expected grid extents.
    assert np.isclose(frame["x"].min(), 0.5)
    assert np.isclose(frame["x"].max(), 17.5)
    assert np.isclose(frame["y"].min(), 1.5)
    assert np.isclose(frame["y"].max(), 4.5)
    
    # Vertical coordinates and sigma layers.
    assert -2 < frame["z"].min() < -4 / 3
    assert -2 / 3 < frame["z"].max() < 0
    assert np.isclose(frame["sigma"].unique(),
                      (-0.16666667, -0.5, -0.83333331)).all()
    
    assert set(frame["time"]) == {pd.Timestamp('2001-01-01 01:00:00')}
    
    # Physical value ranges for the converged step.
    assert frame["depth"].min() > 2
    assert frame["depth"].max() < 2.005
    assert frame["u"].min() > 0.6
    assert frame["u"].max() < 0.9
    assert frame["v"].min() > -1e-2
    assert frame["v"].max() < 1e-2
    assert frame["w"].min() > -0.03
    assert frame["w"].max() < 0.02
    assert frame["tke"].min() > 0
    assert frame["tke"].max() < 0.004
def test_trim_to_faces_frame_none(data_dir):
    # With no time step requested, rows for every stored time step are
    # returned (432 = 2 steps x 216 faces) and both timestamps appear.
    trim_path = data_dir / "output" / "trim-D3D.nc"
    frame = _trim_to_faces_frame(trim_path)
    
    assert isinstance(frame, pd.DataFrame)
    assert len(frame) == 432
    assert frame.columns.to_list() == [
        "x", "y", "z", "sigma", "time", "depth", "u", "v", "w", "tke"]
    
    # Horizontal face-centre coordinates span the expected grid extents.
    assert np.isclose(frame["x"].min(), 0.5)
    assert np.isclose(frame["x"].max(), 17.5)
    assert np.isclose(frame["y"].min(), 1.5)
    assert np.isclose(frame["y"].max(), 4.5)
    
    # Vertical coordinates and sigma layers.
    assert -2 < frame["z"].min() < -4 / 3
    assert -2 / 3 < frame["z"].max() < 0
    assert np.isclose(frame["sigma"].unique(),
                      (-0.16666667, -0.5, -0.83333331)).all()
    
    assert set(frame["time"]) == {pd.Timestamp('2001-01-01 00:00:00'),
                                  pd.Timestamp('2001-01-01 01:00:00')}
    
    # Physical value ranges (the initial step permits depth == 2 and u == 0).
    assert frame["depth"].min() >= 2
    assert frame["depth"].max() < 2.005
    assert frame["u"].min() >= 0.
    assert frame["u"].max() < 0.9
    assert frame["v"].min() > -1e-2
    assert frame["v"].max() < 1e-2
    assert frame["w"].min() > -0.03
    assert frame["w"].max() < 0.02
    assert frame["tke"].min() > 0
    assert frame["tke"].max() < 0.004
def test_StructuredFaces(mocker):
    # _StructuredFaces must delegate frame loading to _trim_to_faces_frame,
    # forwarding its path and the requested time step unchanged.
    patched = mocker.patch(
        'snl_d3d_cec_verify.result.faces._trim_to_faces_frame',
        autospec=True)
    
    faces = _StructuredFaces("mock", 2, 18)
    faces._get_faces_frame(0)
    
    patched.assert_called_with("mock", 0)
| 34.995775
| 79
| 0.501871
| 3,157
| 24,847
| 3.725689
| 0.061768
| 0.195545
| 0.095222
| 0.047611
| 0.864394
| 0.834297
| 0.806581
| 0.763731
| 0.726152
| 0.717225
| 0
| 0.06235
| 0.345474
| 24,847
| 709
| 80
| 35.045134
| 0.660887
| 0.007888
| 0
| 0.717017
| 0
| 0
| 0.067852
| 0.012053
| 0
| 0
| 0
| 0
| 0.472275
| 1
| 0.061185
| false
| 0
| 0.013384
| 0
| 0.078394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0249d0492b629f21983709b3ef3a89c17707583c
| 62
|
py
|
Python
|
src/jig/commands/__init__.py
|
Lightslayer/jig
|
7287f594f63f8bc8cc1f42dcafec292dd75c89f2
|
[
"BSD-2-Clause"
] | 16
|
2015-04-07T19:26:01.000Z
|
2020-03-05T21:09:07.000Z
|
src/jig/commands/__init__.py
|
Lightslayer/jig
|
7287f594f63f8bc8cc1f42dcafec292dd75c89f2
|
[
"BSD-2-Clause"
] | 2
|
2015-02-11T13:29:35.000Z
|
2015-03-02T21:03:08.000Z
|
src/jig/commands/__init__.py
|
Lightslayer/jig
|
7287f594f63f8bc8cc1f42dcafec292dd75c89f2
|
[
"BSD-2-Clause"
] | 2
|
2020-05-29T06:48:16.000Z
|
2020-05-29T06:54:36.000Z
|
from .base import get_command
from .base import list_commands
| 20.666667
| 31
| 0.83871
| 10
| 62
| 5
| 0.7
| 0.32
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 32
| 31
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
65f2d5bbafd33eea920dd1dd9926c9c70d6bba47
| 3,128
|
py
|
Python
|
tethys_datasets/base.py
|
CI-WATER/django-tethys_datasets
|
504963a720693931a1fa1a899d5492548672216f
|
[
"BSD-2-Clause"
] | null | null | null |
tethys_datasets/base.py
|
CI-WATER/django-tethys_datasets
|
504963a720693931a1fa1a899d5492548672216f
|
[
"BSD-2-Clause"
] | null | null | null |
tethys_datasets/base.py
|
CI-WATER/django-tethys_datasets
|
504963a720693931a1fa1a899d5492548672216f
|
[
"BSD-2-Clause"
] | null | null | null |
from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES
class DatasetService:
    """
    Used to define dataset services for apps.

    Attributes:
        name: Display name of the service.
        type: Validated engine key (must be a key of VALID_ENGINES).
        engine: Engine looked up from VALID_ENGINES for ``type``.
        endpoint: API endpoint of the service.
        apikey, username, password: Optional credentials.
    """

    def __init__(self, name, type, endpoint, apikey=None, username=None, password=None):
        """
        Constructor

        Raises:
            ValueError: if ``type`` is not a key of VALID_ENGINES.
        """
        self.name = name

        # Validate the types
        if type in VALID_ENGINES:
            self.type = type
            self.engine = VALID_ENGINES[type]
        else:
            # BUGFIX: dict.keys() is not subscriptable on Python 3, so the
            # original ``VALID_ENGINES.keys()[...]`` raised TypeError while
            # building the error message. Materialise the keys once instead.
            valid_types = list(VALID_ENGINES)
            if len(valid_types) > 2:
                comma_separated_types = ', '.join('"{0}"'.format(t) for t in valid_types[:-1])
                last_type = '"{0}"'.format(valid_types[-1])
                valid_types_string = '{0}, and {1}'.format(comma_separated_types, last_type)
            elif len(valid_types) == 2:
                valid_types_string = '"{0}" and "{1}"'.format(valid_types[0], valid_types[1])
            else:
                valid_types_string = '"{0}"'.format(valid_types[0])
            raise ValueError('The value "{0}" is not a valid for argument "type" of DatasetService. Valid values for '
                             '"type" argument include {1}.'.format(type, valid_types_string))

        self.endpoint = endpoint
        self.apikey = apikey
        self.username = username
        self.password = password

    def __repr__(self):
        """
        String representation
        """
        return '<DatasetService: type={0}, api_endpoint={1}>'.format(self.type, self.endpoint)
class SpatialDatasetService:
    """
    Used to define spatial dataset services for apps.

    Attributes:
        name: Display name of the service.
        type: Validated engine key (must be a key of VALID_SPATIAL_ENGINES).
        engine: Engine looked up from VALID_SPATIAL_ENGINES for ``type``.
        endpoint: API endpoint of the service.
        apikey, username, password: Optional credentials.
    """

    def __init__(self, name, type, endpoint, apikey=None, username=None, password=None):
        """
        Constructor

        Raises:
            ValueError: if ``type`` is not a key of VALID_SPATIAL_ENGINES.
        """
        self.name = name

        # Validate the types
        if type in VALID_SPATIAL_ENGINES:
            self.type = type
            self.engine = VALID_SPATIAL_ENGINES[type]
        else:
            # BUGFIX: dict.keys() is not subscriptable on Python 3, so the
            # original ``VALID_SPATIAL_ENGINES.keys()[...]`` raised TypeError
            # while building the error message. Materialise the keys once.
            valid_types = list(VALID_SPATIAL_ENGINES)
            if len(valid_types) > 2:
                comma_separated_types = ', '.join('"{0}"'.format(t) for t in valid_types[:-1])
                last_type = '"{0}"'.format(valid_types[-1])
                valid_types_string = '{0}, and {1}'.format(comma_separated_types, last_type)
            elif len(valid_types) == 2:
                valid_types_string = '"{0}" and "{1}"'.format(valid_types[0], valid_types[1])
            else:
                valid_types_string = '"{0}"'.format(valid_types[0])
            raise ValueError('The value "{0}" is not a valid for argument "type" of SpatialDatasetService. Valid values for '
                             '"type" argument include {1}.'.format(type, valid_types_string))

        self.endpoint = endpoint
        self.apikey = apikey
        self.username = username
        self.password = password

    def __repr__(self):
        """
        String representation
        """
        return '<SpatialDatasetService: type={0}, api_endpoint={1}>'.format(self.type, self.endpoint)
| 37.238095
| 127
| 0.585678
| 354
| 3,128
| 4.951977
| 0.175141
| 0.075299
| 0.108386
| 0.058186
| 0.863662
| 0.837992
| 0.790645
| 0.751854
| 0.715345
| 0.715345
| 0
| 0.016187
| 0.289003
| 3,128
| 83
| 128
| 37.686747
| 0.772032
| 0.063299
| 0
| 0.510638
| 0
| 0
| 0.149786
| 0.016049
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085106
| false
| 0.085106
| 0.021277
| 0
| 0.191489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5a169ac430db4c87362d251735cd8223f4b87b3e
| 7,993
|
py
|
Python
|
nmigen/test/test_lib_cdc.py
|
antmicro/nmigen
|
d964ba9cc45490b141c8c4c4c3d8add1a26a739d
|
[
"BSD-2-Clause"
] | null | null | null |
nmigen/test/test_lib_cdc.py
|
antmicro/nmigen
|
d964ba9cc45490b141c8c4c4c3d8add1a26a739d
|
[
"BSD-2-Clause"
] | null | null | null |
nmigen/test/test_lib_cdc.py
|
antmicro/nmigen
|
d964ba9cc45490b141c8c4c4c3d8add1a26a739d
|
[
"BSD-2-Clause"
] | null | null | null |
# nmigen: UnusedElaboratable=no
from .utils import *
from ..hdl import *
from ..back.pysim import *
from ..lib.cdc import *
class FFSynchronizerTestCase(FHDLTestCase):
    """Simulation tests for the FFSynchronizer flop-chain synchronizer."""

    def test_stages_wrong(self):
        """Invalid ``stages`` arguments must be rejected at construction."""
        with self.assertRaisesRegex(TypeError,
                r"^Synchronization stage count must be a positive integer, not 0$"):
            FFSynchronizer(Signal(), Signal(), stages=0)
        with self.assertRaisesRegex(ValueError,
                r"^Synchronization stage count may not safely be less than 2$"):
            FFSynchronizer(Signal(), Signal(), stages=1)

    def test_basic(self):
        """An input change reaches the output only after the synchronizer's
        stages have clocked it through (output changes on the third Tick)."""
        i = Signal()
        o = Signal()
        frag = FFSynchronizer(i, o)

        sim = Simulator(frag)
        sim.add_clock(1e-6)

        def process():
            # Output starts low before any clock edge.
            self.assertEqual((yield o), 0)
            yield i.eq(1)
            yield Tick()
            # Change not yet visible: still travelling through the stages.
            self.assertEqual((yield o), 0)
            yield Tick()
            self.assertEqual((yield o), 0)
            yield Tick()
            # Change has propagated to the output.
            self.assertEqual((yield o), 1)
        sim.add_process(process)
        sim.run()

    def test_reset_value(self):
        """With ``reset=1`` the chain starts at 1, and a new input value of 0
        propagates with the same multi-cycle latency as in test_basic."""
        i = Signal(reset=1)
        o = Signal()
        frag = FFSynchronizer(i, o, reset=1)

        sim = Simulator(frag)
        sim.add_clock(1e-6)

        def process():
            # Output starts at the configured reset value.
            self.assertEqual((yield o), 1)
            yield i.eq(0)
            yield Tick()
            self.assertEqual((yield o), 1)
            yield Tick()
            self.assertEqual((yield o), 1)
            yield Tick()
            # New input value has propagated through.
            self.assertEqual((yield o), 0)
        sim.add_process(process)
        sim.run()
class AsyncFFSynchronizerTestCase(FHDLTestCase):
    """Simulation tests for AsyncFFSynchronizer."""

    def test_stages_wrong(self):
        """Invalid ``stages`` arguments must be rejected at construction.

        BUGFIX: this test previously constructed ResetSynchronizer — a
        copy-paste of ResetSynchronizerTestCase.test_stages_wrong below — so
        AsyncFFSynchronizer's own argument validation was never exercised.
        """
        with self.assertRaisesRegex(TypeError,
                r"^Synchronization stage count must be a positive integer, not 0$"):
            AsyncFFSynchronizer(Signal(), Signal(), stages=0)
        with self.assertRaisesRegex(ValueError,
                r"^Synchronization stage count may not safely be less than 2$"):
            AsyncFFSynchronizer(Signal(), Signal(), stages=1)

    def test_edge_wrong(self):
        """Only 'pos' and 'neg' are accepted for ``async_edge``."""
        with self.assertRaisesRegex(ValueError,
                r"^AsyncFFSynchronizer async edge must be one of 'pos' or 'neg', not 'xxx'$"):
            AsyncFFSynchronizer(Signal(), Signal(), domain="sync", async_edge="xxx")

    def test_pos_edge(self):
        """With the default positive edge, a rising ``i`` asserts ``o``
        immediately and ``o`` deasserts synchronously after ``i`` falls."""
        i = Signal()
        o = Signal()
        m = Module()
        m.domains += ClockDomain("sync")
        m.submodules += AsyncFFSynchronizer(i, o)

        sim = Simulator(m)
        sim.add_clock(1e-6)

        def process():
            # initial reset
            self.assertEqual((yield i), 0)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)

            # Assert input: output asserts without waiting for a clock edge.
            yield i.eq(1)
            yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)

            # Deassert input: output releases after the synchronizer stages.
            yield i.eq(0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
        sim.add_process(process)
        with sim.write_vcd("test.vcd"):
            sim.run()

    def test_neg_edge(self):
        """With ``async_edge="neg"``, a falling ``i`` asserts ``o``
        immediately and ``o`` deasserts synchronously after ``i`` rises."""
        i = Signal(reset=1)
        o = Signal()
        m = Module()
        m.domains += ClockDomain("sync")
        m.submodules += AsyncFFSynchronizer(i, o, async_edge="neg")

        sim = Simulator(m)
        sim.add_clock(1e-6)

        def process():
            # initial reset
            self.assertEqual((yield i), 1)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)

            # Falling edge on input: output asserts without a clock edge.
            yield i.eq(0)
            yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)

            # Input returns high: output releases after the stages.
            yield i.eq(1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield o), 0)
            yield Tick(); yield Delay(1e-8)
        sim.add_process(process)
        with sim.write_vcd("test.vcd"):
            sim.run()
class ResetSynchronizerTestCase(FHDLTestCase):
    """Simulation tests for ResetSynchronizer."""

    def test_stages_wrong(self):
        """Invalid ``stages`` arguments must be rejected at construction."""
        with self.assertRaisesRegex(TypeError,
                r"^Synchronization stage count must be a positive integer, not 0$"):
            ResetSynchronizer(Signal(), stages=0)
        with self.assertRaisesRegex(ValueError,
                r"^Synchronization stage count may not safely be less than 2$"):
            ResetSynchronizer(Signal(), stages=1)

    def test_basic(self):
        """Exercise reset assertion via ``arst`` and the synchronized
        release of the domain reset, observed through a registered signal
        ``s`` that resets to 1 and is driven to 0 each cycle."""
        arst = Signal()
        m = Module()
        m.domains += ClockDomain("sync")
        m.submodules += ResetSynchronizer(arst)
        # s shows whether the domain is in reset: 1 = reset value, 0 = running.
        s = Signal(reset=1)
        m.d.sync += s.eq(0)

        sim = Simulator(m)
        sim.add_clock(1e-6)

        def process():
            # initial reset
            self.assertEqual((yield s), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 0)
            yield Tick(); yield Delay(1e-8)

            # Assert arst between clock edges and observe s return to its
            # reset value on the following edge.
            yield arst.eq(1)
            yield Delay(1e-8)
            self.assertEqual((yield s), 0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 1)

            # Release arst: the domain leaves reset only after the
            # synchronizer stages have clocked the release through.
            yield arst.eq(0)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 1)
            yield Tick(); yield Delay(1e-8)
            self.assertEqual((yield s), 0)
            yield Tick(); yield Delay(1e-8)
        sim.add_process(process)
        with sim.write_vcd("test.vcd"):
            sim.run()
# TODO: test with distinct clocks
class PulseSynchronizerTestCase(FHDLTestCase):
    """Simulation tests for PulseSynchronizer."""

    def test_stages_wrong(self):
        """Invalid ``stages`` arguments must be rejected at construction."""
        with self.assertRaisesRegex(TypeError,
                r"^Synchronization stage count must be a positive integer, not 0$"):
            PulseSynchronizer("w", "r", stages=0)
        with self.assertRaisesRegex(ValueError,
                r"^Synchronization stage count may not safely be less than 2$"):
            PulseSynchronizer("w", "r", stages=1)

    def test_smoke(self):
        """Pulses fed into ``i`` must be conserved at ``o``: none generated
        at rest, none lost or duplicated in transit (here both domains are
        "sync", so this checks the mechanism, not true clock crossing)."""
        m = Module()
        m.domains += ClockDomain("sync")
        ps = m.submodules.dut = PulseSynchronizer("sync", "sync")

        sim = Simulator(m)
        sim.add_clock(1e-6)

        def process():
            yield ps.i.eq(0)
            # TODO: think about reset
            for n in range(5):
                yield Tick()
            # Make sure no pulses are generated in quiescent state
            for n in range(3):
                yield Tick()
                self.assertEqual((yield ps.o), 0)
            # Check conservation of pulses: drive i high for 4 cycles and
            # count the output pulses produced over a 10-cycle window.
            accum = 0
            for n in range(10):
                yield ps.i.eq(1 if n < 4 else 0)
                yield Tick()
                accum += yield ps.o
            self.assertEqual(accum, 4)
        sim.add_process(process)
        sim.run()
| 34.601732
| 94
| 0.540348
| 949
| 7,993
| 4.513172
| 0.124341
| 0.136587
| 0.177446
| 0.091058
| 0.807378
| 0.785664
| 0.747373
| 0.728928
| 0.72706
| 0.715153
| 0
| 0.028311
| 0.341549
| 7,993
| 230
| 95
| 34.752174
| 0.785484
| 0.026148
| 0
| 0.791878
| 0
| 0
| 0.080129
| 0
| 0
| 0
| 0
| 0.004348
| 0.243655
| 1
| 0.086294
| false
| 0
| 0.020305
| 0
| 0.126904
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a321377b4469ad47bcda3de609a57ea1a3f20cb
| 8,224
|
py
|
Python
|
bin/test.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
bin/test.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
bin/test.py
|
timb-machine/pyattck
|
1636c9191a92fa28e2cc03f8f04b85195070f0b9
|
[
"MIT"
] | null | null | null |
from pyattck import Attck
attack = Attck()


def _print_id_name(entry):
    """Print the id and name of any ATT&CK object (actor, malware, tool,
    technique, tactic or mitigation)."""
    print(entry.id)
    print(entry.name)


def _print_technique_data(technique):
    """Print the generated data sets attached to a technique."""
    # you can also access generated data sets on a technique
    print(technique.command_list)
    print(technique.commands)
    print(technique.queries)
    print(technique.datasets)
    print(technique.possible_detections)


def _dump_full_matrix(matrix):
    """Walk a full ATT&CK matrix (enterprise or mobile) and print every
    object and its relationships, in the same order as the original
    copy-pasted example code."""
    # accessing actors
    for actor in matrix.actors:
        _print_id_name(actor)
        # accessing malware used by an actor or group
        for malware in actor.malwares:
            _print_id_name(malware)
        # accessing tools used by an actor or group
        for tool in actor.tools:
            _print_id_name(tool)
        # accessing techniques used by an actor or group
        for technique in actor.techniques:
            _print_id_name(technique)
            _print_technique_data(technique)

    # accessing malware
    for malware in matrix.malwares:
        _print_id_name(malware)
        # accessing actor or groups using this malware
        for actor in malware.actors:
            _print_id_name(actor)
        # accessing techniques that this malware is used in
        for technique in malware.techniques:
            _print_id_name(technique)

    # accessing mitigation
    for mitigation in matrix.mitigations:
        _print_id_name(mitigation)
        # accessing techniques related to mitigation recommendations
        for technique in mitigation.techniques:
            _print_id_name(technique)
            _print_technique_data(technique)

    # accessing tactics
    for tactic in matrix.tactics:
        _print_id_name(tactic)
        # accessing techniques related to this tactic
        for technique in tactic.techniques:
            _print_id_name(technique)
            _print_technique_data(technique)

    # accessing techniques
    for technique in matrix.techniques:
        _print_id_name(technique)
        _print_technique_data(technique)
        # accessing tactics that this technique belongs to
        for tactic in technique.tactics:
            _print_id_name(tactic)
        # accessing mitigation recommendations for this technique
        for mitigation in technique.mitigations:
            _print_id_name(mitigation)
        # accessing actors using this technique
        for actor in technique.actors:
            _print_id_name(actor)

    # accessing tools
    for tool in matrix.tools:
        _print_id_name(tool)
        # accessing techniques this tool is used in
        for technique in tool.techniques:
            _print_id_name(technique)
            _print_technique_data(technique)
        # accessing actor or groups using this tool
        for actor in tool.actors:
            _print_id_name(actor)


def _dump_preattack(matrix):
    """Walk the PRE-ATT&CK matrix, which exposes only actors, tactics and
    techniques (no malware/tools/mitigations and no generated data sets)."""
    # accessing actors
    for actor in matrix.actors:
        _print_id_name(actor)
        # accessing techniques used by an actor or group
        for technique in actor.techniques:
            _print_id_name(technique)

    # accessing tactics
    for tactic in matrix.tactics:
        _print_id_name(tactic)
        # accessing techniques related to this tactic
        for technique in tactic.techniques:
            _print_id_name(technique)

    # accessing techniques
    for technique in matrix.techniques:
        _print_id_name(technique)
        # accessing tactics that this technique belongs to
        for tactic in technique.tactics:
            _print_id_name(tactic)
        # accessing actors using this technique
        for actor in technique.actors:
            _print_id_name(actor)


# Examples of MITRE Enterprise ATT&CK
_dump_full_matrix(attack.enterprise)

# Examples of MITRE PRE-ATT&CK
_dump_preattack(attack.preattack)

# Examples of MITRE Mobile ATT&CK (same structure as enterprise)
_dump_full_matrix(attack.mobile)
| 29.163121
| 65
| 0.691269
| 998
| 8,224
| 5.676353
| 0.062124
| 0.197705
| 0.03707
| 0.068844
| 0.966461
| 0.966461
| 0.947043
| 0.947043
| 0.945455
| 0.929921
| 0
| 0
| 0.239543
| 8,224
| 281
| 66
| 29.266904
| 0.90582
| 0.264835
| 0
| 0.906077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005525
| 0
| 0.005525
| 0.751381
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
ce477d2ff2db43989755872cca5e7cc676f6d6fc
| 5,335
|
py
|
Python
|
temp_models/trimet_gis_models.py
|
hackoregon/provisional-transportation-api
|
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
|
[
"MIT"
] | 2
|
2018-05-30T18:07:16.000Z
|
2019-05-06T10:01:54.000Z
|
temp_models/trimet_gis_models.py
|
hackoregon/provisional-transportation-api
|
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
|
[
"MIT"
] | 33
|
2018-05-24T05:00:58.000Z
|
2020-06-05T18:16:32.000Z
|
temp_models/trimet_gis_models.py
|
hackoregon/provisional-transportation-api
|
15a74f82f751e9bc60c00eacbe6f16bb2d8905e0
|
[
"MIT"
] | 3
|
2018-05-24T03:38:59.000Z
|
2019-07-10T18:35:31.000Z
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class TmBoundary(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_boundary`` table."""

    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    area_sq_mi = models.FloatField(blank=True, null=True)
    acres = models.FloatField(blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_boundary'
class TmParkride(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_parkride`` table."""

    # NOTE(review): max_length=-1 is what inspectdb emits for an unbounded
    # varchar; Django's CharField requires a positive max_length — confirm
    # and replace (e.g. with TextField) before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    name = models.CharField(max_length=-1, blank=True, null=True)
    address = models.CharField(max_length=-1, blank=True, null=True)
    city = models.CharField(max_length=-1, blank=True, null=True)
    county = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    owner = models.CharField(max_length=-1, blank=True, null=True)
    spaces = models.IntegerField(blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_parkride'
class TmRailLines(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_rail_lines`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    type = models.CharField(max_length=-1, blank=True, null=True)
    line = models.CharField(max_length=-1, blank=True, null=True)
    passage = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_rail_lines'
class TmRailStops(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_rail_stops`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    station = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    line = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_rail_stops'
class TmRouteStops(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_route_stops`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    rte = models.IntegerField(blank=True, null=True)
    dir = models.IntegerField(blank=True, null=True)
    rte_desc = models.CharField(max_length=-1, blank=True, null=True)
    dir_desc = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    stop_seq = models.IntegerField(blank=True, null=True)
    stop_id = models.IntegerField(blank=True, null=True)
    stop_name = models.CharField(max_length=-1, blank=True, null=True)
    jurisdic = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    frequent = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_route_stops'
class TmRoutes(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_routes`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    rte = models.IntegerField(blank=True, null=True)
    dir = models.IntegerField(blank=True, null=True)
    rte_desc = models.CharField(max_length=-1, blank=True, null=True)
    public_rte = models.CharField(max_length=-1, blank=True, null=True)
    dir_desc = models.CharField(max_length=-1, blank=True, null=True)
    frequent = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_routes'
class TmStops(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_stops`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    stop_id = models.IntegerField(blank=True, null=True)
    stop_name = models.CharField(max_length=-1, blank=True, null=True)
    jurisdic = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_stops'
class TmTranCen(models.Model):
    """Auto-generated (inspectdb) model for the ``tm_tran_cen`` table."""

    # NOTE(review): max_length=-1 comes from inspectdb on an unbounded
    # varchar; CharField requires a positive max_length — fix before use.
    ogc_fid = models.AutoField(primary_key=True)  # OGR feature id
    name = models.CharField(max_length=-1, blank=True, null=True)
    address = models.CharField(max_length=-1, blank=True, null=True)
    city = models.CharField(max_length=-1, blank=True, null=True)
    county = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table
        db_table = 'tm_tran_cen'
| 41.679688
| 104
| 0.713215
| 756
| 5,335
| 4.906085
| 0.156085
| 0.133459
| 0.192774
| 0.25209
| 0.840658
| 0.839579
| 0.821515
| 0.813157
| 0.813157
| 0.754381
| 0
| 0.00833
| 0.167385
| 5,335
| 127
| 105
| 42.007874
| 0.826655
| 0.084911
| 0
| 0.739583
| 1
| 0
| 0.018465
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.010417
| 0.010417
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
ce512c21e7546acbb4cf7854892d06c35967ab27
| 4,997
|
py
|
Python
|
sacredbrowser/StateModels.py
|
michaelwand/SacredBrowser
|
e33843a9fc4458b0a2ead4e243c7a9c88e228bca
|
[
"MIT"
] | 13
|
2016-06-29T20:34:29.000Z
|
2021-07-22T16:27:52.000Z
|
sacredbrowser/StateModels.py
|
michaelwand/SacredBrowser
|
e33843a9fc4458b0a2ead4e243c7a9c88e228bca
|
[
"MIT"
] | null | null | null |
sacredbrowser/StateModels.py
|
michaelwand/SacredBrowser
|
e33843a9fc4458b0a2ead4e243c7a9c88e228bca
|
[
"MIT"
] | 8
|
2016-09-21T23:19:29.000Z
|
2018-11-21T02:40:44.000Z
|
# This file contains objects which represent QT models to encapsulate browser state
# (see BrowserState.py) which is NOT contained in any database (for that, see DbEntries.py and
# DbModel.py). Note that the models do not automatically react to
# any changes originating reloading the database entries, the controller must call the respective functions.
# Note that all these models are NOT editable, they only change by outside command.
from PyQt5 import QtCore, QtGui, QtWidgets
from . import BrowserState
class InvisibleFieldsModel(QtCore.QAbstractListModel):
    """Read-only list model mirroring the invisible fields held by a
    BrowserState.Fields object. It never edits the data itself; it only
    translates the Fields change signals into the Qt model protocol."""

    def __init__(self, fields):
        super().__init__()
        # an instance of BrowserState.Fields
        self._fields = fields
        self._fields.invisible_fields_to_be_changed.connect(
                self.slot_invisible_fields_to_be_changed)
        self._fields.invisible_fields_changed.connect(
                self.slot_invisible_fields_changed)

    def rowCount(self, idx):
        # we only have top level data
        assert not idx.isValid()
        return self._fields.invisible_fields_count()

    def data(self, index, role):
        if role not in (QtCore.Qt.DisplayRole, QtCore.Qt.ToolTipRole):
            return None
        # Only the row matters; element [1] strips the type from the entry.
        return self._fields.get_invisible_fields()[index.row()][1]

    def slot_invisible_fields_to_be_changed(self, change_data):
        # for the interpretation of change_data see BrowserState.py
        change_type = BrowserState.Fields.ChangeType
        if change_data.tp == change_type.Insert:
            first_row = change_data.info[0]
            last_row = first_row + change_data.info[1] - 1
            self.beginInsertRows(QtCore.QModelIndex(), first_row, last_row)
        elif change_data.tp == change_type.Remove:
            first_row = change_data.info[0]
            last_row = first_row + change_data.info[1] - 1
            self.beginRemoveRows(QtCore.QModelIndex(), first_row, last_row)
        elif change_data.tp == change_type.Reset:
            self.beginResetModel()
        # Content-only changes need no preparation before the data changes.

    def slot_invisible_fields_changed(self, new_fields, change_data):
        # for the interpretation of change_data see BrowserState.py
        change_type = BrowserState.Fields.ChangeType
        if change_data.tp == change_type.Insert:
            self.endInsertRows()
        elif change_data.tp == change_type.Remove:
            self.endRemoveRows()
        elif change_data.tp == change_type.Reset:
            self.endResetModel()
        elif change_data.tp == change_type.Content:
            for changed_row in change_data.info:
                model_index = self.index(changed_row, 0, QtCore.QModelIndex())
                self.dataChanged.emit(model_index, model_index)
class VisibleFieldsModel(QtCore.QAbstractListModel):
    """Read-only list model mirroring the visible fields held by a
    BrowserState.Fields object. It never edits the data itself; it only
    translates the Fields change signals into the Qt model protocol."""

    def __init__(self, fields):
        super().__init__()
        # an instance of BrowserState.Fields
        self._fields = fields
        self._fields.visible_fields_to_be_changed.connect(
                self.slot_visible_fields_to_be_changed)
        self._fields.visible_fields_changed.connect(
                self.slot_visible_fields_changed)

    def rowCount(self, idx):
        # we only have top level data
        assert not idx.isValid()
        return self._fields.visible_fields_count()

    def data(self, index, role):
        if role not in (QtCore.Qt.DisplayRole, QtCore.Qt.ToolTipRole):
            return None
        # Only the row matters; element [1] strips the type from the entry.
        return self._fields.get_visible_fields()[index.row()][1]

    def slot_visible_fields_to_be_changed(self, change_data):
        # for the interpretation of change_data see BrowserState.py
        change_type = BrowserState.Fields.ChangeType
        if change_data.tp == change_type.Insert:
            first_row = change_data.info[0]
            last_row = first_row + change_data.info[1] - 1
            self.beginInsertRows(QtCore.QModelIndex(), first_row, last_row)
        elif change_data.tp == change_type.Remove:
            first_row = change_data.info[0]
            last_row = first_row + change_data.info[1] - 1
            self.beginRemoveRows(QtCore.QModelIndex(), first_row, last_row)
        elif change_data.tp == change_type.Reset:
            self.beginResetModel()
        # Content-only changes need no preparation before the data changes.

    def slot_visible_fields_changed(self, new_fields, change_data):
        # for the interpretation of change_data see BrowserState.py
        change_type = BrowserState.Fields.ChangeType
        if change_data.tp == change_type.Insert:
            self.endInsertRows()
        elif change_data.tp == change_type.Remove:
            self.endRemoveRows()
        elif change_data.tp == change_type.Reset:
            self.endResetModel()
        elif change_data.tp == change_type.Content:
            for changed_row in change_data.info:
                model_index = self.index(changed_row, 0, QtCore.QModelIndex())
                self.dataChanged.emit(model_index, model_index)
| 47.141509
| 108
| 0.688213
| 613
| 4,997
| 5.417618
| 0.199021
| 0.114423
| 0.057814
| 0.115628
| 0.833484
| 0.814815
| 0.793135
| 0.749172
| 0.749172
| 0.749172
| 0
| 0.005417
| 0.224134
| 4,997
| 105
| 109
| 47.590476
| 0.851174
| 0.168501
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 1
| 0.125
| false
| 0.025
| 0.025
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce5953d702e1b2d25c9a5050b8f362a7d7604615
| 3,028
|
py
|
Python
|
dice.py
|
mpi3d/py-sense-hat
|
dd09b42cd9ed14a8e07857083624fb146d66a220
|
[
"MIT"
] | 1
|
2020-06-24T07:29:15.000Z
|
2020-06-24T07:29:15.000Z
|
dice.py
|
MPi3D/Python_Sense_Hat
|
dd09b42cd9ed14a8e07857083624fb146d66a220
|
[
"MIT"
] | null | null | null |
dice.py
|
MPi3D/Python_Sense_Hat
|
dd09b42cd9ed14a8e07857083624fb146d66a220
|
[
"MIT"
] | null | null | null |
"""Sense HAT dice: shake the device to roll an animated 8x8 LED die."""
from sense_hat import SenseHat
import time
import random

sense = SenseHat()
sense.clear()
sense.set_rotation(180)
sense.low_light = True
sense.set_imu_config(False, False, True)

s = 30  # shake threshold in degrees before a new roll is triggered
b = [255, 255, 255]  # lit (white) pixel
n = [0, 0, 0]        # dark pixel

d1 = [n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n]
d2 = [b,b,n,n,n,n,n,n,
      b,b,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,b,b,
      n,n,n,n,n,n,b,b]
d3 = [b,b,n,n,n,n,n,n,
      b,b,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,b,b,
      n,n,n,n,n,n,b,b]
d4 = [b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      n,n,n,n,n,n,n,n,
      b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b]
d5 = [b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b,
      n,n,n,n,n,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,b,b,n,n,n,
      n,n,n,n,n,n,n,n,
      b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b]
d6 = [b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b,
      n,n,n,n,n,n,n,n,
      b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b,
      n,n,n,n,n,n,n,n,
      b,b,n,n,n,n,b,b,
      b,b,n,n,n,n,b,b]

# face value w (1..6) -> pixel buffer; replaces the duplicated if/elif chains
faces = [d1, d2, d3, d4, d5, d6]


def read_orientation():
    """Return the integer parts of pitch/roll/yaw as strings.

    Each angle formats as "<int>.<frac>", so joining with "." and splitting
    yields exactly six parts; the fractional parts are discarded.
    """
    accel_only = sense.get_accelerometer()
    a = "{pitch}.{roll}.{yaw}".format(**accel_only)
    p, _, r, _, y, _ = a.split(".")
    return p, r, y


def moved(angle, ref):
    """True when *angle* differs from the reference by more than s degrees."""
    return abs(int(angle) - int(ref)) > s


p1, r1, y1 = read_orientation()
while True:
    p, r, y = read_orientation()
    if moved(p, p1) or moved(r, r1) or moved(y, y1):
        # roll animation: flash random faces, slowing down as i shrinks
        i = random.randint(10, 20)
        while i > 0:
            w = random.randint(1, 6)
            sense.set_pixels(faces[w - 1])
            time.sleep(0.5 / i)
            i = i - 1
        # blink the final face five times
        for _ in range(5):
            time.sleep(0.2)
            sense.clear()
            time.sleep(0.2)
            sense.set_pixels(faces[w - 1])
        # re-baseline orientation so the finished roll does not retrigger
        p1, r1, y1 = read_orientation()
    time.sleep(0.01)
| 24.419355
| 149
| 0.419089
| 667
| 3,028
| 1.863568
| 0.101949
| 0.432824
| 0.574417
| 0.66613
| 0.765084
| 0.765084
| 0.749799
| 0.7321
| 0.708769
| 0.708769
| 0
| 0.045128
| 0.356011
| 3,028
| 123
| 150
| 24.617886
| 0.592308
| 0
| 0
| 0.778761
| 0
| 0
| 0.020806
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026549
| 0
| 0.026549
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0ca92eecee8839563dea629ab0bd35f0d0180e73
| 6,034
|
py
|
Python
|
src/api/tests/tests_timetable.py
|
memclutter/clinic-crm
|
1d7b84fbf4f1e1510db303df956fa19db8c01b2b
|
[
"BSD-2-Clause"
] | 3
|
2017-09-25T15:42:15.000Z
|
2017-12-13T21:32:51.000Z
|
src/api/tests/tests_timetable.py
|
sweetlearn/clinic-crm
|
1d7b84fbf4f1e1510db303df956fa19db8c01b2b
|
[
"BSD-2-Clause"
] | null | null | null |
src/api/tests/tests_timetable.py
|
sweetlearn/clinic-crm
|
1d7b84fbf4f1e1510db303df956fa19db8c01b2b
|
[
"BSD-2-Clause"
] | 6
|
2018-04-02T04:39:00.000Z
|
2021-03-14T11:42:44.000Z
|
import datetime
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from clinic.models import Doctor, Speciality
from timetables.models import Timetable
class TimetableTestCase(APITestCase):
    """Tests for the timetable list endpoint (api:timetable-list)."""

    @staticmethod
    def _create_timetable(doctor, day_of_week, start_hour, end_hour):
        """Create a Timetable row with the standard 12:00-13:00 break.

        Deduplicates the six identical Timetable.objects.create blocks the
        tests previously repeated inline.
        """
        return Timetable.objects.create(
            doctor=doctor,
            day_of_week=day_of_week,
            start_time=datetime.time(hour=start_hour),
            end_time=datetime.time(hour=end_hour),
            break_start_time=datetime.time(hour=12),
            break_end_time=datetime.time(hour=13))

    def test_return_empty_list(self):
        """With no rows the endpoint returns 200 and an empty list."""
        response = self._get_response()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, [])

    def test_return_not_empty_list(self):
        """Every created timetable appears in the response."""
        spec = Speciality.objects.create(title='Test')
        doc = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='Test')
        self._create_timetable(doc, Timetable.DW_MON, 8, 17)
        self._create_timetable(doc, Timetable.DW_TUE, 9, 18)
        response = self._get_response()
        self.assertEqual(len(response.data), Timetable.objects.count())

    # NOTE(review): method name keeps the original "correcct" typo to avoid
    # renaming a public test identifier.
    def test_return_correcct_list(self):
        """Serialized fields match the created Timetable row."""
        spec = Speciality.objects.create(title='Test')
        doc = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='Test')
        tt = self._create_timetable(doc, Timetable.DW_MON, 8, 17)
        response = self._get_response()
        tf = '%H:%M:%S'
        entry = response.data[0]
        self.assertEqual(entry['doctor'], doc.id)
        self.assertEqual(entry['day_of_week'], tt.day_of_week)
        self.assertEqual(entry['start_time'], tt.start_time.strftime(tf))
        self.assertEqual(entry['end_time'], tt.end_time.strftime(tf))
        self.assertEqual(entry['break_start_time'], tt.break_start_time.strftime(tf))
        self.assertEqual(entry['break_end_time'], tt.break_end_time.strftime(tf))

    def test_filter_by_doctor(self):
        """?doctor=<id> returns only that doctor's rows."""
        spec = Speciality.objects.create(title='Test')
        doc1 = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='One')
        doc2 = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='Two')
        self._create_timetable(doc1, Timetable.DW_MON, 8, 17)
        self._create_timetable(doc1, Timetable.DW_TUE, 9, 18)
        self._create_timetable(doc2, Timetable.DW_MON, 8, 17)
        response = self._get_response(doctor=doc1.id)
        self.assertEqual(len(response.data), doc1.timetable_set.count())

    def test_filter_by_day_of_week(self):
        """?day_of_week=<dw> returns only rows for that day."""
        spec = Speciality.objects.create(title='Test')
        doc1 = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='One')
        doc2 = Doctor.objects.create(speciality=spec, first_name='Doctor', last_name='Two')
        self._create_timetable(doc1, Timetable.DW_MON, 8, 17)
        self._create_timetable(doc1, Timetable.DW_TUE, 9, 18)
        self._create_timetable(doc2, Timetable.DW_MON, 8, 17)
        response = self._get_response(day_of_week=Timetable.DW_MON)
        self.assertEqual(len(response.data), Timetable.objects.filter(day_of_week=Timetable.DW_MON).count())

    def _get_response(self, **kwargs):
        """GET the timetable list endpoint with optional filter params."""
        url = reverse('api:timetable-list')
        return self.client.get(url, data=kwargs, format='json')
| 49.459016
| 108
| 0.558502
| 664
| 6,034
| 4.847892
| 0.123494
| 0.134203
| 0.178938
| 0.223672
| 0.802423
| 0.767008
| 0.740913
| 0.712333
| 0.712333
| 0.666977
| 0
| 0.021063
| 0.339079
| 6,034
| 121
| 109
| 49.867769
| 0.786108
| 0
| 0
| 0.6875
| 0
| 0
| 0.027686
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.145833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b8947fea77c89a301979c1af018a4fcad9f371c
| 45
|
py
|
Python
|
keyboards/__init__.py
|
jtprog/gendalf_bot
|
aebdaf52f9af3b1307eb1962b16d2e9fd04dd51f
|
[
"WTFPL"
] | 2
|
2020-10-19T07:49:16.000Z
|
2020-10-19T07:50:54.000Z
|
telegram_bot/keyboards/__init__.py
|
LD31D/teaching_bot
|
3aacd395a00509eaceab41845779d5fa1a12bf70
|
[
"MIT"
] | 21
|
2019-10-23T21:16:18.000Z
|
2021-08-02T21:43:46.000Z
|
telegram_bot/keyboards/__init__.py
|
LD31D/teaching_bot
|
3aacd395a00509eaceab41845779d5fa1a12bf70
|
[
"MIT"
] | 1
|
2019-10-25T23:01:25.000Z
|
2019-10-25T23:01:25.000Z
|
from . import default
from . import inline
| 15
| 22
| 0.733333
| 6
| 45
| 5.5
| 0.666667
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 45
| 2
| 23
| 22.5
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b937eb99a6a214a5eb59df53ff65ade69c11502
| 5,920
|
py
|
Python
|
tests/test_unit_parser.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 22
|
2020-06-30T05:27:21.000Z
|
2022-02-21T12:13:51.000Z
|
tests/test_unit_parser.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 71
|
2020-08-07T13:06:30.000Z
|
2022-03-15T06:44:49.000Z
|
tests/test_unit_parser.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 13
|
2020-06-30T03:28:32.000Z
|
2021-12-30T08:17:16.000Z
|
import pytest
from opennem.core.unit_parser import parse_unit_duid, parse_unit_number
class TestUnitParser(object):
    """Tests for parse_unit_number: simple ids, ranges, aliases, multi-unit lines."""

    # Simple
    def test_returns_string_one(self):
        subj = parse_unit_number("1")
        assert subj.id == 1, "Returns string 1 as unit number 1"
        assert subj.number == 1, "Unit has one unit"

    # BUG FIX: this method was also named test_returns_string_one, which
    # shadowed the definition above so that test never ran. Renamed.
    def test_returns_string_two(self):
        subj = parse_unit_number("2")
        assert subj.id == 2, "Has unit id of 2"
        assert subj.number == 1, "Unit has one unit"

    def test_returns_int_one(self):
        subj = parse_unit_number(1)
        assert subj.id == 1, "Returns int 1 as unit number 1"
        assert subj.number == 1, "Unit has one unit"

    def test_returns_string_one_padded(self):
        subj = parse_unit_number(" 1 ")
        assert subj.id == 1, "Returns string 1 as unit number 1"
        assert subj.number == 1, "Unit has one unit"

    def test_blank_unit_number(self):
        subj = parse_unit_number("")
        assert subj.id == 1, "Returns string 1 as unit number 1"
        assert subj.number == 1, "Unit has one unit"

    def test_none_unit_number(self):
        subj = parse_unit_number(None)
        assert subj.id == 1, "Returns string 1 as unit number 1"
        assert subj.number == 1, "Unit has one unit"

    # Ranges
    def test_simple_range(self):
        subj = parse_unit_number("1-2")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has two units"

    def test_simple_range_padded(self):
        subj = parse_unit_number("1- 2 ")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has two units"

    def test_range_unit_number(self):
        subj = parse_unit_number("1-50")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 50, "Unit has 50 units"

    def test_range_unit_number_shifted(self):
        subj = parse_unit_number("50-99")
        assert subj.id == 50, "Unit has an id of 50"
        assert subj.number == 50, "Unit has 50 units"
        assert subj.alias is None, "Unit has no alias"

    # Aliases
    def test_single_has_alias(self):
        subj = parse_unit_number("1a")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 1, "Unit has 1 unit"
        assert subj.alias == "A", "Unit has alias of A"

    def test_single_has_alias_prepend(self):
        subj = parse_unit_number("WT1")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 1, "Unit has 1 unit"
        assert subj.alias == "WT", "Unit has alias of WT"

    def test_single_long_alias(self):
        subj = parse_unit_number("WKIEWA1")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 1, "Unit has 1 unit"
        assert subj.alias == "WKIEWA", "Unit has alias of WKIEWA"

    def test_single_has_alias_prepend_space(self):
        subj = parse_unit_number("WT 1")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 1, "Unit has 1 unit"
        assert subj.alias == "WT", "Unit has alias of WT"

    def test_range_has_alias(self):
        subj = parse_unit_number("1-2a")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 unit"
        assert subj.alias == "A", "Unit has alias of A"

    def test_range_has_alias_prepend(self):
        subj = parse_unit_number("WT1-2")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 unit"
        assert subj.alias == "WT", "Unit has alias of WT"

    def test_range_has_alias_prepend_space(self):
        subj = parse_unit_number("WT 1-2")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 unit"
        assert subj.alias == "WT", "Unit has alias of WT"

    # Force single
    def test_force_single(self):
        subj = parse_unit_number("GT 1-2", force_single=True)
        assert subj.id == 2, "Unit has an id of 2"
        assert subj.number == 1, "Unit has 1 unit"
        assert subj.alias == "GT1", "Unit has alias of GT1"

    # Multi units in one line
    def test_ampersand(self):
        subj = parse_unit_number("1 & 2")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 units"
        assert subj.alias is None, "Unit has no alias"

    def test_ampersand_three(self):
        subj = parse_unit_number("1 & 2 & 3")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 3, "Unit has 3 units"
        assert subj.alias is None, "Unit has no alias"

    def test_comma_separated(self):
        subj = parse_unit_number("1,2")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 units"
        assert subj.alias is None, "Unit has no alias"

    def test_comma_separated_single(self):
        subj = parse_unit_number("GT 1-2,GT 1-4", force_single=True)
        assert subj.id == 2, "Unit has an id of 1"
        assert subj.number == 2, "Unit has 2 units"
        assert subj.alias == "GT1", "Unit has GT1 alias"

    def test_comma_and_ampersand_separated(self):
        subj = parse_unit_number("1, 2 & 5,3 & 4")
        assert subj.id == 1, "Unit has an id of 1"
        assert subj.number == 5, "Unit has 5 units"
        assert subj.alias is None, "Unit has no alias"
class TestUnitDuidParser(object):
    """Tests for parse_unit_duid."""

    def test_unit_duid(self):
        parsed = parse_unit_duid("WT1-2", "NONE")
        assert parsed.id == 1, "Unit has an id of 1"
        assert parsed.number == 2, "Unit has 2 unit"
        assert parsed.alias == "WT", "Unit has alias of WT"

    def test_unit_duid_single(self):
        parsed = parse_unit_duid("GT 1-2", "AGLHAL")
        assert parsed.id == 2, "Unit has an id of 2"
        assert parsed.number == 1, "Unit has 1 unit"
        assert parsed.alias == "GT1", "Unit has alias of GT1"
| 37.232704
| 71
| 0.609628
| 933
| 5,920
| 3.724544
| 0.076099
| 0.189928
| 0.062158
| 0.122302
| 0.869928
| 0.839712
| 0.813525
| 0.758849
| 0.736691
| 0.670791
| 0
| 0.036551
| 0.279054
| 5,920
| 158
| 72
| 37.468354
| 0.777648
| 0.009797
| 0
| 0.566667
| 0
| 0
| 0.239583
| 0
| 0
| 0
| 0
| 0
| 0.55
| 1
| 0.208333
| false
| 0
| 0.016667
| 0
| 0.241667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e7e99e913e114d8cffedf2e548795597174a3720
| 149
|
py
|
Python
|
curris/test/test_script.py
|
a1trl9/curris
|
f9b55a7a2a8864882f297a4a324ccff182176417
|
[
"MIT"
] | null | null | null |
curris/test/test_script.py
|
a1trl9/curris
|
f9b55a7a2a8864882f297a4a324ccff182176417
|
[
"MIT"
] | null | null | null |
curris/test/test_script.py
|
a1trl9/curris
|
f9b55a7a2a8864882f297a4a324ccff182176417
|
[
"MIT"
] | null | null | null |
from curris.test.base import compare_json
def test_script():
    """Compare the script markdown fixture against its expected JSON output."""
    compare_json('curris/test/resource/script.md', 'curris/test/resource/script.json')
| 29.8
| 86
| 0.778523
| 22
| 149
| 5.136364
| 0.5
| 0.265487
| 0.318584
| 0.424779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087248
| 149
| 4
| 87
| 37.25
| 0.830882
| 0
| 0
| 0
| 0
| 0
| 0.416107
| 0.416107
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
e7e9e600c87f8193cf3e12ad9935320f1e3a6b25
| 2,176
|
py
|
Python
|
tests/file_io/raw_file_io.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | 2
|
2016-02-18T12:46:26.000Z
|
2022-03-13T03:05:05.000Z
|
tests/file_io/raw_file_io.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | null | null | null |
tests/file_io/raw_file_io.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | 5
|
2016-12-18T08:05:39.000Z
|
2019-11-19T21:18:00.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the file-like object implementation using pysmraw."""
import os
import unittest
from dfvfs.path import raw_path_spec
from dfvfs.path import os_path_spec
from tests.file_io import test_lib
class RawFileTest(test_lib.ImageFileTestCase):
    """Unit tests for the RAW storage media image file-like object."""

    def setUp(self):
        """Builds the RAW path specification shared by every test."""
        super(RawFileTest, self).setUp()
        image_path = os.path.join(u'test_data', u'ímynd.dd')
        parent_spec = os_path_spec.OSPathSpec(location=image_path)
        self._raw_path_spec = raw_path_spec.RawPathSpec(parent=parent_spec)

    def testOpenCloseInode(self):
        """Tests opening and closing via an inode."""
        self._TestOpenCloseInode(self._raw_path_spec)

    def testOpenCloseLocation(self):
        """Tests opening and closing via a location."""
        self._TestOpenCloseLocation(self._raw_path_spec)

    def testSeek(self):
        """Tests seeking within the file-like object."""
        self._TestSeek(self._raw_path_spec)

    def testRead(self):
        """Tests reading from the file-like object."""
        self._TestRead(self._raw_path_spec)
class SplitRawFileTest(test_lib.ImageFileTestCase):
    """Unit tests for the split RAW storage media image file-like object."""

    def setUp(self):
        """Builds the split-RAW path specification shared by every test."""
        super(SplitRawFileTest, self).setUp()
        image_path = os.path.join(u'test_data', u'image.raw.000')
        parent_spec = os_path_spec.OSPathSpec(location=image_path)
        self._raw_path_spec = raw_path_spec.RawPathSpec(parent=parent_spec)

    def testOpenCloseInode(self):
        """Tests opening and closing via an inode."""
        self._TestOpenCloseInode(self._raw_path_spec)

    def testOpenCloseLocation(self):
        """Tests opening and closing via a location."""
        self._TestOpenCloseLocation(self._raw_path_spec)

    def testSeek(self):
        """Tests seeking within the file-like object."""
        self._TestSeek(self._raw_path_spec)

    def testRead(self):
        """Tests reading from the file-like object."""
        self._TestRead(self._raw_path_spec)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 31.536232
| 74
| 0.734375
| 298
| 2,176
| 5.120805
| 0.238255
| 0.104849
| 0.093709
| 0.098296
| 0.807339
| 0.807339
| 0.807339
| 0.807339
| 0.753604
| 0.753604
| 0
| 0.002169
| 0.152574
| 2,176
| 68
| 75
| 32
| 0.82538
| 0.308824
| 0
| 0.628571
| 0
| 0
| 0.032707
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.485714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0478eb638c12d66ea0b4db5b05734de9e669bbe
| 118
|
py
|
Python
|
tests/__init__.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 22
|
2019-09-23T16:07:44.000Z
|
2022-03-17T12:16:18.000Z
|
tests/__init__.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 15
|
2019-10-20T08:23:55.000Z
|
2022-03-18T00:53:33.000Z
|
tests/__init__.py
|
kajyuuen/pytorch-partial-crf
|
bb368346544cb6241e425a8b4e1a3baee324ef0d
|
[
"MIT"
] | 3
|
2019-12-10T12:44:41.000Z
|
2020-09-03T03:16:46.000Z
|
# Package version string.
__version__ = '0.1.2'
# Re-export the public CRF classes at package level.
from pytorch_partial_crf.partial_crf import PartialCRF
from pytorch_partial_crf.crf import CRF
| 23.6
| 54
| 0.838983
| 19
| 118
| 4.736842
| 0.526316
| 0.333333
| 0.4
| 0.466667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028302
| 0.101695
| 118
| 4
| 55
| 29.5
| 0.820755
| 0
| 0
| 0
| 0
| 0
| 0.042373
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f065a602a73afee5b98329315fdbb51f1389138c
| 25,568
|
py
|
Python
|
gridworld_vav/data_analysis/plot_grid.py
|
dsbrown1331/vav-icml
|
90f40c2b5b52f3cc142ffd4e02bb82d88e1e221d
|
[
"MIT"
] | null | null | null |
gridworld_vav/data_analysis/plot_grid.py
|
dsbrown1331/vav-icml
|
90f40c2b5b52f3cc142ffd4e02bb82d88e1e221d
|
[
"MIT"
] | null | null | null |
gridworld_vav/data_analysis/plot_grid.py
|
dsbrown1331/vav-icml
|
90f40c2b5b52f3cc142ffd4e02bb82d88e1e221d
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import colors
import sys
def plot_dashed_arrow(state, width, ax, direction, arrow_color='k'):
    """Draw a dashed action arrow (dotted shaft plus solid head) in a grid cell.

    state: row-major cell index; width: grid width; ax: matplotlib axes;
    direction: one of 'up', 'down', 'left', 'right'. Invalid directions are
    reported and ignored.
    """
    print("plotting dashed arrow", direction)
    h_length = 0.15
    shaft_length = 0.4
    # convert state to coords where (0,0) is top left
    x_coord = state % width
    y_coord = state // width
    print(x_coord, y_coord)
    # Unit deltas per direction. The original compared strings with `is`,
    # which relies on interning and is not guaranteed; use equality lookup.
    deltas = {'down': (0, 1), 'up': (0, -1), 'left': (-1, 0), 'right': (1, 0)}
    if direction not in deltas:
        print("ERROR: ", direction, " is not a valid action")
        return
    dx, dy = deltas[direction]
    shaft = shaft_length - h_length
    # dotted shaft drawn without a head
    x_end = dx * shaft
    y_end = dy * shaft
    print(x_end, y_end)
    ax.arrow(x_coord, y_coord, x_end, y_end, head_width=None, head_length=None, fc=arrow_color, ec=arrow_color, linewidth=4, linestyle=':', fill=False)
    # solid head placed at the far end of the shaft
    x_coord = state % width
    y_coord = state // width
    print(x_coord, y_coord)
    x_end = dx * h_length
    y_end = dy * h_length
    x_coord += dx * shaft
    y_coord += dy * shaft
    print(x_end, y_end)
    ax.arrow(x_coord, y_coord, x_end, y_end, head_width=0.2, head_length=h_length, fc=arrow_color, ec=arrow_color, linewidth=4, fill=False, length_includes_head=True)
def plot_arrow(state, width, ax, direction, arrow_color='k'):
    """Draw a solid action arrow in the grid cell for *state*.

    state: row-major cell index; width: grid width; ax: matplotlib axes;
    direction: one of 'up', 'down', 'left', 'right'. Invalid directions are
    reported and ignored.
    """
    print("plotting arrow", direction)
    h_length = 0.15
    shaft_length = 0.4
    # convert state to coords where (0,0) is top left
    x_coord = state % width
    y_coord = state // width
    print(x_coord, y_coord)
    # Unit deltas per direction. The original compared strings with `is`,
    # which relies on interning and is not guaranteed; use equality lookup.
    deltas = {'down': (0, 1), 'up': (0, -1), 'left': (-1, 0), 'right': (1, 0)}
    if direction not in deltas:
        print("ERROR: ", direction, " is not a valid action")
        return
    dx, dy = deltas[direction]
    x_end = dx * (shaft_length - h_length)
    y_end = dy * (shaft_length - h_length)
    print(x_end, y_end)
    ax.arrow(x_coord, y_coord, x_end, y_end, head_width=0.2, head_length=h_length, fc=arrow_color, ec=arrow_color, linewidth=4)
def plot_dot(state, width, ax):
    """Draw a black dot in the grid cell for *state* (row-major index)."""
    col = state % width
    row = state // width
    ax.plot(col, row, 'ko', markersize=10)
def plot_questionmark(state, width, ax):
    """Draw a question-mark marker in the grid cell for *state*."""
    col = state % width
    row = state // width
    ax.plot(col, row, 'k', marker=r'$?$', markersize=40)
def plot_optimal_policy(pi, feature_mat):
    """Plot a policy given as a 2d grid of action glyphs over a feature map.

    pi: 2d grid where each cell is an iterable of glyphs: "^", "v", ">", "<"
    or the matching (dr, dc) tuples, "." for terminal, "w" for wall.
    feature_mat: 2d array of one-hot feature vectors (None for wall cells).
    Shows the figure interactively.
    """
    plt.figure()
    ax = plt.axes()
    count = 0
    print(pi)
    rows, cols = len(pi), len(pi[0])
    for line in pi:
        for el in line:
            print("optimal action", el)
            # could be a stochastic policy with more than one optimal action
            for char in el:
                print(char)
                # BUG FIX: glyphs were compared with `is`, which tests object
                # identity on string literals; use `==` equality instead.
                if char == "^" or char == (-1, 0):
                    plot_arrow(count, cols, ax, "up")
                elif char == "v" or char == (1, 0):
                    plot_arrow(count, cols, ax, "down")
                elif char == ">" or char == (0, 1):
                    plot_arrow(count, cols, ax, "right")
                elif char == "<" or char == (0, -1):
                    plot_arrow(count, cols, ax, "left")
                elif char == ".":
                    plot_dot(count, cols, ax)
                elif el == "w":
                    # wall cell: nothing to draw (original tested `el`, not
                    # `char`; behavior preserved)
                    pass
                else:
                    print("error in policy format")
                    sys.exit()
            count += 1
    # map each cell to a color index: 0 for walls (None), feature index + 1 otherwise
    mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
    cmap = colors.ListedColormap(['black', 'white', 'tab:red', 'tab:blue', 'tab:green', 'tab:purple', 'tab:orange', 'tab:gray', 'tab:cyan'])
    im = plt.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
    ax = plt.gca()
    ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
    ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
    # gridlines based on minor ticks
    ax.grid(which='minor', color='k', linestyle='-', linewidth=5)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_locator(plt.NullLocator())
    ax.xaxis.set_major_locator(plt.NullLocator())
    plt.show()
def plot_optimal_policy_vav(pi, feature_mat, walls=False, filename=False, show=False, arrow_color='k', feature_colors=None):
    """Plot a gridworld policy given as a dict of optimal actions.

    pi: dict mapping (row, col) -> iterable of actions, where an action is a
    (dr, dc) tuple, None for a terminal state, or "w" for a wall.
    feature_mat: 2d array of one-hot feature vectors (None for wall cells).
    Saves to *filename* if given, otherwise shows the figure when *show*.
    """
    plt.figure()
    ax = plt.axes()
    count = 0
    print(pi)
    rows, cols = len(feature_mat), len(feature_mat[0])
    for r in range(rows):
        for c in range(cols):
            if feature_mat[r][c]:
                for a in pi[(r, c)]:
                    print("optimal action", a)
                    # could be a stochastic policy with more than one optimal action
                    if a is None:
                        plot_dot(count, cols, ax)
                    elif a == (-1, 0):
                        plot_arrow(count, cols, ax, "up", arrow_color)
                    elif a == (1, 0):
                        plot_arrow(count, cols, ax, "down", arrow_color)
                    elif a == (0, 1):
                        plot_arrow(count, cols, ax, "right", arrow_color)
                    elif a == (0, -1):
                        plot_arrow(count, cols, ax, "left", arrow_color)
                    elif a == "w":
                        # wall marker: nothing to draw (BUG FIX: was `a is "w"`,
                        # an identity comparison on a string literal)
                        pass
                    else:
                        print("error in policy format")
            count += 1  # advance per grid cell (row-major), walls included
    print(feature_mat)
    # map each cell to a color index: 0 for walls (None), feature index + 1 otherwise
    mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
    feature_set = {m for mrow in mat for m in mrow}
    print(mat)
    if feature_colors is None:
        all_colors = ['black', 'white', 'tab:red', 'tab:blue', 'tab:gray', 'tab:green', 'tab:purple', 'tab:orange', 'tab:cyan']
    else:
        all_colors = feature_colors
    # keep only the colors for features actually present (hard coded to 9 features)
    colors_to_use = [all_colors[f] for f in range(9) if f in feature_set]
    cmap = colors.ListedColormap(colors_to_use)
    im = plt.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
    ax = plt.gca()
    ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
    ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
    # gridlines based on minor ticks
    ax.grid(which='minor', color='k', linestyle='-', linewidth=5)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_locator(plt.NullLocator())
    ax.xaxis.set_major_locator(plt.NullLocator())
    plt.tight_layout()
    if filename:
        plt.savefig(filename)
    elif show:
        plt.show()
def plot_test_questions(question_list, feature_mat, walls=False, filename=False, show=False, arrow_color='k', feature_colors=None):
    """Plot '?' markers at the states appearing in *question_list*.

    question_list: iterable of (state, action) pairs where state is (row, col)
    and action is a (dr, dc) tuple, a list of them, or None for terminal.
    feature_mat: 2d array of one-hot feature vectors (None for wall cells).
    Saves to *filename* if given; shows the figure when *show*.
    """
    plt.figure()
    ax = plt.axes()
    count = 0
    rows, cols = len(feature_mat), len(feature_mat[0])
    for r in range(rows):
        for c in range(cols):
            if feature_mat[r][c]:
                for (s, a) in question_list:
                    if s == (r, c):
                        opt_actions = a if type(a) is list else [a]
                        for a in opt_actions:
                            print("optimal action", a)
                            # could be a stochastic policy with more than one optimal action
                            if a is None:
                                # terminal state: no action choice, nothing to plot
                                continue
                            plot_questionmark(count, cols, ax)
            count += 1  # advance per grid cell (row-major), walls included
    print(feature_mat)
    # map each cell to a color index: 0 for walls (None), feature index + 1 otherwise
    mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
    feature_set = {m for mrow in mat for m in mrow}
    print(mat)
    if feature_colors is None:
        all_colors = ['black', 'white', 'tab:red', 'tab:blue', 'tab:gray', 'tab:green', 'tab:purple', 'tab:orange', 'tab:cyan']
    else:
        all_colors = feature_colors
    # keep only the colors for features actually present (hard coded to 9 features)
    colors_to_use = [all_colors[f] for f in range(9) if f in feature_set]
    cmap = colors.ListedColormap(colors_to_use)
    im = plt.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
    ax = plt.gca()
    ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
    ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
    # gridlines based on minor ticks
    ax.grid(which='minor', color='k', linestyle='-', linewidth=5)
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_locator(plt.NullLocator())
    ax.xaxis.set_major_locator(plt.NullLocator())
    plt.tight_layout()
    if filename:
        plt.savefig(filename)
    if show:
        plt.show()
def plot_preference_query(good_traj, bad_traj, feature_mat, walls=False, filename=False, show=False,
                          good_arrow_color='b', bad_arrow_color='r', feature_colors=None):
    """Plot a preference query over a grid world: the preferred trajectory is
    drawn with solid arrows and the dispreferred one with dashed arrows.

    Args:
        good_traj: iterable of (state, action) pairs for the preferred
            trajectory; state is a (row, col) tuple, action is a single action
            tuple or a list of them. None actions are drawn as dots.
        bad_traj: same format, for the dispreferred trajectory.
        feature_mat: 2d array of one-hot feature tuples; None marks a wall cell.
        walls: unused, kept for interface compatibility.
        filename: if truthy, save the figure to this path.
        show: if truthy, display the figure.
        good_arrow_color, bad_arrow_color: matplotlib colors for the two
            trajectories' arrows.
        feature_colors: optional palette indexed by feature id; defaults to a
            built-in 9-color palette.
    """
    plt.figure()
    ax = plt.axes()
    rows, cols = len(feature_mat), len(feature_mat[0])

    def _draw_trajectory(traj, color, draw_arrow):
        # Draw one trajectory; draw_arrow is plot_arrow (solid) or
        # plot_dashed_arrow (dashed). Factored out of two duplicated loops.
        count = 0  # linear cell index (left-to-right, top-to-bottom)
        for r in range(rows):
            for c in range(cols):
                if feature_mat[r][c]:
                    for (s, a) in traj:
                        if s == (r, c):
                            # allow a single action or a list (stochastic policy)
                            opt_actions = a if type(a) is list else [a]
                            for act in opt_actions:
                                print("optimal action", act)
                                if act is None:
                                    plot_dot(count, cols, ax)
                                elif act == (-1, 0):
                                    draw_arrow(count, cols, ax, "up", color)
                                elif act == (1, 0):
                                    draw_arrow(count, cols, ax, "down", color)
                                elif act == (0, 1):
                                    draw_arrow(count, cols, ax, "right", color)
                                elif act == (0, -1):
                                    # fixed: was `is "w"`-style identity tests; compare by value
                                    draw_arrow(count, cols, ax, "left", color)
                                elif act == "w":
                                    pass  # wall marker: nothing to draw
                                else:
                                    print("error in policy format")
                count += 1

    _draw_trajectory(good_traj, good_arrow_color, plot_arrow)        # preferred: solid
    _draw_trajectory(bad_traj, bad_arrow_color, plot_dashed_arrow)   # dispreferred: dashed

    # map each cell to a color index: 0 for walls (None), feature id + 1 otherwise
    mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
    feature_set = {m for mrow in mat for m in mrow}
    print(mat)
    if feature_colors is None:
        all_colors = ['black', 'white', 'tab:red', 'tab:blue', 'tab:gray', 'tab:green', 'tab:purple', 'tab:orange', 'tab:cyan']
    else:
        all_colors = feature_colors
    # keep only the colors whose feature ids actually occur, preserving palette order
    colors_to_use = [all_colors[f] for f in range(len(all_colors)) if f in feature_set]
    cmap = colors.ListedColormap(colors_to_use)
    plt.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
    ax = plt.gca()
    # minor ticks at cell boundaries so the grid lines outline each cell
    ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
    ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
    ax.grid(which='minor', color='k', linestyle='-', linewidth=5)
    # hide all tick labels and tick marks
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_locator(plt.NullLocator())
    ax.xaxis.set_major_locator(plt.NullLocator())
    plt.tight_layout()
    if filename:
        plt.savefig(filename)
    if show:
        plt.show()
def plot_optimal_policy_vav_grid(pis, feature_mats, g_rows, g_cols, walls=False, filename=False):
    """Plot several optimal policies side by side on a grid of subplots.

    Args:
        pis: list of policies; each maps a (row, col) state to an iterable of
            optimal actions (direction tuples, None for terminal, "w" for wall).
        feature_mats: list of 2d arrays of one-hot feature tuples (None = wall),
            parallel to pis.
        g_rows, g_cols: subplot grid shape passed to plt.subplots.
        walls: unused, kept for interface compatibility.
        filename: if truthy, save the figure to this path; otherwise show it.
    """
    fig, axs = plt.subplots(g_rows, g_cols)
    for cnt, ax in enumerate(axs):
        pi = pis[cnt]
        feature_mat = feature_mats[cnt]
        count = 0  # linear cell index (left-to-right, top-to-bottom)
        rows, cols = len(feature_mat), len(feature_mat[0])
        for r in range(rows):
            for c in range(cols):
                # possibly several optimal actions (stochastic policy)
                for a in pi[(r, c)]:
                    if a is None:
                        plot_dot(count, cols, ax)
                    elif a == (-1, 0):
                        plot_arrow(count, cols, ax, "up")
                    elif a == (1, 0):
                        plot_arrow(count, cols, ax, "down")
                    elif a == (0, 1):
                        plot_arrow(count, cols, ax, "right")
                    elif a == (0, -1):
                        plot_arrow(count, cols, ax, "left")
                    elif a == "w":  # fixed: was `a is "w"` — identity test on a literal is unreliable
                        pass  # wall marker: nothing to draw
                    else:
                        print("error in policy format")
                        sys.exit()
                count += 1
        # map each cell to a color index: 0 for walls (None), feature id + 1 otherwise
        mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
        feature_set = {m for mrow in mat for m in mrow}
        all_colors = ['black', 'white', 'tab:red', 'tab:blue', 'tab:green', 'tab:purple', 'tab:orange', 'tab:gray', 'tab:cyan']
        # keep only the colors whose feature ids actually occur, preserving
        # palette order (was hard-coded to range(9))
        colors_to_use = [all_colors[f] for f in range(len(all_colors)) if f in feature_set]
        cmap = colors.ListedColormap(colors_to_use)
        ax.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
        # minor ticks at cell boundaries so the grid lines outline each cell
        ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
        ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
        ax.grid(which='minor', color='k', linestyle='-', linewidth=5)
        # hide all tick labels and tick marks
        ax.xaxis.set_major_formatter(plt.NullFormatter())
        ax.yaxis.set_major_formatter(plt.NullFormatter())
        ax.yaxis.set_major_locator(plt.NullLocator())
        ax.xaxis.set_major_locator(plt.NullLocator())
    plt.tight_layout()
    if filename:
        plt.savefig(filename)
    else:
        plt.show()
def plot_test_query(state, better_action, worse_action, feature_mat, equal_pref=False):
    """Plot a single-state action preference query on the feature grid.

    The better action is drawn as a solid arrow; the worse action is dashed,
    unless equal_pref is True, in which case both are drawn solid.

    Args:
        state: integer cell index, counting left-to-right, top-to-bottom.
        better_action, worse_action: one of "^", "v", ">", "<".
        feature_mat: 2d array of one-hot feature tuples; None marks a wall cell.
        equal_pref: if True, the two actions are equally preferred.
    """
    # glyph -> direction name expected by the arrow helpers; replaces eight
    # `action is "^"`-style branches (identity tests on string literals are
    # unreliable and a SyntaxWarning on modern Python)
    direction = {"^": "up", "v": "down", ">": "right", "<": "left"}
    plt.figure()
    ax = plt.axes()
    rows, cols = len(feature_mat), len(feature_mat[0])
    if better_action in direction:
        plot_arrow(state, cols, ax, direction[better_action])
    if worse_action in direction:
        if equal_pref:
            # equally good: draw the second action solid too
            plot_arrow(state, cols, ax, direction[worse_action])
        else:
            plot_dashed_arrow(state, cols, ax, direction[worse_action])
    # map each cell to a color index: 0 for walls (None), feature id + 1 otherwise
    mat = [[0 if fvec is None else fvec.index(1) + 1 for fvec in row] for row in feature_mat]
    cmap = colors.ListedColormap(['black', 'white', 'tab:blue', 'tab:red', 'tab:green', 'tab:purple', 'tab:orange', 'tab:gray', 'tab:cyan'])
    plt.imshow(mat, cmap=cmap, interpolation='none', aspect='equal')
    # add the grid via minor ticks at cell boundaries
    ax = plt.gca()
    ax.set_xticks(np.arange(-.5, cols, 1), minor=True)
    ax.set_yticks(np.arange(-.5, rows, 1), minor=True)
    ax.grid(which='minor', axis='both', linestyle='-', linewidth=5, color='k')
    # hide all tick marks and labels; booleans instead of the legacy 'off'
    # strings, which recent matplotlib versions reject
    plt.tick_params(
        axis='both',       # changes apply to both axes
        which='both',      # both major and minor ticks are affected
        bottom=False,
        top=False,
        left=False,
        right=False,
        labelbottom=False,
        labelleft=False)
    plt.show()
if __name__=="__main__":
    # Demo: render an example policy and two single-state test queries.
    # pi: per-cell strings of optimal action glyphs ('^' up, 'v' down,
    # '<' left, '>' right, '.' terminal); feature_mat: one-hot feature
    # tuples per cell, None marks a wall.
    pi = [['v', '^><','.'],['<>v','<','>'],['<>^v','v' ,'^']]
    feature_mat = [[(1,0,0),(0,1,0),(0,0,1)],[(0,0,0,1),(0,0,0,0,1),(0,0,0,0,0,1)],[(0,0,0,0,0,0,1), (0,0,0,0,0,0,0,1),None] ]
    plot_optimal_policy(pi, feature_mat)
    state = 3 #the integer value of state starting from top left and reading left to right, top to bottom.
    better_action = "v"
    worse_action = "<"
    #plot the optimal test query, where the right answer is bolded (add equal_pref=True argument if both are equally good)
    plot_test_query(state, better_action, worse_action, feature_mat)
    # same query at another state, but with both actions equally preferred
    state = 4 #the integer value of state starting from top left and reading left to right, top to bottom.
    better_action = "v"
    worse_action = "<"
    plot_test_query(state, better_action, worse_action, feature_mat, equal_pref = True)
| 39.763608
| 168
| 0.51627
| 3,254
| 25,568
| 3.918562
| 0.077443
| 0.022116
| 0.030194
| 0.030115
| 0.901733
| 0.897263
| 0.886362
| 0.883146
| 0.87452
| 0.844248
| 0
| 0.015179
| 0.36354
| 25,568
| 642
| 169
| 39.825545
| 0.768389
| 0.189534
| 0
| 0.793722
| 0
| 0
| 0.052263
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022422
| false
| 0.011211
| 0.008969
| 0
| 0.038117
| 0.065022
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0842a84984e914741e9d6b1904c218590bec289
| 129
|
py
|
Python
|
FresnoPython/compat.py
|
FresnoPython/FresnoPython
|
d606b1d3604ba25ab0f3d178173ce9d6c05be1b2
|
[
"MIT"
] | 2
|
2017-05-25T20:14:07.000Z
|
2018-11-08T04:29:12.000Z
|
FresnoPython/compat.py
|
FresnoPython/FresnoPython
|
d606b1d3604ba25ab0f3d178173ce9d6c05be1b2
|
[
"MIT"
] | null | null | null |
FresnoPython/compat.py
|
FresnoPython/FresnoPython
|
d606b1d3604ba25ab0f3d178173ce9d6c05be1b2
|
[
"MIT"
] | 1
|
2017-05-21T14:01:20.000Z
|
2017-05-21T14:01:20.000Z
|
# Compatibility shim: expose quote_plus under one name on both major versions.
try:
    # Python 3: quote_plus moved into the urllib.parse submodule
    from urllib.parse import quote_plus
except ImportError:
    # Python 2: urllib.parse does not exist; quote_plus lives in urllib itself
    from urllib import quote_plus
| 18.428571
| 39
| 0.705426
| 18
| 129
| 4.944444
| 0.666667
| 0.224719
| 0.337079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.255814
| 129
| 6
| 40
| 21.5
| 0.90625
| 0.131783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6515cbad7a621073e0de828103a1cbf5bf662e69
| 92
|
py
|
Python
|
event_annotator/__init__.py
|
roger-selzler/event_annotator
|
80a986dc196f2316530f5b3845da39ff347d277a
|
[
"MIT"
] | null | null | null |
event_annotator/__init__.py
|
roger-selzler/event_annotator
|
80a986dc196f2316530f5b3845da39ff347d277a
|
[
"MIT"
] | null | null | null |
event_annotator/__init__.py
|
roger-selzler/event_annotator
|
80a986dc196f2316530f5b3845da39ff347d277a
|
[
"MIT"
] | null | null | null |
from .event_annotator import Event_annotator
from .event_annotator import load_csv_data
| 23
| 45
| 0.847826
| 13
| 92
| 5.615385
| 0.538462
| 0.575342
| 0.493151
| 0.657534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 92
| 3
| 46
| 30.666667
| 0.9125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e8e6f42e5548d17b537a0389b6e631e7d4e858c0
| 179,258
|
py
|
Python
|
testing/test_attractors.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
testing/test_attractors.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
testing/test_attractors.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
import numpy as np
import logic
from unittest import TestCase
import graphs
import sympy
from collections import namedtuple
import random
from attractors import find_num_attractors_onestage, \
vertex_model_impact_scores, stochastic_vertex_model_impact_scores, find_num_steady_states, \
find_attractors_dubrova, find_attractors_onestage_enumeration, ImpactType, \
vertex_state_impact_scores, stochastic_vertex_state_impact_scores, graph_model_impact_score, \
graph_state_impact_score, stochastic_graph_model_impact_score, stochastic_graph_state_impact_score
import attractors
dubrova_path = "../" + attractors.dubrova_path
ILPAttractorExperimentParameters = namedtuple("AttractorExperimentParameters", "G T P n_attractors")
VertexModelImpactExperimentParameters = namedtuple("VertexModelImpactExperimentParameters", "G current_attractors T P "
"impact_types relative_basins "
"maximal_bits "
"impacts")
VertexStateImpactExperimentParameters = namedtuple("VertexStateImpactExperimentParameters", "G current_attractors "
"relative_basins "
"max_transient_len "
"impacts")
StochasticVertexModelImpactExperimentParameters = namedtuple(
"StochasticVertexModelImpactExperimentParameters", "G current_attractors "
"bits_of_change relative_basins impact_type impacts")
StochasticVertexStateImpactExperimentParameters = namedtuple(
"StochasticVertexStateImpactExperimentParameters", "G impacts")
GraphModelImpactExperimentParameters = namedtuple("GraphModelImpactExperimentParameters", "G current_attractors T P "
"impact_types relative_basins "
"maximal_bits "
"impact")
GraphStateImpactExperimentParameters = namedtuple("GraphStateImpactExperimentParameters", "G current_attractors "
"relative_basins "
"max_transient_len maximal_bits "
"impact")
StochasticGraphModelImpactExperimentParameters = namedtuple(
"StochasticGraphModelImpactExperimentParameters", "G current_attractors "
"bits_of_change relative_basins impact_type impact")
StochasticGraphStateImpactExperimentParameters = namedtuple(
"StochasticGraphStateImpactExperimentParameters", "G bits_of_change impact")
DubrovaExperimentParameters = namedtuple("DubrovaExperimentParameters", "G mutate n_attractors")
class TestAttractors(TestCase):
def test_num_attractors_onestage(self):
experiments = []
"""test on known toy models"""
# 0, 1
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
# 2, 3
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
# 4, 5
G = graphs.Network(vertex_names=["A"], edges=[],
vertex_functions=[None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
# 6, 7
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[1], threshold=1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=5, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=4))
# 8, 9
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
# 10, 11
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=2, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=1, n_attractors=1))
# 12, 13
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
# 14, 15
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[None, None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=4))
# 16, 17
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[None, True])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=2))
# 18, 19, 20
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=1, n_attractors=1))
# 21, 22, 23
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=15, P=15, n_attractors=3))
# 24, 25
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[lambda x: True, lambda x: False])
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=2, n_attractors=1))
# 26, 27
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=4, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=4, n_attractors=2))
# 28, 29
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, lambda _: True])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
# 30, 31
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=6, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=2))
# 32, 33, 34, 35, 36
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 0)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=4, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=4, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=4, n_attractors=4))
# 37
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=3, n_attractors=3))
# 38, 39, 40
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand]*3)
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=2, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=10, P=10, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=5, P=10, n_attractors=1))
# 41, 42
# acyclic, should have 2**#input_nodes attractors of length 1
G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
vertex_functions=[sympy.Nand]*6)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=10, n_attractors=8))
# 43, 44, 45
G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
vertex_functions=[sympy.And]*5)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=18, n_attractors=18))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=40, n_attractors=20)) # offsets!
# 46, 47, 48
# a failed random graph added as a constant test
G = graphs.Network(
vertex_names=['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16',
'17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31',
'32', '33', '34'],
edges=[('1', '2'), ('2', '16'), ('3', '17'), ('5', '15'), ('6', '29'), ('7', '28'), ('8', '22'),
('9', '28'), ('10', '18'), ('11', '15'), ('12', '24'), ('13', '14'), ('15', '18'), ('16', '26'),
('17', '27'), ('18', '20'), ('19', '23'), ('20', '27'), ('23', '26'), ('24', '29'), ('25', '33'),
('26', '30'), ('27', '32'), ('28', '32'), ('30', '32'), ('31', '34'), ('32', '33'), ('33', '34')],
vertex_functions=[None, None, sympy.Nand, None, None, None, None, None, None, None, None, None, None, None,
sympy.Or, sympy.Nand,
sympy.Nand, sympy.Nand, sympy.Nand, None, sympy.Xor, None, sympy.And, sympy.Nand,
sympy.Xor, None, sympy.And, sympy.Nand, sympy.And, sympy.Xor, sympy.Or, None, sympy.Or,
sympy.And, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=6))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=10))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=10, n_attractors=10))
# 49, 50, 51
# G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
# "\\Attractors - for Ariel\\BNS_Dubrova_2011\\MAPK_large2.cnet")
# experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=15, n_attractors=12))
# experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=15, n_attractors=14))
# experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=15, n_attractors=14))
G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
"\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=15, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=15, n_attractors=9))
experiments.append(ILPAttractorExperimentParameters(G=G, T=7, P=15, n_attractors=9))
# for _ in range(5):
# size = 35
# G = graphs.Network(vertex_names=list(range(size)),
# edges=[(i, random.choice(list(range(i+1, size)))) for i in range(size)
# if random.random() < 0.8 and i != size-1],
# vertex_functions=[random.choice([sympy.And, sympy.Nand, sympy.Or, sympy.Xor])
# for _ in range(size)])
# input_nodes = 0
# for v in G.vertices:
# is_input = True
# for e in G.edges:
# if e[1] == v:
# is_input = False
# break
# if is_input:
# input_nodes += 1
# attractor_number = 2**input_nodes
# experiments.append(ExperimentParameters(G=G, T=1, P=3, n_attractors=min(3, attractor_number)))
# experiments.append(ExperimentParameters(G=G, T=2, P=10, n_attractors=min(10, attractor_number)))
# experiments.append(ExperimentParameters(G=G, T=10, P=3, n_attractors=min(3, attractor_number)))
# TODO: figure out how disjoint long attractors work together (multiplying doesn't account for offsets)
# """test on basic semi-random networks: create connectivity components of acyclis networks and simple cycles"""
# n_random_experiment = 0
# while n_random_experiment < 10:
# n_components = random.randint(1, 3)
# attractor_number = 1
# max_attractor_len = 0
# cur_graph = None
# for n_component in range(n_components): # TODO: change to graph union method
# comp_size = random.randint(1, 5)
# V = [i for i in range(comp_size)]
# E = []
# comp_type =random.choice(["cycle", "acyclic"])
# if comp_type == "acyclic":
# for i in range(len(V) - 1): # create only forward facing edges
# for j in range(i+1, len(V)):
# if random.random() <= 0.8:
# E.append((V[i], V[j]))
# component_graph = graphs.Network(vertex_names=V, edges=E)
# restriction_level = random.choice([graphs.FunctionTypeRestriction.NONE,
# graphs.FunctionTypeRestriction.SYMMETRIC_THRESHOLD,
# graphs.FunctionTypeRestriction.SIMPLE_GATES])
# component_graph.randomize_functions(function_type_restriction=restriction_level)
# input_nodes = 0
# for v in V:
# is_input = True
# for e in E:
# if e[1] == v:
# is_input = False
# break
# if is_input:
# input_nodes += 1
# attractor_number *= 2**input_nodes
# max_attractor_len = max(max_attractor_len, 1)
# elif comp_type == "cycle":
# """currently supports only a cycle of identity function, using a group theory theorem from
# https://www.quora.com/How-many-unique-binary-matrices-are-there-up-to-rotations-translations-and-flips
# , can later add negation cycles"""
# for i in range(len(V)):
# E.append((V[i], V[(i + 1) % len(V)]))
# component_graph = graphs.Network(vertex_names=V, edges=E, vertex_functions=[sympy.And]*len(V))
# attractor_number *= binary_necklaces(len(V))
# max_attractor_len = max(max_attractor_len, len(V))
# cur_graph = component_graph if cur_graph is None else cur_graph + component_graph
# if attractor_number * len(cur_graph.vertices) * max_attractor_len <= 250:
# experiments.append(ExperimentParameters(G=cur_graph, T=max_attractor_len,
# P=attractor_number + 1,
# n_attractors=attractor_number))
# n_random_experiment += 1
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, P={}, expected_n_attractors={}".format(len(experiment.G.vertices),
experiment.T, experiment.P, experiment.n_attractors)
# continue
use_sampling = bool(random.randint(0, 1))
use_sampling_for_mip_start = bool(random.randint(0, 1))
simplify = bool(random.randint(0, 1))
key_slice_size = random.randint(1, 15)
print "key_slice_size={}".format(key_slice_size)
n_attractors = find_num_attractors_onestage(G=experiment.G, max_len=experiment.T, max_num=experiment.P,
use_sat=False, verbose=False,
sampling_bounds=(3, 3) if use_sampling else None,
use_sampling_for_mip_start=use_sampling_for_mip_start,
simplify_general_boolean=simplify,
key_slice_size=key_slice_size)
try:
self.assertEqual(n_attractors, experiment.n_attractors)
except AssertionError as e:
print e
print experiment.G
raise e
except Exception as e:
raise e
# print "number of experiments (without keys)={}".format(len(experiments))
# for i, experiment in enumerate(experiments):
# print "experiment #{}".format(i)h
# print "n={}, T={}, P={}, expected_n_attractors={}".format(len(experiment.G.vertices),
# experiment.T, experiment.P, experiment.n_attractors)
# # continue
# n_attractors = find_num_attractors_onestage(G=experiment.G, max_len=experiment.T, max_num=experiment.P,
# use_sat=False, verbose=False,
# use_state_keys=False, require_result=experiment.n_attractors)
# try:
# self.assertEqual(n_attractors, experiment.n_attractors)
# except AssertionError as e:
# print e
# print experiment.G
# raise e
    def test_vertex_degeneracy_scores(self):
        # Placeholder: fails unconditionally so the missing test is not
        # silently forgotten.
        self.assertTrue(False) # TODO: write...
    def test_graph_state_impact_scores(self):
        """Check graph_state_impact_score against hand-computed impacts on small networks.

        Each GraphStateImpactExperimentParameters bundles a network, its attractors
        (found via Dubrova's tool), optional relative basin sizes, a transient-length
        bound, a bits-of-change budget, and the expected impact score.
        """
        experiments = []
        # Single self-negating vertex: its only attractor is the 2-cycle, which no
        # state perturbation can escape, so impact is 0 in all variants below.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #0
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=1,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #1
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #2
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=30,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #3
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=30,
                                                                maximal_bits=10,
                                                                impact=0))
        # Same oscillator plus a disconnected input node B.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand, None])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #4
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=1,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #5
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #6
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=30,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #7
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=30,
                                                                maximal_bits=10,
                                                                impact=0))
        # Single self-activating vertex: two fixed points (0 and 1); a one-bit flip
        # always moves between them, so impact is 1.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #8
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=5,
                                                                maximal_bits=1,
                                                                impact=1))
        # experiment #9
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=1))
        # experiment #10
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=5,
                                                                maximal_bits=5,
                                                                impact=1))
        # experiment #11
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                max_transient_len=5,
                                                                maximal_bits=5,
                                                                impact=1))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #12
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                max_transient_len=5,
                                                                maximal_bits=5,
                                                                impact=1))
        # Three-vertex negative-feedback cycle.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #13
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=1))
        # experiment #14
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                max_transient_len=5,
                                                                maximal_bits=5,
                                                                impact=1))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #15
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=1))
        # C is constantly-True regardless of inputs, pinning the cycle's dynamics.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #16
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #17
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=3,
                                                                impact=0))
        # experiment #18
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=5,
                                                                maximal_bits=2,
                                                                impact=0))
        # Fully-connected 4-vertex majority network.
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
                                                                     ("A", "B"), ("C", "B"), ("D", "B"),
                                                                     ("A", "C"), ("B", "C"), ("D", "C"),
                                                                     ("A", "D"), ("B", "D"), ("C", "D")],
                           vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # 0000 and 1111 are stable points, and attract everything with hamming distance <= 1,
        # where 2 bits of change land right into another attractor.
        # Other three two-state attractors are unstable under one bit change, with transient length of 1,
        # Or they can be switched between eachother/stables with 2 (same as 0000/1111 ones, if needed)
        # bits of change.
        # experiment #19
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=0))
        # experiment #20
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=1,
                                                                maximal_bits=1,
                                                                impact=3 / 5.0))
        # experiment #21
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=5,
                                                                maximal_bits=1,
                                                                impact=3 / 5.0))
        # experiment #22
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=2,
                                                                impact=1))
        # experiment #23
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=3,
                                                                maximal_bits=2,
                                                                impact=1))
        # Stable points (length-1 attractors) get basin 5/16 each; cycles get 2/16.
        relative_basins = [5 / float(16) if len(attractor) == 1 else 2 / float(16) for
                           attractor in current_attractors]
        # experiment #24
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=relative_basins,
                                                                max_transient_len=5,
                                                                maximal_bits=1,
                                                                impact=6 / 16.0))
        # experiment #25
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=relative_basins,
                                                                max_transient_len=0,
                                                                maximal_bits=2,
                                                                impact=1))
        # Chain A -> B -> C with C self-sustaining; A is an input node.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
                           vertex_functions=[None, sympy.And, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # 000, 110 and 111 are the steady states. First is stable, other can change on
        # right vertex change, B with one step and C immediately.
        # experiment #26
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=2 / 3.0))
        # experiment #27
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=2,
                                                                impact=2 / 3.0))
        # experiment #28
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=5,
                                                                maximal_bits=5,
                                                                impact=2 / 3.0))
        # Per-attractor basin weights keyed off the attractor's first state.
        relative_len_decider = lambda attractor: 0.5 if [
            int(s) for s in attractor[0]] == [0, 0, 0] else 3 / float(8) if [
            int(s) for s in attractor[0]] == [1, 1, 0] else 1 / float(8)
        relative_basins = [relative_len_decider(att) for att in current_attractors]
        # experiment #29
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=relative_basins,
                                                                max_transient_len=5,
                                                                maximal_bits=2,
                                                                impact=0.5))
        # experiment #30
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=relative_basins,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=0.5))
        # Longer chain A -> B -> C -> D with D self-sustaining.
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
                                                                     ("D", "D")],
                           vertex_functions=[None, sympy.And, sympy.And, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # Now 0000 is stable, 1110 changes immediently on last vertex change, 1111 can change in 2, 1, or 0
        # steps on change of second, third or last vertex.
        # experiment #31
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=0,
                                                                maximal_bits=1,
                                                                impact=2 / 3.0))
        # experiment #32
        experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                max_transient_len=3,
                                                                maximal_bits=3,
                                                                impact=2 / 3.0))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "attractors:"
            print experiment.current_attractors
            print "n={}, relative_basins={}, expected_impacts={}".\
                format(len(experiment.G.vertices), experiment.relative_basins, experiment.impact)
            impact = graph_state_impact_score(G=experiment.G, current_attractors=experiment.current_attractors,
                                              max_transient_len=experiment.max_transient_len,
                                              relative_attractor_basin_sizes=experiment.relative_basins,
                                              key_slice_size=15, maximal_bits_of_change=experiment.maximal_bits)
            # (from vertex version) got numeric problems with test #16 regardless of key_slice
            impact = round(impact, 5)
            experiment_impact = round(experiment.impact, 5)
            print "expected impact:"
            print experiment_impact
            print "got impact:"
            print impact
            try:
                self.assertEqual(impact, experiment_impact)
            except AssertionError as e:
                # Dump the graph to make the failing case reproducible.
                print e
                print experiment.G
                raise e
def test_vertex_state_impact_scores(self):
# TODO: test stochastic kind
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[0]))
# experiment #1
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0]))
# experiment #2
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[0]))
# experiment #3
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[1],
max_transient_len=30,
impacts=[0]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.Nand, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #4
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[0, np.nan]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #5
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #6
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #7
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #8
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=1,
impacts=[1]))
# experiment #9
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=0,
impacts=[1]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #10
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1, np.nan]))
# experiment #11
experiments.append(VertexStateImpactExperimentParameters(G=G,
current_attractors=current_attractors,
relative_basins=[0.1, 0.4, 0.4, 0.1],
max_transient_len=0,
impacts=[1, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #13
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1, 1, 1]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #14
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0, 0, 0]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
("A", "B"), ("C", "B"), ("D", "B"),
("A", "C"), ("B", "C"), ("D", "C"),
("A", "D"), ("B", "D"), ("C", "D")],
vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #15
# 0000 and 1111 are stable points, and attract everything with hamming distance <= 1.
# Other three two-state attractors are unstable under one bit change, with transient length of 1.
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0] * 4))
# experiment #16
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[3 / 5.0] * 4))
# experiment #17
relative_basins = [5 / float(16) if len(attractor) == 1 else 2 / float(16) for
attractor in current_attractors]
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=1,
impacts=[6 / 16.0, 6 / 16.0,
6 / 16.0, 6 / 16.0]))
# experiment #18
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=2,
impacts=[6 / 16.0, 6 / 16.0,
6 / 16.0, 6 / 16.0]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
vertex_functions=[None, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #19
# 000, 110 and 111 are the steady states. First is stable, other can change on
# right vertex change, B with one step and C immediately.
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[np.nan, 0, 2 / 3.0]))
# experiment #20
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[np.nan, 1 / 3.0, 2/ 3.0]))
relative_len_decider = lambda attractor: 0.5 if [
int(s) for s in attractor[0]] == [0, 0, 0] else 3 / float(8) if [
int(s) for s in attractor[0]] == [1, 1, 0] else 1 / float(8)
relative_basins = [relative_len_decider(att) for att in current_attractors]
# experiment #21
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=1,
impacts=[np.nan, 1 / 8.0, 0.5]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
("D", "D")],
vertex_functions=[None, sympy.And, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# Now 0000 is stable, 1110 changes immediently on last vertex change, 1111 can change in 2, 1, or 0
# steps on change of second, third or last vertex.
# experiment #22
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[np.nan, 0, 0, 2 / float(3)]))
# experiment #23
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[np.nan, 0, 1 / float(3),
2 / float(3)]))
# experiment #24
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=2,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
# experiment #25
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=3,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
# experiment #26
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "attractors:"
print experiment.current_attractors
print "n={}, relative_basins={}, expected_impacts={}".\
format(len(experiment.G.vertices), experiment.relative_basins, experiment.impacts)
impacts = vertex_state_impact_scores(G=experiment.G, current_attractors=experiment.current_attractors,
max_transient_len=experiment.max_transient_len,
relative_attractor_basin_sizes=experiment.relative_basins,
key_slice_size=15)
# got numeric problems with test #16 regardless of key_slice
impacts = [round(x, 5) if not np.isnan(x) else x for x in impacts]
experiment_impacts = [round(x, 5) if not np.isnan(x) else x for x in experiment.impacts]
print "expected impacts:"
print impacts
print "got impacts:"
print experiment_impacts
try:
self.assertEqual(impacts, experiment_impacts)
except AssertionError as e:
print e
print experiment.G
raise e
    def test_graph_model_impact_scores(self):
        """Check graph_model_impact_score against hand-computed impacts on small networks.

        Each GraphModelImpactExperimentParameters bundles a network, attractor-search
        bounds (T = max attractor length, P = max attractor count), the impact type
        (Invalidation / Addition / Both), a bits-of-change budget, the current
        attractors, optional relative basin sizes, and the expected impact score.
        """
        # TODO: also test the resulting models (assure they have the correct number of attractors)
        experiments = []
        # Single self-negating vertex.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #0
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #1
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #2
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=2))
        # experiment #3
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1.5))
        # experiment #4
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #5
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[1],
                                                                impact=1.5))
        # Single self-activating vertex (two fixed points).
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #6
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #7
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                impact=0.9))
        # experiment #8
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #9
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                impact=0.75))
        # experiment #10
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #11
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0))
        # Same plus a disconnected input node B.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #12
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #13
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.4, 0.4, 0.1],
                                                                impact=0.75))
        # experiment #14
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #15
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.25))
        # experiment #16
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0))
        # Three-vertex negative-feedback cycle.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #17
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #18
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #19
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=2))
        # experiment #20
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=3, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                impact=1.25))
        # experiment #21
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                impact=1.5))
        # experiment #22
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=[0.1, 0.9],
                                                                impact=0.5))
        # experiment #23
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #24
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #25
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.75))
        # experiment #26
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #27
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #28
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Both,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.75))
        # experiment #29
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=3,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # experiment #30
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=4,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #31
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
                                                                maximal_bits=4,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=0.5))
        # C is constantly-True regardless of inputs.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #32
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #33
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=3, impact_types=ImpactType.Addition,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=3))
        # experiment #34
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
                                                                maximal_bits=1,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=3))
        # experiment #35
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=3))
        # experiment #36
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=6, impact_types=ImpactType.Addition,
                                                                maximal_bits=2,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=1))
        # experiment #37
        experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
                                                                maximal_bits=3,
                                                                current_attractors=current_attractors,
                                                                relative_basins=None,
                                                                impact=4))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, T={}, P={}, maximal_bits={}, relative_basins={}, expected_impact={}".\
                format(len(experiment.G.vertices),
                       experiment.T, experiment.P, experiment.maximal_bits, experiment.relative_basins,
                       experiment.impact)
            print experiment.current_attractors
            impact = graph_model_impact_score(G=experiment.G, current_attractors=experiment.current_attractors,
                                              max_len=experiment.T,
                                              max_num=experiment.P,
                                              impact_types=experiment.impact_types,
                                              relative_attractor_basin_sizes=experiment.relative_basins,
                                              maximal_bits_of_change=experiment.maximal_bits)
            try:
                self.assertEqual(impact, experiment.impact)
            except AssertionError as e:
                # Dump the graph to make the failing case reproducible.
                print e
                print experiment.G
                raise e
def test_vertex_model_impact_scores(self):
# TODO: also test the resulting models (assure they have the correct number of attractors)
# TODO: test stochastic kind
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #1
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #2
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[2]))
# experiment #3
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1.5]))
# experiment #4
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #5
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[1],
impacts=[1.5]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #6
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5]))
# experiment #7
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[0.9]))
# experiment #8
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #9
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[0.75]))
# experiment #10
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5]))
# experiment #11
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, np.nan]))
# experiment #13
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.4, 0.4, 0.1],
impacts=[0.75, np.nan]))
# experiment #14
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5, np.nan]))
# experiment #15
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.25, np.nan]))
# experiment #16
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #17
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
# experiment #18
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
# experiment #19
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[2] * 3))
# experiment #20
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[1.25] * 3))
# experiment #21
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[1.5] * 3))
# experiment #22
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=2, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5] * 3))
# experiment #23
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5] * 3))
# experiment #24
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #25
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.75, 0.75, 0.75]))
# experiment #26
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 1]))
# experiment #27
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5, 0.5, 0.5]))
# experiment #28
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.75, 0.75, 0.75]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #29
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 1]))
# experiment #30
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 3]))
# experiment #31
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 3]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, P={}, maximal_bits={}, relative_basins={}, expected_impacts={}".\
format(len(experiment.G.vertices),
experiment.T, experiment.P, experiment.maximal_bits, experiment.relative_basins,
experiment.impacts)
print experiment.current_attractors
impacts = vertex_model_impact_scores(G=experiment.G, current_attractors=experiment.current_attractors,
max_len=experiment.T,
max_num=experiment.P,
impact_types=experiment.impact_types,
relative_attractor_basin_sizes=experiment.relative_basins,
maximal_bits_of_change=experiment.maximal_bits)
try:
self.assertEqual(impacts, experiment.impacts)
except AssertionError as e:
print e
print experiment.G
raise e
    def test_stochastic_graph_state_impact_scores(self):
        """Check stochastic_graph_state_impact_score() estimates against exact
        hand-computed expectations on small Boolean networks.

        Each experiment pairs a network with a number of state bits to flip
        and the expected impact value (presumably the expected effect of
        flipping `bits_of_change` bits of a random state on the long-term
        dynamics - TODO confirm against the implementation).  Each experiment
        is repeated 10 times with a randomized sample count and parallelism
        setting, and the Monte-Carlo estimate must fall within 0.1 of the
        exact expectation.
        """
        experiments = []
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        # experiment #0
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand, None])
        # experiment #1
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        # experiment #2
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        # experiment #3
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        # experiment #4
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0.5))
        # experiment #5
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0.5))
        # experiment #6
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=0))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        # experiment #7
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
        # experiment #8
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0.5))
        # experiment #9
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=1))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        # experiment #10
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
        # experiment #11
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0))
        # experiment #12
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=0))
        # Complete graph on 4 vertices; each vertex computes the majority of
        # the other three (true when at least two of its inputs are true).
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
                                                                     ("A", "B"), ("C", "B"), ("D", "B"),
                                                                     ("A", "C"), ("B", "C"), ("D", "C"),
                                                                     ("A", "D"), ("B", "D"), ("C", "D")],
                           vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
        # experiment #13
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=3 / 8.0))
        # experiment #14
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=1))
        # experiment #15
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=1))
        # experiment #16
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=4, impact=10 / 16.0))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
                           vertex_functions=[None, sympy.And, sympy.And])
        # 000, 110 and 111 are the steady states. First is stable, other can change on
        # right vertex change, B with one step and C immediately.
        # experiment #17
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1,
                                                                          impact=(3 / 8.0 * 0) + (3 / 8.0 * 0.5) +
                                                                                 (1 / 8.0 * 0.5) + (1 / 8.0 * 0)))
        # experiment #18
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=1 / 16.0))
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
                                                                     ("D", "D")],
                           vertex_functions=[None, sympy.And, sympy.And, sympy.And])
        # Now 0000 is stable, 1110 changes immediately on last vertex change, 1111 can change in 2, 1, or 0
        # steps on change of second, third or last vertex.
        # experiment #19
        experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1,
                                                                          impact=0.20833333333))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, expected_impact={}".\
                format(len(experiment.G.vertices), experiment.impact)
            # Repeat with randomized sample counts and parallel job settings to
            # also exercise the parallelization code paths.
            for iteration in range(10):
                n_iter = random.randint(700, 1400)
                parallel_n_jobs = random.choice([None, 1, 2, 3])
                estimated_impact = stochastic_graph_state_impact_score(G=experiment.G, n_iter=n_iter,
                                                                       bits_of_change=experiment.bits_of_change,
                                                                       parallel_n_jobs=parallel_n_jobs)
                print "estimated_impact={}".format(estimated_impact)
                # Statistical tolerance: with >=700 samples the estimate should
                # be well within 0.1 of the exact expectation.
                self.assertTrue(abs(estimated_impact - experiment.impact) < 0.1)
    def test_stochastic_vertex_state_impact_scores(self):
        """Check stochastic_vertex_state_impact_scores() estimates against
        exact hand-computed per-vertex expectations on small Boolean networks.

        Each experiment pairs a network with the expected per-vertex impact
        list; input nodes (None vertex function) are expected to score np.nan.
        Estimates are recomputed 10 times with randomized sample counts and
        parallelism settings and must fall within 0.1 of the expectation
        (NaN entries are compared with np.isnan, since nan != nan).
        """
        experiments = []
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        # experiment #0
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0]))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand, None])
        # experiment #1
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0, np.nan]))
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        # experiment #2
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1]))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        # experiment #3
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1, np.nan]))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        # experiment #4
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0.5] * 3))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        # experiment #5
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1, 1, 1]))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        # experiment #6
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0, 0, 0]))
        # Complete graph on 4 vertices; each vertex computes the majority of
        # the other three (true when at least two of its inputs are true).
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
                                                                     ("A", "B"), ("C", "B"), ("D", "B"),
                                                                     ("A", "C"), ("B", "C"), ("D", "C"),
                                                                     ("A", "D"), ("B", "D"), ("C", "D")],
                           vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
        # experiment #7
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[3 / 8.0] * 4))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
                           vertex_functions=[None, sympy.And, sympy.And])
        # experiment #8
        # 000, 110 and 111 are the steady states. First is stable, other can change on
        # right vertex change, B with one step and C immediately.
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G,
                                                                           impacts=[np.nan, 1/8.0, 0.5]))
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
                                                                     ("D", "D")],
                           vertex_functions=[None, sympy.And, sympy.And, sympy.And])
        # Now 0000 is stable, 1110 changes immediately on last vertex change, 1111 can change in 2, 1, or 0
        # steps on change of second, third or last vertex.
        # experiment #9
        experiments.append(StochasticVertexStateImpactExperimentParameters(G=G,
                                                                           impacts=[np.nan,
                                                                                    1/16.0, 1/16.0,
                                                                                    0.5]))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, expected_impacts={}".\
                format(len(experiment.G.vertices), experiment.impacts)
            # Repeat with randomized sample counts and parallel job settings to
            # also exercise the parallelization code paths.
            for iteration in range(10):
                n_iter = random.randint(700, 1400)
                parallel_n_jobs = random.choice([None, 1, 2, 3])
                estimated_impacts = stochastic_vertex_state_impact_scores(G=experiment.G, n_iter=n_iter,
                                                                          parallel_n_jobs=parallel_n_jobs)
                print "estimated_impacts={}".format(estimated_impacts)
                self.assertTrue(len(experiment.impacts) == len(estimated_impacts))
                # NaN-aware comparison: NaN marks input nodes, and nan != nan.
                for calculated_impact, estimated_impact in zip(experiment.impacts, estimated_impacts):
                    if np.isnan(calculated_impact):
                        self.assertTrue(np.isnan(estimated_impact))
                    else:
                        self.assertTrue(abs(estimated_impact - calculated_impact) < 0.1)
    def test_stochastic_graph_model_impact_scores(self):
        """Check stochastic_graph_model_impact_score() estimates against exact
        hand-computed expectations on small Boolean networks.

        Each experiment bundles a network G, its attractors (found with
        Dubrova's tool), the number of bits of (model) change, the impact
        type (Invalidation / Addition / Both) and optional relative attractor
        basin sizes, together with the exact expected impact.  Each experiment
        runs twice - with and without Dubrova for attractor estimation - using
        randomized iteration counts and parallelism, and the estimate must
        fall within 0.15 of the expectation.
        """
        # TODO: also test the resulting models (assure they have the correct number of attractors)
        experiments = []
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #0
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #1
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #2
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=1))
        # experiment #3
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=2))
        # experiment #4
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=1.5))
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #5
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=0.5))
        # experiment #6
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=[0.1, 0.9],
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=0.5))
        # experiment #7
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #8
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=0))
        # experiment #9
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=0.5))
        # experiment #10
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=0.75))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #11
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=0.5))
        # experiment #12
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #13
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=0.5))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #14
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #15
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=1))
        # experiment #16
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=0.5))
        # experiment #17
        # Expected impacts below are weighted averages over the possible
        # bit-change choices (e.g. 3 of 15 two-bit choices vs. the other 12).
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(3 / 15.0) * 2 + (12 / 15.0) * 0.5))
        # experiment #18
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=3,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(3 / 15.0) * 1 + (12 / 15.0) * 0.5))
        # experiment #20
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=0.75))
        # experiment #21
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=(3 / 15.0) * 1.5 + (12 / 15.0) * 0.75))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #22
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=3 / 4.0))
        # experiment #23
        # Non-uniform basins: the longer attractor gets the larger basin.
        basin_sizes = [3 / 8.0 if len(att) > 1 else 1 / 8.0 for att in current_attractors]
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=basin_sizes,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=7 / 8.0))
        # experiment #24
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=(3 / 15.0) * 1 + (12 / 15.0) *
                                                                                 (0.5 * 3 / 4.0 + 0.5 * 1)))
        # experiment #25
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=0))
        # experiment #26
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(3 / 15.0) * 0.5 + (12 / 15.0) *
                                                                                 (0.5 * 0 + 0.5 * 0.25)))
        # experiment #27
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=7 / 16.0))
        # experiment #28
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=(3 / 15.0) * 0.75 + (12 / 15.0) *
                                                                                 (0.5 * (3/8.0 + 0) + 0.5 * (3/8.0 + 0.125))))
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #29
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=0.5))
        # experiment #30
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=(3 / 15.0) * 1 + (12 / 15.0) * 3 / 4.0))
        # experiment #31
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(2 / 3.0 * 0.5 + 1 / 3.0 * 2.5)))
        # experiment #32
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Both,
                                                                          impact=(2 / 3.0 * 0.5 + 1 / 3.0 * (
                                                                                  0.5 * 1.5 + 0.5 * 1.5))))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A"), ("B", "B")],
                           vertex_functions=[sympy.And, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #33
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=(1 / 3.0 * 0.5 + 2 / 3.0 * 0.25)))
        # experiment #34
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=[0.1, 0.9],
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=(1 / 3.0 * 0.5 + 2 / 3.0 * 0.25)))
        # experiment #35
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Invalidation,
                                                                          impact=(1 / 15.0 * 1 +
                                                                                  6 / 15.0 * 3.5 / 6.0 +
                                                                                  8 / 15.0 * 5 / 8.0)))
        # experiment #36
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=1,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(1 / 3.0 * 0.25 +
                                                                                  2 / 3.0 * 1 / 8.0)))
        # experiment #37
        experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
                                                                          bits_of_change=2,
                                                                          current_attractors=current_attractors,
                                                                          relative_basins=None,
                                                                          impact_type=ImpactType.Addition,
                                                                          impact=(1 / 15.0 * 0.5 +
                                                                                  6 / 15.0 * 1 / 4.0 +
                                                                                  8 / 15.0 * 2 * 0.5 / 8.0)))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, bits_of_change={}, relative_basins={}, impact_type={}, expected_impact={}".\
                format(len(experiment.G.vertices),
                       experiment.bits_of_change, experiment.relative_basins, experiment.impact_type,
                       experiment.impact)
            print experiment.current_attractors
            # Run each experiment both with and without Dubrova's attractor
            # finder, with randomized iteration counts and parallelism.
            for use_dubrova in [False, True]:
                n_iter = random.randint(800, 880)
                attractor_estimation_n_iter = random.randint(50, 55)
                parallel_n_jobs = random.choice([None, 1, 2, 3])
                estimated_impact = stochastic_graph_model_impact_score(
                    G=experiment.G, current_attractors=experiment.current_attractors, n_iter=n_iter, use_dubrova=use_dubrova,
                    bits_of_change=experiment.bits_of_change,
                    relative_attractor_basin_sizes=experiment.relative_basins,
                    attractor_estimation_n_iter=attractor_estimation_n_iter,
                    impact_type=experiment.impact_type,
                    cur_dubrova_path=dubrova_path,
                    parallel_n_jobs=parallel_n_jobs)
                print "estimated_impact={}".format(estimated_impact)
                print "expected_impacts={}".format(experiment.impact)
                # Statistical tolerance: the Monte-Carlo estimate must fall
                # within 0.15 of the exact expectation.
                self.assertTrue(abs(estimated_impact - experiment.impact) < 0.15)
    def test_stochastic_vertex_model_impact_scores(self):
        """Check stochastic_vertex_model_impact_scores on small toy networks.

        Each experiment bundles a network G, a perturbation size
        (bits_of_change), optional relative attractor basin sizes, an impact
        type, and the hand-computed expected per-vertex impacts.  np.nan in
        the expected impacts marks a vertex (an input node) for which no
        impact is defined.  Every experiment is run with both attractor
        back-ends (use_dubrova False/True) and the estimate must fall within
        0.15 of the expectation.

        NOTE(review): relies on module-level names `dubrova_path`, `graphs`,
        `logic`-style helpers and the functions under test; expected values
        are hand-derived for each topology — do not restyle casually.
        """
        # TODO: also test the resulting models (assure they have the correct number of attractors)
        experiments = []
        # Network: single vertex A with a NAND self-loop (negative feedback).
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #0
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
                                                                           bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1]))
        # experiment #1
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
                                                                           bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1]))
        # experiment #2
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
                                                                           bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[1]))
        # experiment #3
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
                                                                           bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[2]))
        # experiment #4
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
                                                                           bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[1.5]))
        # Network: single vertex A with an AND self-loop (positive feedback).
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #5
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5]))
        # experiment #6 — same expectation even with skewed basin sizes.
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=[0.1, 0.9],
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5]))
        # experiment #7
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1]))
        # experiment #8
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0]))
        # experiment #9
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5]))
        # experiment #10
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[0.75]))
        # Network: A with AND self-loop, plus isolated input vertex B
        # (function None) — B's expected impact is NaN below.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[sympy.And, None])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #11
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5, np.nan]))
        # experiment #12
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1, np.nan]))
        # experiment #13
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5, np.nan]))
        # Network: symmetric 3-cycle A->B->C->A, all NAND.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #14
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1] * 3))
        # experiment #15
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1] * 3))
        # experiment #16
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5] * 3))
        # experiment #17
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[2] * 3))
        # experiment #18
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[0.75] * 3))
        # experiment #19
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[1.5] * 3))
        # Network: 3-cycle with one AND vertex breaking the symmetry.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #20
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[3 / 4.0] * 3))
        # experiment #21 — explicit basin sizes: 3/8 for multi-state
        # attractors, 1/8 for single-state ones.
        basin_sizes = [3 / 8.0 if len(att) > 1 else 1 / 8.0 for att in current_attractors]
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=basin_sizes,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[7 / 8.0] * 3))
        # experiment #22
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1, 1, 1]))
        # experiment #23
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0] * 3))
        # experiment #24
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5, 0.5, 0.5]))
        # experiment #25
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[7 / 16.0] * 3))
        # experiment #26
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[0.75] * 3))
        # Network: 3-cycle where C's function is the constant True lambda.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #27
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5] * 3))
        # experiment #28
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1] * 3))
        # experiment #29
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5, 0.5, 2.5]))
        # experiment #30
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[1, 1, 1]))
        # experiment #31
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Both,
                                                                           impacts=[0.5, 0.5, 1.5]))
        # Network: two-vertex mutual AND with an extra B self-loop.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A"), ("B", "B")],
                           vertex_functions=[sympy.And, sympy.And])
        current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # experiment #32
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5, 0.25]))
        # experiment #33
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=[0.1, 0.9],
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[0.5, 0.25]))
        # experiment #34
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Invalidation,
                                                                           impacts=[1, 0.5]))
        # experiment #35
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.25, 1 / 8.0]))
        # experiment #36
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
                                                                           current_attractors=current_attractors,
                                                                           relative_basins=None,
                                                                           impact_type=ImpactType.Addition,
                                                                           impacts=[0.5, 1 / 4.0]))
        print "number of experiments (with keys)={}".format(len(experiments))
        # Run every experiment under both attractor back-ends, with randomized
        # iteration counts and parallelism to exercise those code paths too.
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, bits_of_change={}, relative_basins={}, impact_type={}, expected_impacts={}".\
                format(len(experiment.G.vertices),
                       experiment.bits_of_change, experiment.relative_basins, experiment.impact_type,
                       experiment.impacts)
            print experiment.current_attractors
            for use_dubrova in [False, True]:
                n_iter = random.randint(400, 440)
                attractor_estimation_n_iter = random.randint(30, 35)
                parallel_n_jobs = random.choice([None, 1, 2, 3])
                estimated_impacts = stochastic_vertex_model_impact_scores(
                    G=experiment.G, current_attractors=experiment.current_attractors, n_iter=n_iter, use_dubrova=use_dubrova,
                    bits_of_change=experiment.bits_of_change,
                    relative_attractor_basin_sizes=experiment.relative_basins,
                    attractor_estimation_n_iter=attractor_estimation_n_iter,
                    impact_type=experiment.impact_type,
                    cur_dubrova_path=dubrova_path,
                    parallel_n_jobs=parallel_n_jobs)
                self.assertTrue(len(experiment.impacts) == len(estimated_impacts))
                print "estimated_impacts={}".format(estimated_impacts)
                # NaN expectations (input vertices) must produce NaN estimates;
                # everything else must be within the 0.15 tolerance.
                for calculated_impact, estimated_impact in zip(experiment.impacts, estimated_impacts):
                    if np.isnan(calculated_impact):
                        self.assertTrue(np.isnan(estimated_impact))
                    else:
                        self.assertTrue(abs(estimated_impact - calculated_impact) < 0.15)
    def test_find_num_steady_states(self):
        """test on known toy models"""
        # Each toy network below has a hand-derived number of steady states
        # (length-1 attractors); both the simplified and non-simplified
        # boolean paths are exercised where cheap enough.
        # 0, 1
        # NAND self-loop: A must equal not(A), so no steady state.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 0)
        # Single input node (no function): both constant states are steady.
        G = graphs.Network(vertex_names=["A"], edges=[],
                           vertex_functions=[None])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 2)
        # AND self-loop: A = A, so both 0 and 1 are steady.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 2)
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.Nand, sympy.And])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 0)
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
        # Constant functions pin both vertices: exactly one steady state.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[lambda x: True, lambda x: False])
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 1)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 1)
        # Odd NAND cycle: no steady state; even NAND cycle: two.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand]*3)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
        G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"), ("D", "A")],
                           vertex_functions=[sympy.Nand]*4)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
        # acyclic, should have 2**#input_nodes attractors of length 1
        G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
                           edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
                           vertex_functions=[sympy.Nand]*6)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 8)
        # NOTE(review): this 5-vertex network is built but never asserted on —
        # it is immediately overwritten below. Looks like a missing
        # assertEqual (presumably expecting 8); confirm intent before fixing.
        G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
                           edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
                           vertex_functions=[sympy.And]*5)
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand]*3)
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
        # NOTE(review): hard-coded absolute Windows path makes this final
        # check machine-specific; consider a configurable fixture path.
        G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
                                      "\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
        self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 8)
    def test_find_attractors_dubrova(self):
        """Exercise find_attractors_dubrova on toy models with known
        attractor counts, then verify the cyclic state order of a returned
        attractor on two small oscillating networks.
        """
        experiments = []
        # NOTE(review): the string below is a statement, not a docstring —
        # it follows `experiments = []` and has no effect at runtime.
        """test on known toy models"""
        # 0, 1
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
        # 2 — same negative self-loop expressed as a threshold function.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        # 3, 4 — input node: mutating input nodes doubles the attractors.
        G = graphs.Network(vertex_names=["A"], edges=[],
                           vertex_functions=[None])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=2))
        # 5, 6
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.Nand, sympy.And])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
        # 7, 8
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[lambda x: True, lambda x: False])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
        # 9, 10 — vertex C's function is the constant True.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
                                             True])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=3))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=3))
        # 11, 12 — same topology with C pinned to constant False.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
                                             False])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
        # 13, 14 — C as a free input node.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
                                             None])
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
        experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=4))
        # 15
        # NOTE(review): hard-coded absolute Windows path — machine-specific.
        G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
                                      "\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
        # G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
        #                               "\\Attractors - for Ariel\\BNS_Dubrova_2011\\MAPK_large.cnet")
        experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=9))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, mutate={}, expected_n_attractors={}".format(len(experiment.G.vertices),
                                                                     experiment.mutate, experiment.n_attractors)
            # continue
            # NOTE(review): relative path assumes a specific working directory.
            attractors = find_attractors_dubrova(G=experiment.G,
                                                 dubrova_path="../bns_dubrova.exe",
                                                 mutate_input_nodes=experiment.mutate)
            n_attractors = len(attractors)
            try:
                self.assertEqual(n_attractors, experiment.n_attractors)
            except AssertionError as e:
                # Dump the offending network before re-raising, for debugging.
                print e
                print experiment.G
                raise e
            except Exception as e:
                # NOTE(review): bare re-raise — this clause is a no-op.
                raise e
        print "testing state order in attractor"
        # TODO: expand? random graphs, compare ILP attractors with Dubrova's
        # A 4-state oscillation; any cyclic rotation of the expected state
        # sequence is accepted.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.And, sympy.Nand, True])
        desired_attractor = [[0, 0, 1], [0, 1, 1], [1, 1, 1], [1, 0, 1]]
        # repeat manually, (otherwise there's mutual dependence of tests).
        possible_attractors = [desired_attractor[shift:] + desired_attractor[:shift] for shift in range(4)]
        # print possible_attractors
        found_attractors = find_attractors_dubrova(G, dubrova_path="../bns_dubrova.exe", mutate_input_nodes=True)
        self.assertTrue(len(found_attractors) == 1)
        found_attractor = [[int(v) for v in state] for state in found_attractors[0]]
        # print found_attractor
        self.assertTrue(any(found_attractor == possible_attractors[i] for i in range(len(possible_attractors))))
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.And, sympy.Nand])
        desired_attractor = [[0, 0], [0, 1], [1, 1], [1, 0]]
        # repeat manually, (otherwise there's mutual dependence of tests).
        possible_attractors = [desired_attractor[shift:] + desired_attractor[:shift] for shift in range(4)]
        # print possible_attractors
        found_attractors = find_attractors_dubrova(G, dubrova_path="../bns_dubrova.exe", mutate_input_nodes=True)
        self.assertTrue(len(found_attractors) == 1)
        found_attractor = [[int(v) for v in state] for state in found_attractors[0]]
        # print found_attractor
        self.assertTrue(any(found_attractor == possible_attractor for possible_attractor in possible_attractors))
    def test_find_attractors_enumerate(self):
        """Exercise find_attractors_onestage_enumeration on toy models.

        Each experiment pairs a network with a maximum attractor length T and
        the expected number of attractors of length <= T; simplification and
        key_slice_size are randomized per experiment to cover those paths.
        """
        experiments = []
        # NOTE(review): the string below is a statement, not a docstring —
        # it follows `experiments = []` and has no effect at runtime.
        """test on known toy models"""
        # 0, 1 — NAND self-loop: its only attractor is the 2-cycle.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.Nand])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=1))
        # 2, 3 — same dynamics via a threshold function.
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=1))
        # 4, 5 — input node: two fixed points regardless of T.
        G = graphs.Network(vertex_names=["A"], edges=[],
                           vertex_functions=[None])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2))
        # 6, 7
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1),
                                             None])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2))
        # 8, 9
        G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
                           vertex_functions=[sympy.And])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=2))
        # 10, 11
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.Nand, sympy.And])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=0))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=None, n_attractors=1))
        # 12, 13, 14 — counts grow with T until all attractors are covered.
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[sympy.Nand, sympy.Nand])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=3))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=15, P=None, n_attractors=3))
        # 15, 16
        G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
                           vertex_functions=[lambda x: True, lambda x: False])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=None, n_attractors=1))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=1))
        # 17, 18, 19
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 0)])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=3))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=4))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=4))
        # 20 — C as a free input node.
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
                           vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
                                             logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
                                             None])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=4))
        # 21, 22, 23
        G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
                           vertex_functions=[sympy.Nand]*3)
        experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=2))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=10, P=None, n_attractors=2))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=5, P=None, n_attractors=1))
        # 24, 25
        # acyclic, should have 2**#input_nodes attractors of length 1
        G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
                           edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
                           vertex_functions=[sympy.Nand]*6)
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=8))
        # 26, 27
        G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
                           edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
                           vertex_functions=[sympy.And]*5)
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=20))  # offsets!
        # 28, 29
        # a failed random graph added as a constant test
        G = graphs.Network(
            vertex_names=['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16',
                          '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31',
                          '32', '33', '34'],
            edges=[('1', '2'), ('2', '16'), ('3', '17'), ('5', '15'), ('6', '29'), ('7', '28'), ('8', '22'),
                   ('9', '28'), ('10', '18'), ('11', '15'), ('12', '24'), ('13', '14'), ('15', '18'), ('16', '26'),
                   ('17', '27'), ('18', '20'), ('19', '23'), ('20', '27'), ('23', '26'), ('24', '29'), ('25', '33'),
                   ('26', '30'), ('27', '32'), ('28', '32'), ('30', '32'), ('31', '34'), ('32', '33'), ('33', '34')],
            vertex_functions=[None, None, sympy.Nand, None, None, None, None, None, None, None, None, None, None, None,
                              sympy.Or, sympy.Nand,
                              sympy.Nand, sympy.Nand, sympy.Nand, None, sympy.Xor, None, sympy.And, sympy.Nand,
                              sympy.Xor, None, sympy.And, sympy.Nand, sympy.And, sympy.Xor, sympy.Or, None, sympy.Or,
                              sympy.And, sympy.And])
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2**17))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2**17))
        # 30, 31, 32, 33
        # NOTE(review): hard-coded absolute Windows path — machine-specific.
        G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
                                      "\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
        experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=8))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=9))
        experiments.append(ILPAttractorExperimentParameters(G=G, T=8, P=None, n_attractors=9))
        print "number of experiments (with keys)={}".format(len(experiments))
        for i, experiment in enumerate(experiments):
            print "experiment #{}".format(i)
            print "n={}, T={}, expected_n_attractors={}".format(len(experiment.G.vertices),
                                                                experiment.T, experiment.n_attractors)
            # continue
            simplify = bool(random.randint(0, 1))
            key_slice_size = random.randint(1, 15)
            print "key_slice_size={}".format(key_slice_size)
            n_attractors = len(find_attractors_onestage_enumeration(G=experiment.G, max_len=experiment.T,
                                                                    verbose=False,
                                                                    simplify_general_boolean=simplify,
                                                                    key_slice_size=key_slice_size))
            try:
                self.assertEqual(n_attractors, experiment.n_attractors)
            except AssertionError as e:
                # Dump the offending network before re-raising, for debugging.
                print e
                print experiment.G
                raise e
            except Exception as e:
                # NOTE(review): bare re-raise — this clause is a no-op.
                raise e
        # TODO: add dubrova v.s. ILP testing again.
| 72.721298
| 125
| 0.442139
| 13,942
| 179,258
| 5.501865
| 0.02869
| 0.107709
| 0.067465
| 0.083656
| 0.938637
| 0.92324
| 0.913007
| 0.903581
| 0.884313
| 0.874158
| 0
| 0.031307
| 0.470417
| 179,258
| 2,464
| 126
| 72.750812
| 0.776714
| 0.064404
| 0
| 0.859482
| 0
| 0
| 0.023939
| 0.004481
| 0
| 0
| 0
| 0.000406
| 0.022715
| 0
| null | null | 0
| 0.004754
| null | null | 0.037507
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
33226b85efc1a5902693f74d1427ddf04b2208f5
| 491
|
py
|
Python
|
regression/errors.py
|
sahitpj/MachineLearning
|
2ce5a337ec432daff64a216df6847ef834bcb8d7
|
[
"MIT"
] | 2
|
2019-01-23T15:51:29.000Z
|
2019-02-01T16:50:33.000Z
|
regression/errors.py
|
sahitpj/MachineLearning
|
2ce5a337ec432daff64a216df6847ef834bcb8d7
|
[
"MIT"
] | null | null | null |
regression/errors.py
|
sahitpj/MachineLearning
|
2ce5a337ec432daff64a216df6847ef834bcb8d7
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
def MSE(Y_predict, Y):
    """Mean squared error between predictions and targets.

    Args:
        Y_predict: numpy array of predictions; axis 0 is the sample axis.
        Y: numpy array of targets with the same number of samples.

    Returns:
        Sum of squared differences divided by the number of samples.

    Raises:
        ValueError: if the sample counts of the two arrays differ.
    """
    # Validate explicitly: `assert` is stripped when running under `python -O`.
    if Y_predict.shape[0] != Y.shape[0]:
        raise ValueError("Y_predict and Y must have the same number of samples")
    return np.sum((Y_predict - Y) ** 2) / Y.shape[0]
def MSE_torch(Y_predict, Y):
    """Mean squared error between predictions and targets (torch tensors).

    Args:
        Y_predict: torch tensor of predictions; axis 0 is the sample axis.
        Y: torch tensor of targets with the same number of samples.

    Returns:
        Sum of squared differences divided by the number of samples.

    Raises:
        ValueError: if the sample counts of the two tensors differ.
    """
    # Validate explicitly: `assert` is stripped when running under `python -O`.
    if Y_predict.shape[0] != Y.shape[0]:
        raise ValueError("Y_predict and Y must have the same number of samples")
    return torch.sum((Y_predict - Y) ** 2) / Y.shape[0]
def SSE(Y_predict, Y):
    """Sum of squared errors between predictions and targets.

    Args:
        Y_predict: numpy array of predictions; axis 0 is the sample axis.
        Y: numpy array of targets with the same number of samples.

    Returns:
        Sum of squared differences over all elements.

    Raises:
        ValueError: if the sample counts of the two arrays differ.
    """
    # Validate explicitly: `assert` is stripped when running under `python -O`.
    if Y_predict.shape[0] != Y.shape[0]:
        raise ValueError("Y_predict and Y must have the same number of samples")
    return np.sum((Y_predict - Y) ** 2)
def SSE_torch(Y_predict, Y):
    """Sum of squared errors between predictions and targets (torch tensors).

    Args:
        Y_predict: torch tensor of predictions; axis 0 is the sample axis.
        Y: torch tensor of targets with the same number of samples.

    Returns:
        Sum of squared differences over all elements.

    Raises:
        ValueError: if the sample counts of the two tensors differ.
    """
    # Validate explicitly: `assert` is stripped when running under `python -O`.
    if Y_predict.shape[0] != Y.shape[0]:
        raise ValueError("Y_predict and Y must have the same number of samples")
    return torch.sum((Y_predict - Y) ** 2)
| 27.277778
| 49
| 0.655804
| 94
| 491
| 3.276596
| 0.170213
| 0.311688
| 0.233766
| 0.194805
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.857143
| 0.792208
| 0
| 0.033493
| 0.148676
| 491
| 18
| 50
| 27.277778
| 0.703349
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
3366ff7fdf6783678f49b40c54c0357d706affd8
| 21,859
|
py
|
Python
|
fds.analyticsapi.engines/fds/analyticsapi/engines/api/calculations_api.py
|
katsuya-horiuchi/analyticsapi-engines-python-sdk
|
89258b5ddc2a89267d03115db410fd7a14c303ed
|
[
"Apache-2.0"
] | null | null | null |
fds.analyticsapi.engines/fds/analyticsapi/engines/api/calculations_api.py
|
katsuya-horiuchi/analyticsapi-engines-python-sdk
|
89258b5ddc2a89267d03115db410fd7a14c303ed
|
[
"Apache-2.0"
] | null | null | null |
fds.analyticsapi.engines/fds/analyticsapi/engines/api/calculations_api.py
|
katsuya-horiuchi/analyticsapi-engines-python-sdk
|
89258b5ddc2a89267d03115db410fd7a14c303ed
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Engines API
Allow clients to fetch Engines Analytics through APIs. # noqa: E501
The version of the OpenAPI document: 2
Contact: analytics.api.support@factset.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from fds.analyticsapi.engines.api_client import ApiClient
from fds.analyticsapi.engines.exceptions import (
ApiTypeError,
ApiValueError
)
class CalculationsApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when the caller does
        # not supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def cancel_calculation_by_id(self, id, **kwargs): # noqa: E501
        """Cancel calculation by id # noqa: E501
        This is the endpoint to cancel a previously submitted calculation request. Instead of doing a GET on the getCalculationById URL, cancel the calculation by doing a DELETE. All individual calculation units within the calculation will be canceled if they have not already finished. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cancel_calculation_by_id(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: From url, provided from the location header in the Run Multiple Calculations endpoint. (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: request only the deserialized payload, not the
        # (data, status_code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        return self.cancel_calculation_by_id_with_http_info(id, **kwargs) # noqa: E501

    def cancel_calculation_by_id_with_http_info(self, id, **kwargs): # noqa: E501
        """Cancel calculation by id # noqa: E501
        This is the endpoint to cancel a previously submitted calculation request. Instead of doing a GET on the getCalculationById URL, cancel the calculation by doing a DELETE. All individual calculation units within the calculation will be canceled if they have not already finished. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cancel_calculation_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: From url, provided from the location header in the Run Multiple Calculations endpoint. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured FIRST, before any other local is
        # defined, so the dict holds exactly the declared parameters (plus
        # 'kwargs' and 'self'). Do not move or reorder this statement.
        local_var_params = locals()

        all_params = ['id'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones
        # into local_var_params so they are looked up uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method cancel_calculation_by_id" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
                                                       local_var_params['id'] is None): # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `cancel_calculation_by_id`") # noqa: E501

        collection_formats = {}

        # 'id' is substituted into the {id} placeholder of the URL path.
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id'] # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['Basic'] # noqa: E501

        return self.api_client.call_api(
            '/analytics/engines/v2/calculations/{id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_calculation_status_by_id(self, id, **kwargs): # noqa: E501
        """Get calculation status by id # noqa: E501
        This is the endpoint to check on the progress of a previous calculation request. Response body contains status information of the entire request and each individual calculation unit. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_calculation_status_by_id(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: From url, provided from the location header in the Run Multiple Calculations endpoint. (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: CalculationStatus
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: request only the deserialized payload, not the
        # (data, status_code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        return self.get_calculation_status_by_id_with_http_info(id, **kwargs) # noqa: E501

    def get_calculation_status_by_id_with_http_info(self, id, **kwargs): # noqa: E501
        """Get calculation status by id # noqa: E501
        This is the endpoint to check on the progress of a previous calculation request. Response body contains status information of the entire request and each individual calculation unit. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_calculation_status_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: From url, provided from the location header in the Run Multiple Calculations endpoint. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(CalculationStatus, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured FIRST (see note in
        # cancel_calculation_by_id_with_http_info).
        local_var_params = locals()

        all_params = ['id'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_calculation_status_by_id" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
                                                       local_var_params['id'] is None): # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `get_calculation_status_by_id`") # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id'] # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['Basic'] # noqa: E501

        return self.api_client.call_api(
            '/analytics/engines/v2/calculations/{id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CalculationStatus', # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_calculation_status_summaries(self, **kwargs): # noqa: E501
        """Get all calculation statuses # noqa: E501
        This endpoints returns all active calculation requests. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_calculation_status_summaries(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: dict(str, CalculationStatusSummary)
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: request only the deserialized payload, not the
        # (data, status_code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        return self.get_calculation_status_summaries_with_http_info(**kwargs) # noqa: E501

    def get_calculation_status_summaries_with_http_info(self, **kwargs): # noqa: E501
        """Get all calculation statuses # noqa: E501
        This endpoints returns all active calculation requests. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_calculation_status_summaries_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(dict(str, CalculationStatusSummary), status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured FIRST (see note in
        # cancel_calculation_by_id_with_http_info).
        local_var_params = locals()

        # This endpoint takes no path/query parameters — only the standard
        # request-control keywords are accepted.
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_calculation_status_summaries" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['Basic'] # noqa: E501

        return self.api_client.call_api(
            '/analytics/engines/v2/calculations', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='dict(str, CalculationStatusSummary)', # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def run_calculation(self, **kwargs): # noqa: E501
        """Run calculation # noqa: E501
        This endpoint creates a new calculation and runs the set of calculation units specified in the POST body. This must be used first before get status or cancelling endpoints with a calculation id. A successful response will contain the URL to check the status of the calculation request. Remarks: • Maximum 25 points allowed per calculation and maximum 500 points allowed across all simultaneous calculations. (Refer API documentation for more information) • Any settings in POST body will act as a one-time override over the settings saved in the PA/SPAR/Vault template. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.run_calculation(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param Calculation calculation:
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: request only the deserialized payload, not the
        # (data, status_code, headers) tuple.
        kwargs['_return_http_data_only'] = True
        return self.run_calculation_with_http_info(**kwargs) # noqa: E501

    def run_calculation_with_http_info(self, **kwargs): # noqa: E501
        """Run calculation # noqa: E501
        This endpoint creates a new calculation and runs the set of calculation units specified in the POST body. This must be used first before get status or cancelling endpoints with a calculation id. A successful response will contain the URL to check the status of the calculation request. Remarks: • Maximum 25 points allowed per calculation and maximum 500 points allowed across all simultaneous calculations. (Refer API documentation for more information) • Any settings in POST body will act as a one-time override over the settings saved in the PA/SPAR/Vault template. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.run_calculation_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param Calculation calculation:
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must be captured FIRST (see note in
        # cancel_calculation_by_id_with_http_info).
        local_var_params = locals()

        all_params = ['calculation'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments, then flatten the accepted ones.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method run_calculation" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # 'calculation' is optional; when present it is serialized as the
        # JSON request body.
        if 'calculation' in local_var_params:
            body_params = local_var_params['calculation']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501

        # Authentication setting
        auth_settings = ['Basic'] # noqa: E501

        return self.api_client.call_api(
            '/analytics/engines/v2/calculations', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
| 48.041758
| 617
| 0.621758
| 2,535
| 21,859
| 5.148323
| 0.100197
| 0.03494
| 0.045054
| 0.027584
| 0.927285
| 0.921002
| 0.9197
| 0.907057
| 0.904222
| 0.904222
| 0
| 0.013285
| 0.311268
| 21,859
| 454
| 618
| 48.147577
| 0.853072
| 0.523308
| 0
| 0.758974
| 0
| 0
| 0.164628
| 0.063361
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046154
| false
| 0
| 0.025641
| 0
| 0.117949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
685f71ec49e44f869b92e1c0b3fcedd6c1344c51
| 86,043
|
py
|
Python
|
tests/test_provider_hashicorp_aws.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_hashicorp_aws.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_hashicorp_aws.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_hashicorp_aws.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:12:25 UTC)
def test_provider_import():
    """Smoke test: the AWS provider module must be importable."""
    import importlib

    # Equivalent to `import terrascript.provider.hashicorp.aws`: runs the
    # same import machinery and raises ImportError if the module is broken.
    importlib.import_module("terrascript.provider.hashicorp.aws")
def test_resource_import():
from terrascript.resource.hashicorp.aws import aws_accessanalyzer_analyzer
from terrascript.resource.hashicorp.aws import aws_acm_certificate
from terrascript.resource.hashicorp.aws import aws_acm_certificate_validation
from terrascript.resource.hashicorp.aws import aws_acmpca_certificate
from terrascript.resource.hashicorp.aws import aws_acmpca_certificate_authority
from terrascript.resource.hashicorp.aws import (
aws_acmpca_certificate_authority_certificate,
)
from terrascript.resource.hashicorp.aws import aws_alb
from terrascript.resource.hashicorp.aws import aws_alb_listener
from terrascript.resource.hashicorp.aws import aws_alb_listener_certificate
from terrascript.resource.hashicorp.aws import aws_alb_listener_rule
from terrascript.resource.hashicorp.aws import aws_alb_target_group
from terrascript.resource.hashicorp.aws import aws_alb_target_group_attachment
from terrascript.resource.hashicorp.aws import aws_ami
from terrascript.resource.hashicorp.aws import aws_ami_copy
from terrascript.resource.hashicorp.aws import aws_ami_from_instance
from terrascript.resource.hashicorp.aws import aws_ami_launch_permission
from terrascript.resource.hashicorp.aws import aws_amplify_app
from terrascript.resource.hashicorp.aws import aws_amplify_backend_environment
from terrascript.resource.hashicorp.aws import aws_amplify_branch
from terrascript.resource.hashicorp.aws import aws_amplify_domain_association
from terrascript.resource.hashicorp.aws import aws_amplify_webhook
from terrascript.resource.hashicorp.aws import aws_api_gateway_account
from terrascript.resource.hashicorp.aws import aws_api_gateway_api_key
from terrascript.resource.hashicorp.aws import aws_api_gateway_authorizer
from terrascript.resource.hashicorp.aws import aws_api_gateway_base_path_mapping
from terrascript.resource.hashicorp.aws import aws_api_gateway_client_certificate
from terrascript.resource.hashicorp.aws import aws_api_gateway_deployment
from terrascript.resource.hashicorp.aws import aws_api_gateway_documentation_part
from terrascript.resource.hashicorp.aws import aws_api_gateway_documentation_version
from terrascript.resource.hashicorp.aws import aws_api_gateway_domain_name
from terrascript.resource.hashicorp.aws import aws_api_gateway_gateway_response
from terrascript.resource.hashicorp.aws import aws_api_gateway_integration
from terrascript.resource.hashicorp.aws import aws_api_gateway_integration_response
from terrascript.resource.hashicorp.aws import aws_api_gateway_method
from terrascript.resource.hashicorp.aws import aws_api_gateway_method_response
from terrascript.resource.hashicorp.aws import aws_api_gateway_method_settings
from terrascript.resource.hashicorp.aws import aws_api_gateway_model
from terrascript.resource.hashicorp.aws import aws_api_gateway_request_validator
from terrascript.resource.hashicorp.aws import aws_api_gateway_resource
from terrascript.resource.hashicorp.aws import aws_api_gateway_rest_api
from terrascript.resource.hashicorp.aws import aws_api_gateway_rest_api_policy
from terrascript.resource.hashicorp.aws import aws_api_gateway_stage
from terrascript.resource.hashicorp.aws import aws_api_gateway_usage_plan
from terrascript.resource.hashicorp.aws import aws_api_gateway_usage_plan_key
from terrascript.resource.hashicorp.aws import aws_api_gateway_vpc_link
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_api
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_api_mapping
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_authorizer
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_deployment
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_domain_name
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_integration
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_integration_response
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_model
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_route
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_route_response
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_stage
from terrascript.resource.hashicorp.aws import aws_apigatewayv2_vpc_link
from terrascript.resource.hashicorp.aws import aws_app_cookie_stickiness_policy
from terrascript.resource.hashicorp.aws import aws_appautoscaling_policy
from terrascript.resource.hashicorp.aws import aws_appautoscaling_scheduled_action
from terrascript.resource.hashicorp.aws import aws_appautoscaling_target
from terrascript.resource.hashicorp.aws import aws_appconfig_application
from terrascript.resource.hashicorp.aws import aws_appconfig_configuration_profile
from terrascript.resource.hashicorp.aws import aws_appconfig_deployment
from terrascript.resource.hashicorp.aws import aws_appconfig_deployment_strategy
from terrascript.resource.hashicorp.aws import aws_appconfig_environment
from terrascript.resource.hashicorp.aws import (
aws_appconfig_hosted_configuration_version,
)
from terrascript.resource.hashicorp.aws import aws_appmesh_gateway_route
from terrascript.resource.hashicorp.aws import aws_appmesh_mesh
from terrascript.resource.hashicorp.aws import aws_appmesh_route
from terrascript.resource.hashicorp.aws import aws_appmesh_virtual_gateway
from terrascript.resource.hashicorp.aws import aws_appmesh_virtual_node
from terrascript.resource.hashicorp.aws import aws_appmesh_virtual_router
from terrascript.resource.hashicorp.aws import aws_appmesh_virtual_service
from terrascript.resource.hashicorp.aws import (
aws_apprunner_auto_scaling_configuration_version,
)
from terrascript.resource.hashicorp.aws import aws_apprunner_connection
from terrascript.resource.hashicorp.aws import (
aws_apprunner_custom_domain_association,
)
from terrascript.resource.hashicorp.aws import aws_apprunner_service
from terrascript.resource.hashicorp.aws import aws_appstream_fleet
from terrascript.resource.hashicorp.aws import aws_appstream_stack
from terrascript.resource.hashicorp.aws import aws_appsync_api_key
from terrascript.resource.hashicorp.aws import aws_appsync_datasource
from terrascript.resource.hashicorp.aws import aws_appsync_function
from terrascript.resource.hashicorp.aws import aws_appsync_graphql_api
from terrascript.resource.hashicorp.aws import aws_appsync_resolver
from terrascript.resource.hashicorp.aws import aws_athena_database
from terrascript.resource.hashicorp.aws import aws_athena_named_query
from terrascript.resource.hashicorp.aws import aws_athena_workgroup
from terrascript.resource.hashicorp.aws import aws_autoscaling_attachment
from terrascript.resource.hashicorp.aws import aws_autoscaling_group
from terrascript.resource.hashicorp.aws import aws_autoscaling_group_tag
from terrascript.resource.hashicorp.aws import aws_autoscaling_lifecycle_hook
from terrascript.resource.hashicorp.aws import aws_autoscaling_notification
from terrascript.resource.hashicorp.aws import aws_autoscaling_policy
from terrascript.resource.hashicorp.aws import aws_autoscaling_schedule
from terrascript.resource.hashicorp.aws import aws_autoscalingplans_scaling_plan
from terrascript.resource.hashicorp.aws import aws_backup_global_settings
from terrascript.resource.hashicorp.aws import aws_backup_plan
from terrascript.resource.hashicorp.aws import aws_backup_region_settings
from terrascript.resource.hashicorp.aws import aws_backup_selection
from terrascript.resource.hashicorp.aws import aws_backup_vault
from terrascript.resource.hashicorp.aws import aws_backup_vault_notifications
from terrascript.resource.hashicorp.aws import aws_backup_vault_policy
from terrascript.resource.hashicorp.aws import aws_batch_compute_environment
from terrascript.resource.hashicorp.aws import aws_batch_job_definition
from terrascript.resource.hashicorp.aws import aws_batch_job_queue
from terrascript.resource.hashicorp.aws import aws_budgets_budget
from terrascript.resource.hashicorp.aws import aws_budgets_budget_action
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector_group
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector_logging
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector_origination
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector_streaming
from terrascript.resource.hashicorp.aws import aws_chime_voice_connector_termination
from terrascript.resource.hashicorp.aws import aws_cloud9_environment_ec2
from terrascript.resource.hashicorp.aws import aws_cloudformation_stack
from terrascript.resource.hashicorp.aws import aws_cloudformation_stack_set
from terrascript.resource.hashicorp.aws import aws_cloudformation_stack_set_instance
from terrascript.resource.hashicorp.aws import aws_cloudformation_type
from terrascript.resource.hashicorp.aws import aws_cloudfront_cache_policy
from terrascript.resource.hashicorp.aws import aws_cloudfront_distribution
from terrascript.resource.hashicorp.aws import aws_cloudfront_function
from terrascript.resource.hashicorp.aws import aws_cloudfront_key_group
from terrascript.resource.hashicorp.aws import (
aws_cloudfront_monitoring_subscription,
)
from terrascript.resource.hashicorp.aws import aws_cloudfront_origin_access_identity
from terrascript.resource.hashicorp.aws import aws_cloudfront_origin_request_policy
from terrascript.resource.hashicorp.aws import aws_cloudfront_public_key
from terrascript.resource.hashicorp.aws import aws_cloudfront_realtime_log_config
from terrascript.resource.hashicorp.aws import aws_cloudhsm_v2_cluster
from terrascript.resource.hashicorp.aws import aws_cloudhsm_v2_hsm
from terrascript.resource.hashicorp.aws import aws_cloudtrail
from terrascript.resource.hashicorp.aws import aws_cloudwatch_composite_alarm
from terrascript.resource.hashicorp.aws import aws_cloudwatch_dashboard
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_api_destination
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_archive
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_bus
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_bus_policy
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_connection
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_permission
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_rule
from terrascript.resource.hashicorp.aws import aws_cloudwatch_event_target
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_destination
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_destination_policy
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_group
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_metric_filter
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_resource_policy
from terrascript.resource.hashicorp.aws import aws_cloudwatch_log_stream
from terrascript.resource.hashicorp.aws import (
aws_cloudwatch_log_subscription_filter,
)
from terrascript.resource.hashicorp.aws import aws_cloudwatch_metric_alarm
from terrascript.resource.hashicorp.aws import aws_cloudwatch_metric_stream
from terrascript.resource.hashicorp.aws import aws_cloudwatch_query_definition
from terrascript.resource.hashicorp.aws import aws_codeartifact_domain
from terrascript.resource.hashicorp.aws import (
aws_codeartifact_domain_permissions_policy,
)
from terrascript.resource.hashicorp.aws import aws_codeartifact_repository
from terrascript.resource.hashicorp.aws import (
aws_codeartifact_repository_permissions_policy,
)
from terrascript.resource.hashicorp.aws import aws_codebuild_project
from terrascript.resource.hashicorp.aws import aws_codebuild_report_group
from terrascript.resource.hashicorp.aws import aws_codebuild_source_credential
from terrascript.resource.hashicorp.aws import aws_codebuild_webhook
from terrascript.resource.hashicorp.aws import aws_codecommit_repository
from terrascript.resource.hashicorp.aws import aws_codecommit_trigger
from terrascript.resource.hashicorp.aws import aws_codedeploy_app
from terrascript.resource.hashicorp.aws import aws_codedeploy_deployment_config
from terrascript.resource.hashicorp.aws import aws_codedeploy_deployment_group
from terrascript.resource.hashicorp.aws import aws_codepipeline
from terrascript.resource.hashicorp.aws import aws_codepipeline_webhook
from terrascript.resource.hashicorp.aws import aws_codestarconnections_connection
from terrascript.resource.hashicorp.aws import aws_codestarconnections_host
from terrascript.resource.hashicorp.aws import (
aws_codestarnotifications_notification_rule,
)
from terrascript.resource.hashicorp.aws import aws_cognito_identity_pool
from terrascript.resource.hashicorp.aws import (
aws_cognito_identity_pool_roles_attachment,
)
from terrascript.resource.hashicorp.aws import aws_cognito_identity_provider
from terrascript.resource.hashicorp.aws import aws_cognito_resource_server
from terrascript.resource.hashicorp.aws import aws_cognito_user_group
from terrascript.resource.hashicorp.aws import aws_cognito_user_pool
from terrascript.resource.hashicorp.aws import aws_cognito_user_pool_client
from terrascript.resource.hashicorp.aws import aws_cognito_user_pool_domain
from terrascript.resource.hashicorp.aws import (
aws_cognito_user_pool_ui_customization,
)
from terrascript.resource.hashicorp.aws import aws_config_aggregate_authorization
from terrascript.resource.hashicorp.aws import aws_config_config_rule
from terrascript.resource.hashicorp.aws import aws_config_configuration_aggregator
from terrascript.resource.hashicorp.aws import aws_config_configuration_recorder
from terrascript.resource.hashicorp.aws import (
aws_config_configuration_recorder_status,
)
from terrascript.resource.hashicorp.aws import aws_config_conformance_pack
from terrascript.resource.hashicorp.aws import aws_config_delivery_channel
from terrascript.resource.hashicorp.aws import (
aws_config_organization_conformance_pack,
)
from terrascript.resource.hashicorp.aws import aws_config_organization_custom_rule
from terrascript.resource.hashicorp.aws import aws_config_organization_managed_rule
from terrascript.resource.hashicorp.aws import aws_config_remediation_configuration
from terrascript.resource.hashicorp.aws import aws_connect_contact_flow
from terrascript.resource.hashicorp.aws import aws_connect_instance
from terrascript.resource.hashicorp.aws import aws_cur_report_definition
from terrascript.resource.hashicorp.aws import aws_customer_gateway
from terrascript.resource.hashicorp.aws import aws_datapipeline_pipeline
from terrascript.resource.hashicorp.aws import aws_datasync_agent
from terrascript.resource.hashicorp.aws import aws_datasync_location_efs
from terrascript.resource.hashicorp.aws import (
aws_datasync_location_fsx_windows_file_system,
)
from terrascript.resource.hashicorp.aws import aws_datasync_location_nfs
from terrascript.resource.hashicorp.aws import aws_datasync_location_s3
from terrascript.resource.hashicorp.aws import aws_datasync_location_smb
from terrascript.resource.hashicorp.aws import aws_datasync_task
from terrascript.resource.hashicorp.aws import aws_dax_cluster
from terrascript.resource.hashicorp.aws import aws_dax_parameter_group
from terrascript.resource.hashicorp.aws import aws_dax_subnet_group
from terrascript.resource.hashicorp.aws import aws_db_cluster_snapshot
from terrascript.resource.hashicorp.aws import aws_db_event_subscription
from terrascript.resource.hashicorp.aws import aws_db_instance
from terrascript.resource.hashicorp.aws import aws_db_instance_role_association
from terrascript.resource.hashicorp.aws import aws_db_option_group
from terrascript.resource.hashicorp.aws import aws_db_parameter_group
from terrascript.resource.hashicorp.aws import aws_db_proxy
from terrascript.resource.hashicorp.aws import aws_db_proxy_default_target_group
from terrascript.resource.hashicorp.aws import aws_db_proxy_endpoint
from terrascript.resource.hashicorp.aws import aws_db_proxy_target
from terrascript.resource.hashicorp.aws import aws_db_security_group
from terrascript.resource.hashicorp.aws import aws_db_snapshot
from terrascript.resource.hashicorp.aws import aws_db_subnet_group
from terrascript.resource.hashicorp.aws import aws_default_network_acl
from terrascript.resource.hashicorp.aws import aws_default_route_table
from terrascript.resource.hashicorp.aws import aws_default_security_group
from terrascript.resource.hashicorp.aws import aws_default_subnet
from terrascript.resource.hashicorp.aws import aws_default_vpc
from terrascript.resource.hashicorp.aws import aws_default_vpc_dhcp_options
from terrascript.resource.hashicorp.aws import aws_devicefarm_project
from terrascript.resource.hashicorp.aws import (
aws_directory_service_conditional_forwarder,
)
from terrascript.resource.hashicorp.aws import aws_directory_service_directory
from terrascript.resource.hashicorp.aws import (
aws_directory_service_log_subscription,
)
from terrascript.resource.hashicorp.aws import aws_dlm_lifecycle_policy
from terrascript.resource.hashicorp.aws import aws_dms_certificate
from terrascript.resource.hashicorp.aws import aws_dms_endpoint
from terrascript.resource.hashicorp.aws import aws_dms_event_subscription
from terrascript.resource.hashicorp.aws import aws_dms_replication_instance
from terrascript.resource.hashicorp.aws import aws_dms_replication_subnet_group
from terrascript.resource.hashicorp.aws import aws_dms_replication_task
from terrascript.resource.hashicorp.aws import aws_docdb_cluster
from terrascript.resource.hashicorp.aws import aws_docdb_cluster_instance
from terrascript.resource.hashicorp.aws import aws_docdb_cluster_parameter_group
from terrascript.resource.hashicorp.aws import aws_docdb_cluster_snapshot
from terrascript.resource.hashicorp.aws import aws_docdb_subnet_group
from terrascript.resource.hashicorp.aws import aws_dx_bgp_peer
from terrascript.resource.hashicorp.aws import aws_dx_connection
from terrascript.resource.hashicorp.aws import aws_dx_connection_association
from terrascript.resource.hashicorp.aws import aws_dx_gateway
from terrascript.resource.hashicorp.aws import aws_dx_gateway_association
from terrascript.resource.hashicorp.aws import aws_dx_gateway_association_proposal
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_private_virtual_interface,
)
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_private_virtual_interface_accepter,
)
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_public_virtual_interface,
)
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_public_virtual_interface_accepter,
)
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_transit_virtual_interface,
)
from terrascript.resource.hashicorp.aws import (
aws_dx_hosted_transit_virtual_interface_accepter,
)
from terrascript.resource.hashicorp.aws import aws_dx_lag
from terrascript.resource.hashicorp.aws import aws_dx_private_virtual_interface
from terrascript.resource.hashicorp.aws import aws_dx_public_virtual_interface
from terrascript.resource.hashicorp.aws import aws_dx_transit_virtual_interface
from terrascript.resource.hashicorp.aws import aws_dynamodb_global_table
from terrascript.resource.hashicorp.aws import (
aws_dynamodb_kinesis_streaming_destination,
)
from terrascript.resource.hashicorp.aws import aws_dynamodb_table
from terrascript.resource.hashicorp.aws import aws_dynamodb_table_item
from terrascript.resource.hashicorp.aws import aws_dynamodb_tag
from terrascript.resource.hashicorp.aws import aws_ebs_default_kms_key
from terrascript.resource.hashicorp.aws import aws_ebs_encryption_by_default
from terrascript.resource.hashicorp.aws import aws_ebs_snapshot
from terrascript.resource.hashicorp.aws import aws_ebs_snapshot_copy
from terrascript.resource.hashicorp.aws import aws_ebs_snapshot_import
from terrascript.resource.hashicorp.aws import aws_ebs_volume
from terrascript.resource.hashicorp.aws import aws_ec2_availability_zone_group
from terrascript.resource.hashicorp.aws import aws_ec2_capacity_reservation
from terrascript.resource.hashicorp.aws import aws_ec2_carrier_gateway
from terrascript.resource.hashicorp.aws import aws_ec2_client_vpn_authorization_rule
from terrascript.resource.hashicorp.aws import aws_ec2_client_vpn_endpoint
from terrascript.resource.hashicorp.aws import (
aws_ec2_client_vpn_network_association,
)
from terrascript.resource.hashicorp.aws import aws_ec2_client_vpn_route
from terrascript.resource.hashicorp.aws import aws_ec2_fleet
from terrascript.resource.hashicorp.aws import aws_ec2_local_gateway_route
from terrascript.resource.hashicorp.aws import (
aws_ec2_local_gateway_route_table_vpc_association,
)
from terrascript.resource.hashicorp.aws import aws_ec2_managed_prefix_list
from terrascript.resource.hashicorp.aws import aws_ec2_managed_prefix_list_entry
from terrascript.resource.hashicorp.aws import aws_ec2_tag
from terrascript.resource.hashicorp.aws import aws_ec2_traffic_mirror_filter
from terrascript.resource.hashicorp.aws import aws_ec2_traffic_mirror_filter_rule
from terrascript.resource.hashicorp.aws import aws_ec2_traffic_mirror_session
from terrascript.resource.hashicorp.aws import aws_ec2_traffic_mirror_target
from terrascript.resource.hashicorp.aws import aws_ec2_transit_gateway
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_peering_attachment,
)
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_peering_attachment_accepter,
)
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_prefix_list_reference,
)
from terrascript.resource.hashicorp.aws import aws_ec2_transit_gateway_route
from terrascript.resource.hashicorp.aws import aws_ec2_transit_gateway_route_table
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_route_table_association,
)
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_route_table_propagation,
)
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_vpc_attachment,
)
from terrascript.resource.hashicorp.aws import (
aws_ec2_transit_gateway_vpc_attachment_accepter,
)
from terrascript.resource.hashicorp.aws import aws_ecr_lifecycle_policy
from terrascript.resource.hashicorp.aws import aws_ecr_registry_policy
from terrascript.resource.hashicorp.aws import aws_ecr_replication_configuration
from terrascript.resource.hashicorp.aws import aws_ecr_repository
from terrascript.resource.hashicorp.aws import aws_ecr_repository_policy
from terrascript.resource.hashicorp.aws import aws_ecrpublic_repository
from terrascript.resource.hashicorp.aws import aws_ecs_capacity_provider
from terrascript.resource.hashicorp.aws import aws_ecs_cluster
from terrascript.resource.hashicorp.aws import aws_ecs_service
from terrascript.resource.hashicorp.aws import aws_ecs_tag
from terrascript.resource.hashicorp.aws import aws_ecs_task_definition
from terrascript.resource.hashicorp.aws import aws_efs_access_point
from terrascript.resource.hashicorp.aws import aws_efs_backup_policy
from terrascript.resource.hashicorp.aws import aws_efs_file_system
from terrascript.resource.hashicorp.aws import aws_efs_file_system_policy
from terrascript.resource.hashicorp.aws import aws_efs_mount_target
from terrascript.resource.hashicorp.aws import aws_egress_only_internet_gateway
from terrascript.resource.hashicorp.aws import aws_eip
from terrascript.resource.hashicorp.aws import aws_eip_association
from terrascript.resource.hashicorp.aws import aws_eks_addon
from terrascript.resource.hashicorp.aws import aws_eks_cluster
from terrascript.resource.hashicorp.aws import aws_eks_fargate_profile
from terrascript.resource.hashicorp.aws import aws_eks_identity_provider_config
from terrascript.resource.hashicorp.aws import aws_eks_node_group
from terrascript.resource.hashicorp.aws import aws_elastic_beanstalk_application
from terrascript.resource.hashicorp.aws import (
aws_elastic_beanstalk_application_version,
)
from terrascript.resource.hashicorp.aws import (
aws_elastic_beanstalk_configuration_template,
)
from terrascript.resource.hashicorp.aws import aws_elastic_beanstalk_environment
from terrascript.resource.hashicorp.aws import aws_elasticache_cluster
from terrascript.resource.hashicorp.aws import (
aws_elasticache_global_replication_group,
)
from terrascript.resource.hashicorp.aws import aws_elasticache_parameter_group
from terrascript.resource.hashicorp.aws import aws_elasticache_replication_group
from terrascript.resource.hashicorp.aws import aws_elasticache_security_group
from terrascript.resource.hashicorp.aws import aws_elasticache_subnet_group
from terrascript.resource.hashicorp.aws import aws_elasticache_user
from terrascript.resource.hashicorp.aws import aws_elasticache_user_group
from terrascript.resource.hashicorp.aws import aws_elasticsearch_domain
from terrascript.resource.hashicorp.aws import aws_elasticsearch_domain_policy
from terrascript.resource.hashicorp.aws import aws_elasticsearch_domain_saml_options
from terrascript.resource.hashicorp.aws import aws_elastictranscoder_pipeline
from terrascript.resource.hashicorp.aws import aws_elastictranscoder_preset
from terrascript.resource.hashicorp.aws import aws_elb
from terrascript.resource.hashicorp.aws import aws_elb_attachment
from terrascript.resource.hashicorp.aws import aws_emr_cluster
from terrascript.resource.hashicorp.aws import aws_emr_instance_fleet
from terrascript.resource.hashicorp.aws import aws_emr_instance_group
from terrascript.resource.hashicorp.aws import aws_emr_managed_scaling_policy
from terrascript.resource.hashicorp.aws import aws_emr_security_configuration
from terrascript.resource.hashicorp.aws import aws_flow_log
from terrascript.resource.hashicorp.aws import aws_fms_admin_account
from terrascript.resource.hashicorp.aws import aws_fms_policy
from terrascript.resource.hashicorp.aws import aws_fsx_backup
from terrascript.resource.hashicorp.aws import aws_fsx_lustre_file_system
from terrascript.resource.hashicorp.aws import aws_fsx_ontap_file_system
from terrascript.resource.hashicorp.aws import aws_fsx_windows_file_system
from terrascript.resource.hashicorp.aws import aws_gamelift_alias
from terrascript.resource.hashicorp.aws import aws_gamelift_build
from terrascript.resource.hashicorp.aws import aws_gamelift_fleet
from terrascript.resource.hashicorp.aws import aws_gamelift_game_session_queue
from terrascript.resource.hashicorp.aws import aws_glacier_vault
from terrascript.resource.hashicorp.aws import aws_glacier_vault_lock
from terrascript.resource.hashicorp.aws import aws_globalaccelerator_accelerator
from terrascript.resource.hashicorp.aws import aws_globalaccelerator_endpoint_group
from terrascript.resource.hashicorp.aws import aws_globalaccelerator_listener
from terrascript.resource.hashicorp.aws import aws_glue_catalog_database
from terrascript.resource.hashicorp.aws import aws_glue_catalog_table
from terrascript.resource.hashicorp.aws import aws_glue_classifier
from terrascript.resource.hashicorp.aws import aws_glue_connection
from terrascript.resource.hashicorp.aws import aws_glue_crawler
from terrascript.resource.hashicorp.aws import (
aws_glue_data_catalog_encryption_settings,
)
from terrascript.resource.hashicorp.aws import aws_glue_dev_endpoint
from terrascript.resource.hashicorp.aws import aws_glue_job
from terrascript.resource.hashicorp.aws import aws_glue_ml_transform
from terrascript.resource.hashicorp.aws import aws_glue_partition
from terrascript.resource.hashicorp.aws import aws_glue_registry
from terrascript.resource.hashicorp.aws import aws_glue_resource_policy
from terrascript.resource.hashicorp.aws import aws_glue_schema
from terrascript.resource.hashicorp.aws import aws_glue_security_configuration
from terrascript.resource.hashicorp.aws import aws_glue_trigger
from terrascript.resource.hashicorp.aws import aws_glue_user_defined_function
from terrascript.resource.hashicorp.aws import aws_glue_workflow
from terrascript.resource.hashicorp.aws import aws_guardduty_detector
from terrascript.resource.hashicorp.aws import aws_guardduty_filter
from terrascript.resource.hashicorp.aws import aws_guardduty_invite_accepter
from terrascript.resource.hashicorp.aws import aws_guardduty_ipset
from terrascript.resource.hashicorp.aws import aws_guardduty_member
from terrascript.resource.hashicorp.aws import (
aws_guardduty_organization_admin_account,
)
from terrascript.resource.hashicorp.aws import (
aws_guardduty_organization_configuration,
)
from terrascript.resource.hashicorp.aws import aws_guardduty_publishing_destination
from terrascript.resource.hashicorp.aws import aws_guardduty_threatintelset
from terrascript.resource.hashicorp.aws import aws_iam_access_key
from terrascript.resource.hashicorp.aws import aws_iam_account_alias
from terrascript.resource.hashicorp.aws import aws_iam_account_password_policy
from terrascript.resource.hashicorp.aws import aws_iam_group
from terrascript.resource.hashicorp.aws import aws_iam_group_membership
from terrascript.resource.hashicorp.aws import aws_iam_group_policy
from terrascript.resource.hashicorp.aws import aws_iam_group_policy_attachment
from terrascript.resource.hashicorp.aws import aws_iam_instance_profile
from terrascript.resource.hashicorp.aws import aws_iam_openid_connect_provider
from terrascript.resource.hashicorp.aws import aws_iam_policy
from terrascript.resource.hashicorp.aws import aws_iam_policy_attachment
from terrascript.resource.hashicorp.aws import aws_iam_role
from terrascript.resource.hashicorp.aws import aws_iam_role_policy
from terrascript.resource.hashicorp.aws import aws_iam_role_policy_attachment
from terrascript.resource.hashicorp.aws import aws_iam_saml_provider
from terrascript.resource.hashicorp.aws import aws_iam_server_certificate
from terrascript.resource.hashicorp.aws import aws_iam_service_linked_role
from terrascript.resource.hashicorp.aws import aws_iam_user
from terrascript.resource.hashicorp.aws import aws_iam_user_group_membership
from terrascript.resource.hashicorp.aws import aws_iam_user_login_profile
from terrascript.resource.hashicorp.aws import aws_iam_user_policy
from terrascript.resource.hashicorp.aws import aws_iam_user_policy_attachment
from terrascript.resource.hashicorp.aws import aws_iam_user_ssh_key
from terrascript.resource.hashicorp.aws import aws_imagebuilder_component
from terrascript.resource.hashicorp.aws import (
aws_imagebuilder_distribution_configuration,
)
from terrascript.resource.hashicorp.aws import aws_imagebuilder_image
from terrascript.resource.hashicorp.aws import aws_imagebuilder_image_pipeline
from terrascript.resource.hashicorp.aws import aws_imagebuilder_image_recipe
from terrascript.resource.hashicorp.aws import (
aws_imagebuilder_infrastructure_configuration,
)
from terrascript.resource.hashicorp.aws import aws_inspector_assessment_target
from terrascript.resource.hashicorp.aws import aws_inspector_assessment_template
from terrascript.resource.hashicorp.aws import aws_inspector_resource_group
from terrascript.resource.hashicorp.aws import aws_instance
from terrascript.resource.hashicorp.aws import aws_internet_gateway
from terrascript.resource.hashicorp.aws import aws_iot_certificate
from terrascript.resource.hashicorp.aws import aws_iot_policy
from terrascript.resource.hashicorp.aws import aws_iot_policy_attachment
from terrascript.resource.hashicorp.aws import aws_iot_role_alias
from terrascript.resource.hashicorp.aws import aws_iot_thing
from terrascript.resource.hashicorp.aws import aws_iot_thing_principal_attachment
from terrascript.resource.hashicorp.aws import aws_iot_thing_type
from terrascript.resource.hashicorp.aws import aws_iot_topic_rule
from terrascript.resource.hashicorp.aws import aws_key_pair
from terrascript.resource.hashicorp.aws import aws_kinesis_analytics_application
from terrascript.resource.hashicorp.aws import aws_kinesis_firehose_delivery_stream
from terrascript.resource.hashicorp.aws import aws_kinesis_stream
from terrascript.resource.hashicorp.aws import aws_kinesis_stream_consumer
from terrascript.resource.hashicorp.aws import aws_kinesis_video_stream
from terrascript.resource.hashicorp.aws import aws_kinesisanalyticsv2_application
from terrascript.resource.hashicorp.aws import (
aws_kinesisanalyticsv2_application_snapshot,
)
from terrascript.resource.hashicorp.aws import aws_kms_alias
from terrascript.resource.hashicorp.aws import aws_kms_ciphertext
from terrascript.resource.hashicorp.aws import aws_kms_external_key
from terrascript.resource.hashicorp.aws import aws_kms_grant
from terrascript.resource.hashicorp.aws import aws_kms_key
from terrascript.resource.hashicorp.aws import aws_lakeformation_data_lake_settings
from terrascript.resource.hashicorp.aws import aws_lakeformation_permissions
from terrascript.resource.hashicorp.aws import aws_lakeformation_resource
from terrascript.resource.hashicorp.aws import aws_lambda_alias
from terrascript.resource.hashicorp.aws import aws_lambda_code_signing_config
from terrascript.resource.hashicorp.aws import aws_lambda_event_source_mapping
from terrascript.resource.hashicorp.aws import aws_lambda_function
from terrascript.resource.hashicorp.aws import (
aws_lambda_function_event_invoke_config,
)
from terrascript.resource.hashicorp.aws import aws_lambda_layer_version
from terrascript.resource.hashicorp.aws import aws_lambda_permission
from terrascript.resource.hashicorp.aws import (
aws_lambda_provisioned_concurrency_config,
)
from terrascript.resource.hashicorp.aws import aws_launch_configuration
from terrascript.resource.hashicorp.aws import aws_launch_template
from terrascript.resource.hashicorp.aws import aws_lb
from terrascript.resource.hashicorp.aws import aws_lb_cookie_stickiness_policy
from terrascript.resource.hashicorp.aws import aws_lb_listener
from terrascript.resource.hashicorp.aws import aws_lb_listener_certificate
from terrascript.resource.hashicorp.aws import aws_lb_listener_rule
from terrascript.resource.hashicorp.aws import aws_lb_ssl_negotiation_policy
from terrascript.resource.hashicorp.aws import aws_lb_target_group
from terrascript.resource.hashicorp.aws import aws_lb_target_group_attachment
from terrascript.resource.hashicorp.aws import aws_lex_bot
from terrascript.resource.hashicorp.aws import aws_lex_bot_alias
from terrascript.resource.hashicorp.aws import aws_lex_intent
from terrascript.resource.hashicorp.aws import aws_lex_slot_type
from terrascript.resource.hashicorp.aws import aws_licensemanager_association
from terrascript.resource.hashicorp.aws import (
aws_licensemanager_license_configuration,
)
from terrascript.resource.hashicorp.aws import aws_lightsail_domain
from terrascript.resource.hashicorp.aws import aws_lightsail_instance
from terrascript.resource.hashicorp.aws import aws_lightsail_instance_public_ports
from terrascript.resource.hashicorp.aws import aws_lightsail_key_pair
from terrascript.resource.hashicorp.aws import aws_lightsail_static_ip
from terrascript.resource.hashicorp.aws import aws_lightsail_static_ip_attachment
from terrascript.resource.hashicorp.aws import (
aws_load_balancer_backend_server_policy,
)
from terrascript.resource.hashicorp.aws import aws_load_balancer_listener_policy
from terrascript.resource.hashicorp.aws import aws_load_balancer_policy
from terrascript.resource.hashicorp.aws import aws_macie2_account
from terrascript.resource.hashicorp.aws import aws_macie2_classification_job
from terrascript.resource.hashicorp.aws import aws_macie2_custom_data_identifier
from terrascript.resource.hashicorp.aws import aws_macie2_findings_filter
from terrascript.resource.hashicorp.aws import aws_macie2_invitation_accepter
from terrascript.resource.hashicorp.aws import aws_macie2_member
from terrascript.resource.hashicorp.aws import aws_macie2_organization_admin_account
from terrascript.resource.hashicorp.aws import aws_macie_member_account_association
from terrascript.resource.hashicorp.aws import aws_macie_s3_bucket_association
from terrascript.resource.hashicorp.aws import aws_main_route_table_association
from terrascript.resource.hashicorp.aws import aws_media_convert_queue
from terrascript.resource.hashicorp.aws import aws_media_package_channel
from terrascript.resource.hashicorp.aws import aws_media_store_container
from terrascript.resource.hashicorp.aws import aws_media_store_container_policy
from terrascript.resource.hashicorp.aws import aws_mq_broker
from terrascript.resource.hashicorp.aws import aws_mq_configuration
from terrascript.resource.hashicorp.aws import aws_msk_cluster
from terrascript.resource.hashicorp.aws import aws_msk_configuration
from terrascript.resource.hashicorp.aws import aws_msk_scram_secret_association
from terrascript.resource.hashicorp.aws import aws_mwaa_environment
from terrascript.resource.hashicorp.aws import aws_nat_gateway
from terrascript.resource.hashicorp.aws import aws_neptune_cluster
from terrascript.resource.hashicorp.aws import aws_neptune_cluster_endpoint
from terrascript.resource.hashicorp.aws import aws_neptune_cluster_instance
from terrascript.resource.hashicorp.aws import aws_neptune_cluster_parameter_group
from terrascript.resource.hashicorp.aws import aws_neptune_cluster_snapshot
from terrascript.resource.hashicorp.aws import aws_neptune_event_subscription
from terrascript.resource.hashicorp.aws import aws_neptune_parameter_group
from terrascript.resource.hashicorp.aws import aws_neptune_subnet_group
from terrascript.resource.hashicorp.aws import aws_network_acl
from terrascript.resource.hashicorp.aws import aws_network_acl_rule
from terrascript.resource.hashicorp.aws import aws_network_interface
from terrascript.resource.hashicorp.aws import aws_network_interface_attachment
from terrascript.resource.hashicorp.aws import aws_network_interface_sg_attachment
from terrascript.resource.hashicorp.aws import aws_networkfirewall_firewall
from terrascript.resource.hashicorp.aws import aws_networkfirewall_firewall_policy
from terrascript.resource.hashicorp.aws import (
aws_networkfirewall_logging_configuration,
)
from terrascript.resource.hashicorp.aws import aws_networkfirewall_resource_policy
from terrascript.resource.hashicorp.aws import aws_networkfirewall_rule_group
from terrascript.resource.hashicorp.aws import aws_opsworks_application
from terrascript.resource.hashicorp.aws import aws_opsworks_custom_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_ganglia_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_haproxy_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_instance
from terrascript.resource.hashicorp.aws import aws_opsworks_java_app_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_memcached_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_mysql_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_nodejs_app_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_permission
from terrascript.resource.hashicorp.aws import aws_opsworks_php_app_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_rails_app_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_rds_db_instance
from terrascript.resource.hashicorp.aws import aws_opsworks_stack
from terrascript.resource.hashicorp.aws import aws_opsworks_static_web_layer
from terrascript.resource.hashicorp.aws import aws_opsworks_user_profile
from terrascript.resource.hashicorp.aws import aws_organizations_account
from terrascript.resource.hashicorp.aws import (
aws_organizations_delegated_administrator,
)
from terrascript.resource.hashicorp.aws import aws_organizations_organization
from terrascript.resource.hashicorp.aws import aws_organizations_organizational_unit
from terrascript.resource.hashicorp.aws import aws_organizations_policy
from terrascript.resource.hashicorp.aws import aws_organizations_policy_attachment
from terrascript.resource.hashicorp.aws import aws_pinpoint_adm_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_apns_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_apns_sandbox_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_apns_voip_channel
from terrascript.resource.hashicorp.aws import (
aws_pinpoint_apns_voip_sandbox_channel,
)
from terrascript.resource.hashicorp.aws import aws_pinpoint_app
from terrascript.resource.hashicorp.aws import aws_pinpoint_baidu_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_email_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_event_stream
from terrascript.resource.hashicorp.aws import aws_pinpoint_gcm_channel
from terrascript.resource.hashicorp.aws import aws_pinpoint_sms_channel
from terrascript.resource.hashicorp.aws import aws_placement_group
from terrascript.resource.hashicorp.aws import aws_prometheus_workspace
from terrascript.resource.hashicorp.aws import aws_proxy_protocol_policy
from terrascript.resource.hashicorp.aws import aws_qldb_ledger
from terrascript.resource.hashicorp.aws import aws_quicksight_group
from terrascript.resource.hashicorp.aws import aws_quicksight_group_membership
from terrascript.resource.hashicorp.aws import aws_quicksight_user
from terrascript.resource.hashicorp.aws import aws_ram_principal_association
from terrascript.resource.hashicorp.aws import aws_ram_resource_association
from terrascript.resource.hashicorp.aws import aws_ram_resource_share
from terrascript.resource.hashicorp.aws import aws_ram_resource_share_accepter
from terrascript.resource.hashicorp.aws import aws_rds_cluster
from terrascript.resource.hashicorp.aws import aws_rds_cluster_endpoint
from terrascript.resource.hashicorp.aws import aws_rds_cluster_instance
from terrascript.resource.hashicorp.aws import aws_rds_cluster_parameter_group
from terrascript.resource.hashicorp.aws import aws_rds_cluster_role_association
from terrascript.resource.hashicorp.aws import aws_rds_global_cluster
from terrascript.resource.hashicorp.aws import aws_redshift_cluster
from terrascript.resource.hashicorp.aws import aws_redshift_event_subscription
from terrascript.resource.hashicorp.aws import aws_redshift_parameter_group
from terrascript.resource.hashicorp.aws import aws_redshift_security_group
from terrascript.resource.hashicorp.aws import aws_redshift_snapshot_copy_grant
from terrascript.resource.hashicorp.aws import aws_redshift_snapshot_schedule
from terrascript.resource.hashicorp.aws import (
aws_redshift_snapshot_schedule_association,
)
from terrascript.resource.hashicorp.aws import aws_redshift_subnet_group
from terrascript.resource.hashicorp.aws import aws_resourcegroups_group
from terrascript.resource.hashicorp.aws import aws_route
from terrascript.resource.hashicorp.aws import aws_route53_delegation_set
from terrascript.resource.hashicorp.aws import aws_route53_health_check
from terrascript.resource.hashicorp.aws import aws_route53_hosted_zone_dnssec
from terrascript.resource.hashicorp.aws import aws_route53_key_signing_key
from terrascript.resource.hashicorp.aws import aws_route53_query_log
from terrascript.resource.hashicorp.aws import aws_route53_record
from terrascript.resource.hashicorp.aws import aws_route53_resolver_dnssec_config
from terrascript.resource.hashicorp.aws import aws_route53_resolver_endpoint
from terrascript.resource.hashicorp.aws import aws_route53_resolver_firewall_config
from terrascript.resource.hashicorp.aws import (
aws_route53_resolver_firewall_domain_list,
)
from terrascript.resource.hashicorp.aws import aws_route53_resolver_firewall_rule
from terrascript.resource.hashicorp.aws import (
aws_route53_resolver_firewall_rule_group,
)
from terrascript.resource.hashicorp.aws import (
aws_route53_resolver_firewall_rule_group_association,
)
from terrascript.resource.hashicorp.aws import aws_route53_resolver_query_log_config
from terrascript.resource.hashicorp.aws import (
aws_route53_resolver_query_log_config_association,
)
from terrascript.resource.hashicorp.aws import aws_route53_resolver_rule
from terrascript.resource.hashicorp.aws import aws_route53_resolver_rule_association
from terrascript.resource.hashicorp.aws import (
aws_route53_vpc_association_authorization,
)
from terrascript.resource.hashicorp.aws import aws_route53_zone
from terrascript.resource.hashicorp.aws import aws_route53_zone_association
from terrascript.resource.hashicorp.aws import (
aws_route53recoverycontrolconfig_cluster,
)
from terrascript.resource.hashicorp.aws import (
aws_route53recoverycontrolconfig_control_panel,
)
from terrascript.resource.hashicorp.aws import (
aws_route53recoverycontrolconfig_routing_control,
)
from terrascript.resource.hashicorp.aws import (
aws_route53recoverycontrolconfig_safety_rule,
)
from terrascript.resource.hashicorp.aws import aws_route53recoveryreadiness_cell
from terrascript.resource.hashicorp.aws import (
aws_route53recoveryreadiness_readiness_check,
)
from terrascript.resource.hashicorp.aws import (
aws_route53recoveryreadiness_recovery_group,
)
from terrascript.resource.hashicorp.aws import (
aws_route53recoveryreadiness_resource_set,
)
from terrascript.resource.hashicorp.aws import aws_route_table
from terrascript.resource.hashicorp.aws import aws_route_table_association
from terrascript.resource.hashicorp.aws import aws_s3_access_point
from terrascript.resource.hashicorp.aws import aws_s3_account_public_access_block
from terrascript.resource.hashicorp.aws import aws_s3_bucket
from terrascript.resource.hashicorp.aws import aws_s3_bucket_analytics_configuration
from terrascript.resource.hashicorp.aws import aws_s3_bucket_inventory
from terrascript.resource.hashicorp.aws import aws_s3_bucket_metric
from terrascript.resource.hashicorp.aws import aws_s3_bucket_notification
from terrascript.resource.hashicorp.aws import aws_s3_bucket_object
from terrascript.resource.hashicorp.aws import aws_s3_bucket_ownership_controls
from terrascript.resource.hashicorp.aws import aws_s3_bucket_policy
from terrascript.resource.hashicorp.aws import aws_s3_bucket_public_access_block
from terrascript.resource.hashicorp.aws import aws_s3_object_copy
from terrascript.resource.hashicorp.aws import aws_s3control_bucket
from terrascript.resource.hashicorp.aws import (
aws_s3control_bucket_lifecycle_configuration,
)
from terrascript.resource.hashicorp.aws import aws_s3control_bucket_policy
from terrascript.resource.hashicorp.aws import aws_s3outposts_endpoint
from terrascript.resource.hashicorp.aws import aws_sagemaker_app
from terrascript.resource.hashicorp.aws import aws_sagemaker_app_image_config
from terrascript.resource.hashicorp.aws import aws_sagemaker_code_repository
from terrascript.resource.hashicorp.aws import aws_sagemaker_device_fleet
from terrascript.resource.hashicorp.aws import aws_sagemaker_domain
from terrascript.resource.hashicorp.aws import aws_sagemaker_endpoint
from terrascript.resource.hashicorp.aws import aws_sagemaker_endpoint_configuration
from terrascript.resource.hashicorp.aws import aws_sagemaker_feature_group
from terrascript.resource.hashicorp.aws import aws_sagemaker_flow_definition
from terrascript.resource.hashicorp.aws import aws_sagemaker_human_task_ui
from terrascript.resource.hashicorp.aws import aws_sagemaker_image
from terrascript.resource.hashicorp.aws import aws_sagemaker_image_version
from terrascript.resource.hashicorp.aws import aws_sagemaker_model
from terrascript.resource.hashicorp.aws import aws_sagemaker_model_package_group
from terrascript.resource.hashicorp.aws import aws_sagemaker_notebook_instance
from terrascript.resource.hashicorp.aws import (
aws_sagemaker_notebook_instance_lifecycle_configuration,
)
from terrascript.resource.hashicorp.aws import aws_sagemaker_user_profile
from terrascript.resource.hashicorp.aws import aws_sagemaker_workforce
from terrascript.resource.hashicorp.aws import aws_sagemaker_workteam
from terrascript.resource.hashicorp.aws import aws_schemas_discoverer
from terrascript.resource.hashicorp.aws import aws_schemas_registry
from terrascript.resource.hashicorp.aws import aws_schemas_schema
from terrascript.resource.hashicorp.aws import aws_secretsmanager_secret
from terrascript.resource.hashicorp.aws import aws_secretsmanager_secret_policy
from terrascript.resource.hashicorp.aws import aws_secretsmanager_secret_rotation
from terrascript.resource.hashicorp.aws import aws_secretsmanager_secret_version
from terrascript.resource.hashicorp.aws import aws_security_group
from terrascript.resource.hashicorp.aws import aws_security_group_rule
from terrascript.resource.hashicorp.aws import aws_securityhub_account
from terrascript.resource.hashicorp.aws import aws_securityhub_action_target
from terrascript.resource.hashicorp.aws import aws_securityhub_insight
from terrascript.resource.hashicorp.aws import aws_securityhub_invite_accepter
from terrascript.resource.hashicorp.aws import aws_securityhub_member
from terrascript.resource.hashicorp.aws import (
aws_securityhub_organization_admin_account,
)
from terrascript.resource.hashicorp.aws import (
aws_securityhub_organization_configuration,
)
from terrascript.resource.hashicorp.aws import aws_securityhub_product_subscription
from terrascript.resource.hashicorp.aws import aws_securityhub_standards_control
from terrascript.resource.hashicorp.aws import (
aws_securityhub_standards_subscription,
)
from terrascript.resource.hashicorp.aws import (
aws_serverlessapplicationrepository_cloudformation_stack,
)
from terrascript.resource.hashicorp.aws import aws_service_discovery_http_namespace
from terrascript.resource.hashicorp.aws import aws_service_discovery_instance
from terrascript.resource.hashicorp.aws import (
aws_service_discovery_private_dns_namespace,
)
from terrascript.resource.hashicorp.aws import (
aws_service_discovery_public_dns_namespace,
)
from terrascript.resource.hashicorp.aws import aws_service_discovery_service
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_budget_resource_association,
)
from terrascript.resource.hashicorp.aws import aws_servicecatalog_constraint
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_organizations_access,
)
from terrascript.resource.hashicorp.aws import aws_servicecatalog_portfolio
from terrascript.resource.hashicorp.aws import aws_servicecatalog_portfolio_share
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_principal_portfolio_association,
)
from terrascript.resource.hashicorp.aws import aws_servicecatalog_product
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_product_portfolio_association,
)
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_provisioned_product,
)
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_provisioning_artifact,
)
from terrascript.resource.hashicorp.aws import aws_servicecatalog_service_action
from terrascript.resource.hashicorp.aws import aws_servicecatalog_tag_option
from terrascript.resource.hashicorp.aws import (
aws_servicecatalog_tag_option_resource_association,
)
from terrascript.resource.hashicorp.aws import aws_servicequotas_service_quota
from terrascript.resource.hashicorp.aws import aws_ses_active_receipt_rule_set
from terrascript.resource.hashicorp.aws import aws_ses_configuration_set
from terrascript.resource.hashicorp.aws import aws_ses_domain_dkim
from terrascript.resource.hashicorp.aws import aws_ses_domain_identity
from terrascript.resource.hashicorp.aws import aws_ses_domain_identity_verification
from terrascript.resource.hashicorp.aws import aws_ses_domain_mail_from
from terrascript.resource.hashicorp.aws import aws_ses_email_identity
from terrascript.resource.hashicorp.aws import aws_ses_event_destination
from terrascript.resource.hashicorp.aws import aws_ses_identity_notification_topic
from terrascript.resource.hashicorp.aws import aws_ses_identity_policy
from terrascript.resource.hashicorp.aws import aws_ses_receipt_filter
from terrascript.resource.hashicorp.aws import aws_ses_receipt_rule
from terrascript.resource.hashicorp.aws import aws_ses_receipt_rule_set
from terrascript.resource.hashicorp.aws import aws_ses_template
from terrascript.resource.hashicorp.aws import aws_sfn_activity
from terrascript.resource.hashicorp.aws import aws_sfn_state_machine
from terrascript.resource.hashicorp.aws import aws_shield_protection
from terrascript.resource.hashicorp.aws import aws_shield_protection_group
from terrascript.resource.hashicorp.aws import aws_signer_signing_job
from terrascript.resource.hashicorp.aws import aws_signer_signing_profile
from terrascript.resource.hashicorp.aws import aws_signer_signing_profile_permission
from terrascript.resource.hashicorp.aws import aws_simpledb_domain
from terrascript.resource.hashicorp.aws import aws_snapshot_create_volume_permission
from terrascript.resource.hashicorp.aws import aws_sns_platform_application
from terrascript.resource.hashicorp.aws import aws_sns_sms_preferences
from terrascript.resource.hashicorp.aws import aws_sns_topic
from terrascript.resource.hashicorp.aws import aws_sns_topic_policy
from terrascript.resource.hashicorp.aws import aws_sns_topic_subscription
from terrascript.resource.hashicorp.aws import aws_spot_datafeed_subscription
from terrascript.resource.hashicorp.aws import aws_spot_fleet_request
from terrascript.resource.hashicorp.aws import aws_spot_instance_request
from terrascript.resource.hashicorp.aws import aws_sqs_queue
from terrascript.resource.hashicorp.aws import aws_sqs_queue_policy
from terrascript.resource.hashicorp.aws import aws_ssm_activation
from terrascript.resource.hashicorp.aws import aws_ssm_association
from terrascript.resource.hashicorp.aws import aws_ssm_document
from terrascript.resource.hashicorp.aws import aws_ssm_maintenance_window
from terrascript.resource.hashicorp.aws import aws_ssm_maintenance_window_target
from terrascript.resource.hashicorp.aws import aws_ssm_maintenance_window_task
from terrascript.resource.hashicorp.aws import aws_ssm_parameter
from terrascript.resource.hashicorp.aws import aws_ssm_patch_baseline
from terrascript.resource.hashicorp.aws import aws_ssm_patch_group
from terrascript.resource.hashicorp.aws import aws_ssm_resource_data_sync
from terrascript.resource.hashicorp.aws import aws_ssoadmin_account_assignment
from terrascript.resource.hashicorp.aws import (
aws_ssoadmin_managed_policy_attachment,
)
from terrascript.resource.hashicorp.aws import aws_ssoadmin_permission_set
from terrascript.resource.hashicorp.aws import (
aws_ssoadmin_permission_set_inline_policy,
)
from terrascript.resource.hashicorp.aws import aws_storagegateway_cache
from terrascript.resource.hashicorp.aws import (
aws_storagegateway_cached_iscsi_volume,
)
from terrascript.resource.hashicorp.aws import (
aws_storagegateway_file_system_association,
)
from terrascript.resource.hashicorp.aws import aws_storagegateway_gateway
from terrascript.resource.hashicorp.aws import aws_storagegateway_nfs_file_share
from terrascript.resource.hashicorp.aws import aws_storagegateway_smb_file_share
from terrascript.resource.hashicorp.aws import (
aws_storagegateway_stored_iscsi_volume,
)
from terrascript.resource.hashicorp.aws import aws_storagegateway_tape_pool
from terrascript.resource.hashicorp.aws import aws_storagegateway_upload_buffer
from terrascript.resource.hashicorp.aws import aws_storagegateway_working_storage
from terrascript.resource.hashicorp.aws import aws_subnet
from terrascript.resource.hashicorp.aws import aws_swf_domain
from terrascript.resource.hashicorp.aws import aws_synthetics_canary
from terrascript.resource.hashicorp.aws import aws_timestreamwrite_database
from terrascript.resource.hashicorp.aws import aws_timestreamwrite_table
from terrascript.resource.hashicorp.aws import aws_transfer_access
from terrascript.resource.hashicorp.aws import aws_transfer_server
from terrascript.resource.hashicorp.aws import aws_transfer_ssh_key
from terrascript.resource.hashicorp.aws import aws_transfer_user
from terrascript.resource.hashicorp.aws import aws_volume_attachment
from terrascript.resource.hashicorp.aws import aws_vpc
from terrascript.resource.hashicorp.aws import aws_vpc_dhcp_options
from terrascript.resource.hashicorp.aws import aws_vpc_dhcp_options_association
from terrascript.resource.hashicorp.aws import aws_vpc_endpoint
from terrascript.resource.hashicorp.aws import (
aws_vpc_endpoint_connection_notification,
)
from terrascript.resource.hashicorp.aws import (
aws_vpc_endpoint_route_table_association,
)
from terrascript.resource.hashicorp.aws import aws_vpc_endpoint_service
from terrascript.resource.hashicorp.aws import (
aws_vpc_endpoint_service_allowed_principal,
)
from terrascript.resource.hashicorp.aws import aws_vpc_endpoint_subnet_association
from terrascript.resource.hashicorp.aws import aws_vpc_ipv4_cidr_block_association
from terrascript.resource.hashicorp.aws import aws_vpc_peering_connection
from terrascript.resource.hashicorp.aws import aws_vpc_peering_connection_accepter
from terrascript.resource.hashicorp.aws import aws_vpc_peering_connection_options
from terrascript.resource.hashicorp.aws import aws_vpn_connection
from terrascript.resource.hashicorp.aws import aws_vpn_connection_route
from terrascript.resource.hashicorp.aws import aws_vpn_gateway
from terrascript.resource.hashicorp.aws import aws_vpn_gateway_attachment
from terrascript.resource.hashicorp.aws import aws_vpn_gateway_route_propagation
from terrascript.resource.hashicorp.aws import aws_waf_byte_match_set
from terrascript.resource.hashicorp.aws import aws_waf_geo_match_set
from terrascript.resource.hashicorp.aws import aws_waf_ipset
from terrascript.resource.hashicorp.aws import aws_waf_rate_based_rule
from terrascript.resource.hashicorp.aws import aws_waf_regex_match_set
from terrascript.resource.hashicorp.aws import aws_waf_regex_pattern_set
from terrascript.resource.hashicorp.aws import aws_waf_rule
from terrascript.resource.hashicorp.aws import aws_waf_rule_group
from terrascript.resource.hashicorp.aws import aws_waf_size_constraint_set
from terrascript.resource.hashicorp.aws import aws_waf_sql_injection_match_set
from terrascript.resource.hashicorp.aws import aws_waf_web_acl
from terrascript.resource.hashicorp.aws import aws_waf_xss_match_set
from terrascript.resource.hashicorp.aws import aws_wafregional_byte_match_set
from terrascript.resource.hashicorp.aws import aws_wafregional_geo_match_set
from terrascript.resource.hashicorp.aws import aws_wafregional_ipset
from terrascript.resource.hashicorp.aws import aws_wafregional_rate_based_rule
from terrascript.resource.hashicorp.aws import aws_wafregional_regex_match_set
from terrascript.resource.hashicorp.aws import aws_wafregional_regex_pattern_set
from terrascript.resource.hashicorp.aws import aws_wafregional_rule
from terrascript.resource.hashicorp.aws import aws_wafregional_rule_group
from terrascript.resource.hashicorp.aws import aws_wafregional_size_constraint_set
from terrascript.resource.hashicorp.aws import (
aws_wafregional_sql_injection_match_set,
)
from terrascript.resource.hashicorp.aws import aws_wafregional_web_acl
from terrascript.resource.hashicorp.aws import aws_wafregional_web_acl_association
from terrascript.resource.hashicorp.aws import aws_wafregional_xss_match_set
from terrascript.resource.hashicorp.aws import aws_wafv2_ip_set
from terrascript.resource.hashicorp.aws import aws_wafv2_regex_pattern_set
from terrascript.resource.hashicorp.aws import aws_wafv2_rule_group
from terrascript.resource.hashicorp.aws import aws_wafv2_web_acl
from terrascript.resource.hashicorp.aws import aws_wafv2_web_acl_association
from terrascript.resource.hashicorp.aws import (
aws_wafv2_web_acl_logging_configuration,
)
from terrascript.resource.hashicorp.aws import aws_worklink_fleet
from terrascript.resource.hashicorp.aws import (
aws_worklink_website_certificate_authority_association,
)
from terrascript.resource.hashicorp.aws import aws_workspaces_directory
from terrascript.resource.hashicorp.aws import aws_workspaces_ip_group
from terrascript.resource.hashicorp.aws import aws_workspaces_workspace
from terrascript.resource.hashicorp.aws import aws_xray_encryption_config
from terrascript.resource.hashicorp.aws import aws_xray_group
from terrascript.resource.hashicorp.aws import aws_xray_sampling_rule
def test_datasource_import():
from terrascript.data.hashicorp.aws import aws_acm_certificate
from terrascript.data.hashicorp.aws import aws_acmpca_certificate
from terrascript.data.hashicorp.aws import aws_acmpca_certificate_authority
from terrascript.data.hashicorp.aws import aws_alb
from terrascript.data.hashicorp.aws import aws_alb_listener
from terrascript.data.hashicorp.aws import aws_alb_target_group
from terrascript.data.hashicorp.aws import aws_ami
from terrascript.data.hashicorp.aws import aws_ami_ids
from terrascript.data.hashicorp.aws import aws_api_gateway_api_key
from terrascript.data.hashicorp.aws import aws_api_gateway_domain_name
from terrascript.data.hashicorp.aws import aws_api_gateway_resource
from terrascript.data.hashicorp.aws import aws_api_gateway_rest_api
from terrascript.data.hashicorp.aws import aws_api_gateway_vpc_link
from terrascript.data.hashicorp.aws import aws_apigatewayv2_api
from terrascript.data.hashicorp.aws import aws_apigatewayv2_apis
from terrascript.data.hashicorp.aws import aws_appmesh_mesh
from terrascript.data.hashicorp.aws import aws_appmesh_virtual_service
from terrascript.data.hashicorp.aws import aws_arn
from terrascript.data.hashicorp.aws import aws_autoscaling_group
from terrascript.data.hashicorp.aws import aws_autoscaling_groups
from terrascript.data.hashicorp.aws import aws_availability_zone
from terrascript.data.hashicorp.aws import aws_availability_zones
from terrascript.data.hashicorp.aws import aws_backup_plan
from terrascript.data.hashicorp.aws import aws_backup_selection
from terrascript.data.hashicorp.aws import aws_backup_vault
from terrascript.data.hashicorp.aws import aws_batch_compute_environment
from terrascript.data.hashicorp.aws import aws_batch_job_queue
from terrascript.data.hashicorp.aws import aws_billing_service_account
from terrascript.data.hashicorp.aws import aws_caller_identity
from terrascript.data.hashicorp.aws import aws_canonical_user_id
from terrascript.data.hashicorp.aws import aws_cloudformation_export
from terrascript.data.hashicorp.aws import aws_cloudformation_stack
from terrascript.data.hashicorp.aws import aws_cloudformation_type
from terrascript.data.hashicorp.aws import aws_cloudfront_cache_policy
from terrascript.data.hashicorp.aws import aws_cloudfront_distribution
from terrascript.data.hashicorp.aws import aws_cloudfront_function
from terrascript.data.hashicorp.aws import (
aws_cloudfront_log_delivery_canonical_user_id,
)
from terrascript.data.hashicorp.aws import aws_cloudfront_origin_request_policy
from terrascript.data.hashicorp.aws import aws_cloudhsm_v2_cluster
from terrascript.data.hashicorp.aws import aws_cloudtrail_service_account
from terrascript.data.hashicorp.aws import aws_cloudwatch_event_connection
from terrascript.data.hashicorp.aws import aws_cloudwatch_event_source
from terrascript.data.hashicorp.aws import aws_cloudwatch_log_group
from terrascript.data.hashicorp.aws import aws_cloudwatch_log_groups
from terrascript.data.hashicorp.aws import aws_codeartifact_authorization_token
from terrascript.data.hashicorp.aws import aws_codeartifact_repository_endpoint
from terrascript.data.hashicorp.aws import aws_codecommit_repository
from terrascript.data.hashicorp.aws import aws_codestarconnections_connection
from terrascript.data.hashicorp.aws import aws_cognito_user_pools
from terrascript.data.hashicorp.aws import aws_connect_contact_flow
from terrascript.data.hashicorp.aws import aws_connect_instance
from terrascript.data.hashicorp.aws import aws_cur_report_definition
from terrascript.data.hashicorp.aws import aws_customer_gateway
from terrascript.data.hashicorp.aws import aws_db_cluster_snapshot
from terrascript.data.hashicorp.aws import aws_db_event_categories
from terrascript.data.hashicorp.aws import aws_db_instance
from terrascript.data.hashicorp.aws import aws_db_snapshot
from terrascript.data.hashicorp.aws import aws_db_subnet_group
from terrascript.data.hashicorp.aws import aws_default_tags
from terrascript.data.hashicorp.aws import aws_directory_service_directory
from terrascript.data.hashicorp.aws import aws_docdb_engine_version
from terrascript.data.hashicorp.aws import aws_docdb_orderable_db_instance
from terrascript.data.hashicorp.aws import aws_dx_connection
from terrascript.data.hashicorp.aws import aws_dx_gateway
from terrascript.data.hashicorp.aws import aws_dx_location
from terrascript.data.hashicorp.aws import aws_dx_locations
from terrascript.data.hashicorp.aws import aws_dynamodb_table
from terrascript.data.hashicorp.aws import aws_ebs_default_kms_key
from terrascript.data.hashicorp.aws import aws_ebs_encryption_by_default
from terrascript.data.hashicorp.aws import aws_ebs_snapshot
from terrascript.data.hashicorp.aws import aws_ebs_snapshot_ids
from terrascript.data.hashicorp.aws import aws_ebs_volume
from terrascript.data.hashicorp.aws import aws_ebs_volumes
from terrascript.data.hashicorp.aws import aws_ec2_coip_pool
from terrascript.data.hashicorp.aws import aws_ec2_coip_pools
from terrascript.data.hashicorp.aws import aws_ec2_instance_type
from terrascript.data.hashicorp.aws import aws_ec2_instance_type_offering
from terrascript.data.hashicorp.aws import aws_ec2_instance_type_offerings
from terrascript.data.hashicorp.aws import aws_ec2_local_gateway
from terrascript.data.hashicorp.aws import aws_ec2_local_gateway_route_table
from terrascript.data.hashicorp.aws import aws_ec2_local_gateway_route_tables
from terrascript.data.hashicorp.aws import aws_ec2_local_gateway_virtual_interface
from terrascript.data.hashicorp.aws import (
aws_ec2_local_gateway_virtual_interface_group,
)
from terrascript.data.hashicorp.aws import (
aws_ec2_local_gateway_virtual_interface_groups,
)
from terrascript.data.hashicorp.aws import aws_ec2_local_gateways
from terrascript.data.hashicorp.aws import aws_ec2_managed_prefix_list
from terrascript.data.hashicorp.aws import aws_ec2_spot_price
from terrascript.data.hashicorp.aws import aws_ec2_transit_gateway
from terrascript.data.hashicorp.aws import (
aws_ec2_transit_gateway_dx_gateway_attachment,
)
from terrascript.data.hashicorp.aws import (
aws_ec2_transit_gateway_peering_attachment,
)
from terrascript.data.hashicorp.aws import aws_ec2_transit_gateway_route_table
from terrascript.data.hashicorp.aws import aws_ec2_transit_gateway_route_tables
from terrascript.data.hashicorp.aws import aws_ec2_transit_gateway_vpc_attachment
from terrascript.data.hashicorp.aws import aws_ec2_transit_gateway_vpn_attachment
from terrascript.data.hashicorp.aws import aws_ecr_authorization_token
from terrascript.data.hashicorp.aws import aws_ecr_image
from terrascript.data.hashicorp.aws import aws_ecr_repository
from terrascript.data.hashicorp.aws import aws_ecs_cluster
from terrascript.data.hashicorp.aws import aws_ecs_container_definition
from terrascript.data.hashicorp.aws import aws_ecs_service
from terrascript.data.hashicorp.aws import aws_ecs_task_definition
from terrascript.data.hashicorp.aws import aws_efs_access_point
from terrascript.data.hashicorp.aws import aws_efs_access_points
from terrascript.data.hashicorp.aws import aws_efs_file_system
from terrascript.data.hashicorp.aws import aws_efs_mount_target
from terrascript.data.hashicorp.aws import aws_eip
from terrascript.data.hashicorp.aws import aws_eks_addon
from terrascript.data.hashicorp.aws import aws_eks_cluster
from terrascript.data.hashicorp.aws import aws_eks_cluster_auth
from terrascript.data.hashicorp.aws import aws_eks_clusters
from terrascript.data.hashicorp.aws import aws_eks_node_group
from terrascript.data.hashicorp.aws import aws_eks_node_groups
from terrascript.data.hashicorp.aws import aws_elastic_beanstalk_application
from terrascript.data.hashicorp.aws import aws_elastic_beanstalk_hosted_zone
from terrascript.data.hashicorp.aws import aws_elastic_beanstalk_solution_stack
from terrascript.data.hashicorp.aws import aws_elasticache_cluster
from terrascript.data.hashicorp.aws import aws_elasticache_replication_group
from terrascript.data.hashicorp.aws import aws_elasticache_user
from terrascript.data.hashicorp.aws import aws_elasticsearch_domain
from terrascript.data.hashicorp.aws import aws_elb
from terrascript.data.hashicorp.aws import aws_elb_hosted_zone_id
from terrascript.data.hashicorp.aws import aws_elb_service_account
from terrascript.data.hashicorp.aws import aws_globalaccelerator_accelerator
from terrascript.data.hashicorp.aws import aws_glue_connection
from terrascript.data.hashicorp.aws import aws_glue_data_catalog_encryption_settings
from terrascript.data.hashicorp.aws import aws_glue_script
from terrascript.data.hashicorp.aws import aws_guardduty_detector
from terrascript.data.hashicorp.aws import aws_iam_account_alias
from terrascript.data.hashicorp.aws import aws_iam_group
from terrascript.data.hashicorp.aws import aws_iam_instance_profile
from terrascript.data.hashicorp.aws import aws_iam_policy
from terrascript.data.hashicorp.aws import aws_iam_policy_document
from terrascript.data.hashicorp.aws import aws_iam_role
from terrascript.data.hashicorp.aws import aws_iam_roles
from terrascript.data.hashicorp.aws import aws_iam_server_certificate
from terrascript.data.hashicorp.aws import aws_iam_session_context
from terrascript.data.hashicorp.aws import aws_iam_user
from terrascript.data.hashicorp.aws import aws_iam_users
from terrascript.data.hashicorp.aws import aws_identitystore_group
from terrascript.data.hashicorp.aws import aws_identitystore_user
from terrascript.data.hashicorp.aws import aws_imagebuilder_component
from terrascript.data.hashicorp.aws import (
aws_imagebuilder_distribution_configuration,
)
from terrascript.data.hashicorp.aws import aws_imagebuilder_image
from terrascript.data.hashicorp.aws import aws_imagebuilder_image_pipeline
from terrascript.data.hashicorp.aws import aws_imagebuilder_image_recipe
from terrascript.data.hashicorp.aws import (
aws_imagebuilder_infrastructure_configuration,
)
from terrascript.data.hashicorp.aws import aws_inspector_rules_packages
from terrascript.data.hashicorp.aws import aws_instance
from terrascript.data.hashicorp.aws import aws_instances
from terrascript.data.hashicorp.aws import aws_internet_gateway
from terrascript.data.hashicorp.aws import aws_iot_endpoint
from terrascript.data.hashicorp.aws import aws_ip_ranges
from terrascript.data.hashicorp.aws import aws_kinesis_stream
from terrascript.data.hashicorp.aws import aws_kinesis_stream_consumer
from terrascript.data.hashicorp.aws import aws_kms_alias
from terrascript.data.hashicorp.aws import aws_kms_ciphertext
from terrascript.data.hashicorp.aws import aws_kms_key
from terrascript.data.hashicorp.aws import aws_kms_public_key
from terrascript.data.hashicorp.aws import aws_kms_secret
from terrascript.data.hashicorp.aws import aws_kms_secrets
from terrascript.data.hashicorp.aws import aws_lakeformation_data_lake_settings
from terrascript.data.hashicorp.aws import aws_lakeformation_permissions
from terrascript.data.hashicorp.aws import aws_lakeformation_resource
from terrascript.data.hashicorp.aws import aws_lambda_alias
from terrascript.data.hashicorp.aws import aws_lambda_code_signing_config
from terrascript.data.hashicorp.aws import aws_lambda_function
from terrascript.data.hashicorp.aws import aws_lambda_invocation
from terrascript.data.hashicorp.aws import aws_lambda_layer_version
from terrascript.data.hashicorp.aws import aws_launch_configuration
from terrascript.data.hashicorp.aws import aws_launch_template
from terrascript.data.hashicorp.aws import aws_lb
from terrascript.data.hashicorp.aws import aws_lb_listener
from terrascript.data.hashicorp.aws import aws_lb_target_group
from terrascript.data.hashicorp.aws import aws_lex_bot
from terrascript.data.hashicorp.aws import aws_lex_bot_alias
from terrascript.data.hashicorp.aws import aws_lex_intent
from terrascript.data.hashicorp.aws import aws_lex_slot_type
from terrascript.data.hashicorp.aws import aws_mq_broker
from terrascript.data.hashicorp.aws import aws_msk_broker_nodes
from terrascript.data.hashicorp.aws import aws_msk_cluster
from terrascript.data.hashicorp.aws import aws_msk_configuration
from terrascript.data.hashicorp.aws import aws_msk_kafka_version
from terrascript.data.hashicorp.aws import aws_nat_gateway
from terrascript.data.hashicorp.aws import aws_neptune_engine_version
from terrascript.data.hashicorp.aws import aws_neptune_orderable_db_instance
from terrascript.data.hashicorp.aws import aws_network_acls
from terrascript.data.hashicorp.aws import aws_network_interface
from terrascript.data.hashicorp.aws import aws_network_interfaces
from terrascript.data.hashicorp.aws import (
aws_organizations_delegated_administrators,
)
from terrascript.data.hashicorp.aws import aws_organizations_delegated_services
from terrascript.data.hashicorp.aws import aws_organizations_organization
from terrascript.data.hashicorp.aws import aws_organizations_organizational_units
from terrascript.data.hashicorp.aws import aws_outposts_outpost
from terrascript.data.hashicorp.aws import aws_outposts_outpost_instance_type
from terrascript.data.hashicorp.aws import aws_outposts_outpost_instance_types
from terrascript.data.hashicorp.aws import aws_outposts_outposts
from terrascript.data.hashicorp.aws import aws_outposts_site
from terrascript.data.hashicorp.aws import aws_outposts_sites
from terrascript.data.hashicorp.aws import aws_partition
from terrascript.data.hashicorp.aws import aws_prefix_list
from terrascript.data.hashicorp.aws import aws_pricing_product
from terrascript.data.hashicorp.aws import aws_qldb_ledger
from terrascript.data.hashicorp.aws import aws_ram_resource_share
from terrascript.data.hashicorp.aws import aws_rds_certificate
from terrascript.data.hashicorp.aws import aws_rds_cluster
from terrascript.data.hashicorp.aws import aws_rds_engine_version
from terrascript.data.hashicorp.aws import aws_rds_orderable_db_instance
from terrascript.data.hashicorp.aws import aws_redshift_cluster
from terrascript.data.hashicorp.aws import aws_redshift_orderable_cluster
from terrascript.data.hashicorp.aws import aws_redshift_service_account
from terrascript.data.hashicorp.aws import aws_region
from terrascript.data.hashicorp.aws import aws_regions
from terrascript.data.hashicorp.aws import aws_resourcegroupstaggingapi_resources
from terrascript.data.hashicorp.aws import aws_route
from terrascript.data.hashicorp.aws import aws_route53_delegation_set
from terrascript.data.hashicorp.aws import aws_route53_resolver_endpoint
from terrascript.data.hashicorp.aws import aws_route53_resolver_rule
from terrascript.data.hashicorp.aws import aws_route53_resolver_rules
from terrascript.data.hashicorp.aws import aws_route53_zone
from terrascript.data.hashicorp.aws import aws_route_table
from terrascript.data.hashicorp.aws import aws_route_tables
from terrascript.data.hashicorp.aws import aws_s3_bucket
from terrascript.data.hashicorp.aws import aws_s3_bucket_object
from terrascript.data.hashicorp.aws import aws_s3_bucket_objects
from terrascript.data.hashicorp.aws import aws_sagemaker_prebuilt_ecr_image
from terrascript.data.hashicorp.aws import aws_secretsmanager_secret
from terrascript.data.hashicorp.aws import aws_secretsmanager_secret_rotation
from terrascript.data.hashicorp.aws import aws_secretsmanager_secret_version
from terrascript.data.hashicorp.aws import aws_security_group
from terrascript.data.hashicorp.aws import aws_security_groups
from terrascript.data.hashicorp.aws import (
aws_serverlessapplicationrepository_application,
)
from terrascript.data.hashicorp.aws import aws_service_discovery_dns_namespace
from terrascript.data.hashicorp.aws import aws_servicecatalog_constraint
from terrascript.data.hashicorp.aws import aws_servicecatalog_launch_paths
from terrascript.data.hashicorp.aws import aws_servicecatalog_portfolio
from terrascript.data.hashicorp.aws import aws_servicecatalog_portfolio_constraints
from terrascript.data.hashicorp.aws import aws_servicecatalog_product
from terrascript.data.hashicorp.aws import aws_servicequotas_service
from terrascript.data.hashicorp.aws import aws_servicequotas_service_quota
from terrascript.data.hashicorp.aws import aws_sfn_activity
from terrascript.data.hashicorp.aws import aws_sfn_state_machine
from terrascript.data.hashicorp.aws import aws_signer_signing_job
from terrascript.data.hashicorp.aws import aws_signer_signing_profile
from terrascript.data.hashicorp.aws import aws_sns_topic
from terrascript.data.hashicorp.aws import aws_sqs_queue
from terrascript.data.hashicorp.aws import aws_ssm_document
from terrascript.data.hashicorp.aws import aws_ssm_parameter
from terrascript.data.hashicorp.aws import aws_ssm_patch_baseline
from terrascript.data.hashicorp.aws import aws_ssoadmin_instances
from terrascript.data.hashicorp.aws import aws_ssoadmin_permission_set
from terrascript.data.hashicorp.aws import aws_storagegateway_local_disk
from terrascript.data.hashicorp.aws import aws_subnet
from terrascript.data.hashicorp.aws import aws_subnet_ids
from terrascript.data.hashicorp.aws import aws_subnets
from terrascript.data.hashicorp.aws import aws_transfer_server
from terrascript.data.hashicorp.aws import aws_vpc
from terrascript.data.hashicorp.aws import aws_vpc_dhcp_options
from terrascript.data.hashicorp.aws import aws_vpc_endpoint
from terrascript.data.hashicorp.aws import aws_vpc_endpoint_service
from terrascript.data.hashicorp.aws import aws_vpc_peering_connection
from terrascript.data.hashicorp.aws import aws_vpc_peering_connections
from terrascript.data.hashicorp.aws import aws_vpcs
from terrascript.data.hashicorp.aws import aws_vpn_gateway
from terrascript.data.hashicorp.aws import aws_waf_ipset
from terrascript.data.hashicorp.aws import aws_waf_rate_based_rule
from terrascript.data.hashicorp.aws import aws_waf_rule
from terrascript.data.hashicorp.aws import aws_waf_web_acl
from terrascript.data.hashicorp.aws import aws_wafregional_ipset
from terrascript.data.hashicorp.aws import aws_wafregional_rate_based_rule
from terrascript.data.hashicorp.aws import aws_wafregional_rule
from terrascript.data.hashicorp.aws import aws_wafregional_web_acl
from terrascript.data.hashicorp.aws import aws_wafv2_ip_set
from terrascript.data.hashicorp.aws import aws_wafv2_regex_pattern_set
from terrascript.data.hashicorp.aws import aws_wafv2_rule_group
from terrascript.data.hashicorp.aws import aws_wafv2_web_acl
from terrascript.data.hashicorp.aws import aws_workspaces_bundle
from terrascript.data.hashicorp.aws import aws_workspaces_directory
from terrascript.data.hashicorp.aws import aws_workspaces_image
from terrascript.data.hashicorp.aws import aws_workspaces_workspace
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.hashicorp.aws
#
# t = terrascript.provider.hashicorp.aws.aws()
# s = str(t)
#
# assert 'https://github.com/hashicorp/terraform-provider-aws' in s
# assert '3.60.0' in s
| 36.198149
| 88
| 0.828411
| 10,824
| 86,043
| 6.29102
| 0.060698
| 0.19103
| 0.285488
| 0.333069
| 0.953638
| 0.95073
| 0.949438
| 0.939951
| 0.621611
| 0.238509
| 0
| 0.002538
| 0.129784
| 86,043
| 2,376
| 89
| 36.213384
| 0.906886
| 0.005625
| 0
| 0.079937
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000421
| 0
| 1
| 0.002351
| true
| 0.000784
| 0.84953
| 0
| 0.851881
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
6871206b107c77e5010b872990ec9f121aef422a
| 82
|
py
|
Python
|
main/settings.py
|
mcXrd/weatherforecast
|
ff2d4b0ee520c7691f331249e004514d2d93ea09
|
[
"MIT"
] | null | null | null |
main/settings.py
|
mcXrd/weatherforecast
|
ff2d4b0ee520c7691f331249e004514d2d93ea09
|
[
"MIT"
] | null | null | null |
main/settings.py
|
mcXrd/weatherforecast
|
ff2d4b0ee520c7691f331249e004514d2d93ea09
|
[
"MIT"
] | null | null | null |
from secrets import OPENWEATHER_API_KEY
OPENWEATHER_API_KEY = OPENWEATHER_API_KEY
| 27.333333
| 41
| 0.902439
| 12
| 82
| 5.666667
| 0.5
| 0.617647
| 0.75
| 0.823529
| 0.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085366
| 82
| 2
| 42
| 41
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
68a08971afef0174acae7b307a9a271a10ee33c3
| 159
|
py
|
Python
|
carling/io/__init__.py
|
mc-digital/carling
|
7d335741731bca7177bb14483f4ee48512109a49
|
[
"Apache-2.0"
] | 6
|
2020-11-26T02:24:17.000Z
|
2021-02-22T02:18:32.000Z
|
carling/io/__init__.py
|
mc-digital/carling
|
7d335741731bca7177bb14483f4ee48512109a49
|
[
"Apache-2.0"
] | 19
|
2021-02-19T08:00:57.000Z
|
2022-03-22T05:22:57.000Z
|
carling/io/__init__.py
|
mc-digital/carling
|
7d335741731bca7177bb14483f4ee48512109a49
|
[
"Apache-2.0"
] | 1
|
2021-03-25T04:40:48.000Z
|
2021-03-25T04:40:48.000Z
|
from .avro_schema import generate_avro_schema_from_template, load_avro_schema
__all__ = (
"generate_avro_schema_from_template",
"load_avro_schema",
)
| 22.714286
| 77
| 0.805031
| 21
| 159
| 5.285714
| 0.380952
| 0.45045
| 0.324324
| 0.396396
| 0.792793
| 0.792793
| 0.792793
| 0.792793
| 0
| 0
| 0
| 0
| 0.125786
| 159
| 6
| 78
| 26.5
| 0.798561
| 0
| 0
| 0
| 1
| 0
| 0.314465
| 0.213836
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
68a87bf6e820376223f0144892cf1b17ae900203
| 42
|
py
|
Python
|
language-python-test/test/features/operators/isnot.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
language-python-test/test/features/operators/isnot.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
language-python-test/test/features/operators/isnot.py
|
wbadart/language-python
|
6c048c215ff7fe4a5d5cc36ba3c17a666af74821
|
[
"BSD-3-Clause"
] | null | null | null |
3 is not 4
3 is not 3
3 is not 4 is not 5
| 10.5
| 19
| 0.642857
| 15
| 42
| 1.8
| 0.333333
| 0.740741
| 0.666667
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 0.357143
| 42
| 3
| 20
| 14
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d79ae6d785f90292f01ef8e84002a05c5652b9b7
| 164
|
py
|
Python
|
calculator/exception/__init__.py
|
kamilcieslik/test_house_price_lib
|
98a9c9ada05b7cac1e9b835cc15031619cfa8e13
|
[
"MIT"
] | null | null | null |
calculator/exception/__init__.py
|
kamilcieslik/test_house_price_lib
|
98a9c9ada05b7cac1e9b835cc15031619cfa8e13
|
[
"MIT"
] | null | null | null |
calculator/exception/__init__.py
|
kamilcieslik/test_house_price_lib
|
98a9c9ada05b7cac1e9b835cc15031619cfa8e13
|
[
"MIT"
] | null | null | null |
from .construction_year_violation_exception import ConstructionYearViolationException
from .flat_parameter_mismatch_exception import FlatParameterMismatchException
| 54.666667
| 85
| 0.939024
| 14
| 164
| 10.571429
| 0.785714
| 0.202703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 164
| 2
| 86
| 82
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d7a21b0beee29d6dc070d9304d24c4d79a19a8e2
| 46,881
|
py
|
Python
|
tests/test_eda/test_no_pdist_sdm.py
|
logstar/scedar
|
f8cd7dab9f885a0c58d127a9ee3b143d7e4bae06
|
[
"MIT"
] | 17
|
2018-07-24T08:07:03.000Z
|
2020-01-14T11:06:31.000Z
|
tests/test_eda/test_no_pdist_sdm.py
|
benstear/scedar
|
056fc2fd1bdb2473700e4c24ec4dab3996f2b5fc
|
[
"MIT"
] | 2
|
2021-05-30T15:40:56.000Z
|
2021-11-08T08:38:42.000Z
|
tests/test_eda/test_no_pdist_sdm.py
|
benstear/scedar
|
056fc2fd1bdb2473700e4c24ec4dab3996f2b5fc
|
[
"MIT"
] | 8
|
2018-08-23T01:06:59.000Z
|
2019-11-07T22:19:47.000Z
|
import numpy as np
import matplotlib as mpl
mpl.use("agg", warn=False) # noqa
import matplotlib.pyplot as plt
import seaborn as sns
import sklearn.metrics.pairwise
import scipy.cluster.hierarchy as sch
import scipy.sparse as spsp
import scedar.eda as eda
import pytest
class TestNoPdistSampleDistanceMatrix(object):
"""docstring for TestSampleDistanceMatrix"""
x_3x2 = spsp.csr_matrix([[0, 0], [1, 1], [2, 2]])
x_2x4_spsp = spsp.csr_matrix(np.array([[0, 1, 2, 3], [1, 2, 0, 6]]))
x_2x4_arr = np.array([[0, 1, 2, 3], [1, 2, 0, 6]])
def test_valid_init(self):
sdm = eda.SampleDistanceMatrix(self.x_3x2, metric='euclidean',
use_pdist=False)
with pytest.raises(ValueError) as excinfo:
sdm.d
with pytest.raises(ValueError) as excinfo:
sdm3 = eda.SampleDistanceMatrix(
self.x_2x4_spsp, metric='correlation', use_pdist=False,
nprocs=5).d
dist_mat = np.array([[0, np.sqrt(2), np.sqrt(8)],
[np.sqrt(2), 0, np.sqrt(2)],
[np.sqrt(8), np.sqrt(2), 0]])
with pytest.raises(ValueError) as excinfo:
sdm4 = eda.SampleDistanceMatrix(
self.x_3x2, dist_mat, use_pdist=False)
sdm5 = eda.SampleDistanceMatrix([[1, 2]], metric='euclidean',
use_pdist=False)
assert sdm5.tsne(n_iter=250).shape == (1, 2)
def test_empty_init(self):
with pytest.raises(ValueError) as excinfo:
eda.SampleDistanceMatrix(np.empty(0), metric='euclidean')
sdm = eda.SampleDistanceMatrix(
np.empty((0, 0)), metric='euclidean', use_pdist=False)
assert len(sdm.sids) == 0
assert len(sdm.fids) == 0
assert sdm._x.shape == (0, 0)
with pytest.raises(ValueError) as excinfo:
assert sdm._d.shape == (0, 0)
with pytest.raises(ValueError) as excinfo:
assert sdm._col_sorted_d.shape == (0, 0)
with pytest.raises(ValueError) as excinfo:
assert sdm._col_argsorted_d.shape == (0, 0)
assert sdm.tsne(n_iter=250).shape == (0, 0)
def test_init_wrong_metric(self):
# when d is None, metric cannot be precomputed
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(
self.x_3x2, metric='precomputed', use_pdist=False)
# lazy load d
eda.SampleDistanceMatrix(self.x_3x2, metric='unknown', use_pdist=False)
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(
self.x_3x2, metric='unknown', use_pdist=False).d
eda.SampleDistanceMatrix(self.x_3x2, metric=1, use_pdist=False)
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(self.x_3x2, metric=1, use_pdist=False).d
eda.SampleDistanceMatrix(self.x_3x2, metric=1., use_pdist=False)
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(self.x_3x2, metric=1., use_pdist=False).d
eda.SampleDistanceMatrix(self.x_3x2, metric=('euclidean', ),
use_pdist=False)
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(
self.x_3x2, metric=('euclidean', ), use_pdist=False).d
eda.SampleDistanceMatrix(self.x_3x2, metric=['euclidean'],
use_pdist=False)
with pytest.raises(Exception) as excinfo:
eda.SampleDistanceMatrix(self.x_3x2, metric=['euclidean'],
use_pdist=False).d
def test_sort_features(self):
x = np.array([[0, 2, 30, 10],
[1, 2, 30, 10],
[0, 3, 33, 10],
[2, 5, 30, 7],
[2, 5, 30, 9]])
x = spsp.csr_matrix(x)
sdm = eda.SampleDistanceMatrix(
x, metric='euclidean', use_pdist=False)
sdm2 = eda.SampleDistanceMatrix(
x, metric='euclidean', use_pdist=False)
sdm2.sort_features(fdist_metric='euclidean', optimal_ordering=True)
assert sdm2.fids == [2, 3, 1, 0]
def test_get_tsne_kv(self):
tmet = 'euclidean'
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
assert sdm.get_tsne_kv(1) is None
assert sdm.get_tsne_kv(1) is None
assert sdm.get_tsne_kv(0) is None
assert sdm.get_tsne_kv(2) is None
def test_get_tsne_kv_wrong_args(self):
tmet = 'euclidean'
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
with pytest.raises(ValueError) as excinfo:
sdm.get_tsne_kv([1, 2, 3])
with pytest.raises(ValueError) as excinfo:
sdm.get_tsne_kv({1: 2})
def test_put_tsne_wrong_args(self):
tmet = 'euclidean'
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
with pytest.raises(ValueError) as excinfo:
sdm.put_tsne(1, [1, 2, 3])
with pytest.raises(ValueError) as excinfo:
sdm.put_tsne({1: 2}, [1, 2, 3])
def test_tsne(self):
tmet = 'euclidean'
tsne_kwargs = {'metric': tmet, 'n_iter': 250,
'random_state': 123}
ref_tsne = eda.tsne(self.x_3x2.toarray(), **tsne_kwargs)
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
assert sdm.tsne_lut == {}
tsne1 = sdm.tsne(n_iter=250, random_state=123)
np.testing.assert_allclose(ref_tsne, tsne1)
np.testing.assert_allclose(ref_tsne, sdm._last_tsne)
assert tsne1.shape == (3, 2)
assert len(sdm.tsne_lut) == 1
tsne2 = sdm.tsne(store_res=False, **tsne_kwargs)
np.testing.assert_allclose(ref_tsne, tsne2)
assert len(sdm.tsne_lut) == 1
with pytest.raises(Exception) as excinfo:
wrong_metric_kwargs = tsne_kwargs.copy()
wrong_metric_kwargs['metric'] = 'correlation'
sdm.tsne(**wrong_metric_kwargs)
assert len(sdm.tsne_lut) == 1
with pytest.raises(Exception) as excinfo:
sdm.tsne(metric='precomputed')
tsne3 = sdm.tsne(store_res=True, **tsne_kwargs)
np.testing.assert_allclose(ref_tsne, tsne3)
# (param, ind) as key, so same params get an extra entry.
assert len(sdm.tsne_lut) == 2
np.testing.assert_allclose(tsne1, sdm.get_tsne_kv(1)[1])
np.testing.assert_allclose(tsne3, sdm.get_tsne_kv(2)[1])
assert tsne1 is not sdm.get_tsne_kv(1)[1]
assert tsne3 is not sdm.get_tsne_kv(2)[1]
tsne4 = sdm.tsne(store_res=True, n_iter=250, random_state=123)
np.testing.assert_allclose(ref_tsne, tsne4)
np.testing.assert_allclose(sdm.get_tsne_kv(3)[1], tsne4)
assert len(sdm.tsne_lut) == 3
tsne5 = sdm.tsne(store_res=True, n_iter=251, random_state=123)
tsne6 = sdm.tsne(store_res=True, n_iter=251, random_state=123)
np.testing.assert_allclose(tsne6, tsne5)
np.testing.assert_allclose(tsne5, sdm.get_tsne_kv(4)[1])
np.testing.assert_allclose(tsne6, sdm.get_tsne_kv(5)[1])
assert len(sdm.tsne_lut) == 5
def test_par_tsne(self):
tmet = 'euclidean'
param_list = [{'metric': tmet, 'n_iter': 250, 'random_state': 123},
{'metric': tmet, 'n_iter': 250, 'random_state': 125},
{'metric': tmet, 'n_iter': 250, 'random_state': 123}]
ref_tsne = eda.tsne(self.x_3x2.toarray(), **param_list[0])
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
# If not store, should not update lut
sdm.par_tsne(param_list, store_res=False)
assert sdm._lazy_load_last_tsne is None
assert sdm.tsne_lut == {}
# store results
tsne1, tsne2, tsne3 = sdm.par_tsne(param_list)
np.testing.assert_allclose(ref_tsne, tsne1)
np.testing.assert_allclose(ref_tsne, tsne3)
np.testing.assert_allclose(ref_tsne, sdm._last_tsne)
assert tsne1.shape == (3, 2)
assert len(sdm.tsne_lut) == 3
np.testing.assert_allclose(tsne1, sdm.get_tsne_kv(1)[1])
np.testing.assert_allclose(tsne2, sdm.get_tsne_kv(2)[1])
np.testing.assert_allclose(tsne3, sdm.get_tsne_kv(3)[1])
np.testing.assert_allclose(tsne3, sdm.get_tsne_kv(1)[1])
def test_par_tsne_mp(self):
tmet = 'euclidean'
param_list = [{'metric': tmet, 'n_iter': 250, 'random_state': 123},
{'metric': tmet, 'n_iter': 250, 'random_state': 125},
{'metric': tmet, 'n_iter': 250, 'random_state': 123}]
ref_tsne = eda.tsne(self.x_3x2.toarray(), **param_list[0])
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
# If not store, should not update lut
sdm.par_tsne(param_list, store_res=False, nprocs=3)
assert sdm._lazy_load_last_tsne is None
assert sdm.tsne_lut == {}
# store results
tsne1, tsne2, tsne3 = sdm.par_tsne(param_list, nprocs=3)
np.testing.assert_allclose(ref_tsne, tsne1)
np.testing.assert_allclose(ref_tsne, tsne3)
np.testing.assert_allclose(ref_tsne, sdm._last_tsne)
assert tsne1.shape == (3, 2)
assert len(sdm.tsne_lut) == 3
np.testing.assert_allclose(tsne1, sdm.get_tsne_kv(1)[1])
np.testing.assert_allclose(tsne2, sdm.get_tsne_kv(2)[1])
np.testing.assert_allclose(tsne3, sdm.get_tsne_kv(3)[1])
np.testing.assert_allclose(tsne3, sdm.get_tsne_kv(1)[1])
def test_tsne_default_init(self):
tmet = 'euclidean'
tsne_kwargs = {'metric': tmet, 'n_iter': 250,
'random_state': 123}
ref_tsne = eda.tsne(self.x_3x2.toarray(), **tsne_kwargs)
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
init_tsne = sdm._last_tsne
assert init_tsne.shape == (3, 2)
assert len(sdm.tsne_lut) == 1
tsne2 = sdm.tsne(store_res=True, **tsne_kwargs)
np.testing.assert_allclose(ref_tsne, tsne2)
assert len(sdm.tsne_lut) == 2
def test_ind_x(self):
sids = list("abcdef")
fids = list(range(10, 20))
sdm = eda.SampleDistanceMatrix(
np.random.ranf(60).reshape(6, -1),
sids=sids, fids=fids, use_pdist=False)
# select sf
ss_sdm = sdm.ind_x([0, 5], list(range(9)))
assert ss_sdm._x.shape == (2, 9)
assert ss_sdm.sids == ['a', 'f']
assert ss_sdm.fids == list(range(10, 19))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select with Default
ss_sdm = sdm.ind_x()
assert ss_sdm._x.shape == (6, 10)
assert ss_sdm.sids == list("abcdef")
assert ss_sdm.fids == list(range(10, 20))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select with None
ss_sdm = sdm.ind_x(None, None)
assert ss_sdm._x.shape == (6, 10)
assert ss_sdm.sids == list("abcdef")
assert ss_sdm.fids == list(range(10, 20))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select non-existent inds
with pytest.raises(IndexError) as excinfo:
sdm.ind_x([6])
with pytest.raises(IndexError) as excinfo:
sdm.ind_x(None, ['a'])
def test_ind_x_empty(self):
sids = list("abcdef")
fids = list(range(10, 20))
sdm = eda.SampleDistanceMatrix(
np.random.ranf(60).reshape(6, -1), sids=sids, fids=fids,
use_pdist=False)
empty_s = sdm.ind_x([])
assert empty_s._x.shape == (0, 10)
with pytest.raises(Exception) as excinfo:
empty_s._d
assert empty_s._sids.shape == (0,)
assert empty_s._fids.shape == (10,)
empty_f = sdm.ind_x(None, [])
assert empty_f._x.shape == (6, 0)
with pytest.raises(Exception) as excinfo:
empty_f._d
assert empty_f._sids.shape == (6,)
assert empty_f._fids.shape == (0,)
empty_sf = sdm.ind_x([], [])
assert empty_sf._x.shape == (0, 0)
with pytest.raises(Exception) as excinfo:
empty_sf._d
assert empty_sf._sids.shape == (0,)
assert empty_sf._fids.shape == (0,)
def test_id_x(self):
sids = list("abcdef")
fids = list(range(10, 20))
sdm = eda.SampleDistanceMatrix(
np.random.ranf(60).reshape(6, -1),
sids=sids, fids=fids, use_pdist=False)
# select sf
ss_sdm = sdm.id_x(['a', 'f'], list(range(10, 15)))
assert ss_sdm._x.shape == (2, 5)
assert ss_sdm.sids == ['a', 'f']
assert ss_sdm.fids == list(range(10, 15))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select with Default
ss_sdm = sdm.id_x()
assert ss_sdm._x.shape == (6, 10)
assert ss_sdm.sids == list("abcdef")
assert ss_sdm.fids == list(range(10, 20))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select with None
ss_sdm = sdm.id_x(None, None)
assert ss_sdm._x.shape == (6, 10)
assert ss_sdm.sids == list("abcdef")
assert ss_sdm.fids == list(range(10, 20))
with pytest.raises(Exception) as excinfo:
ss_sdm.d
# select non-existent inds
# id lookup raises ValueError
with pytest.raises(ValueError) as excinfo:
sdm.id_x([6])
with pytest.raises(ValueError) as excinfo:
sdm.id_x(None, ['a'])
def test_id_x_empty(self):
sids = list("abcdef")
fids = list(range(10, 20))
sdm = eda.SampleDistanceMatrix(
np.random.ranf(60).reshape(6, -1),
sids=sids, fids=fids, use_pdist=False)
empty_s = sdm.id_x([])
assert empty_s._x.shape == (0, 10)
with pytest.raises(Exception) as excinfo:
empty_s._d
assert empty_s._sids.shape == (0,)
assert empty_s._fids.shape == (10,)
empty_f = sdm.id_x(None, [])
assert empty_f._x.shape == (6, 0)
with pytest.raises(Exception) as excinfo:
empty_f._d
assert empty_f._sids.shape == (6,)
assert empty_f._fids.shape == (0,)
empty_sf = sdm.id_x([], [])
assert empty_sf._x.shape == (0, 0)
with pytest.raises(Exception) as excinfo:
empty_sf._d
assert empty_sf._sids.shape == (0,)
assert empty_sf._fids.shape == (0,)
def test_getter(self):
tmet = 'euclidean'
sdm = eda.SampleDistanceMatrix(
self.x_3x2, metric=tmet, use_pdist=False)
with pytest.raises(Exception) as excinfo:
sdm.d
assert sdm.metric == tmet
assert sdm.tsne_lut == {}
assert sdm.tsne_lut is not sdm._tsne_lut
assert sdm.tsne_lut == sdm._tsne_lut
sdm.tsne(n_iter=250)
assert sdm.tsne_lut is not sdm._tsne_lut
for k in sdm.tsne_lut:
np.testing.assert_equal(sdm.tsne_lut[k], sdm._tsne_lut[k])
def test_s_ith_nn_d(self):
nn_sdm = eda.SampleDistanceMatrix([[0], [1], [5], [6], [10], [20]],
metric='euclidean', use_pdist=False)
with pytest.raises(ValueError) as excinfo:
nn_sdm.s_ith_nn_d(0)
def test_s_ith_nn_ind(self):
nn_sdm = eda.SampleDistanceMatrix([[0, 0, 0], [1, 1, 1], [5, 5, 5],
[6, 6, 6], [10, 10, 10],
[20, 20, 20]],
metric='euclidean',
use_pdist=False)
with pytest.raises(ValueError) as excinfo:
nn_sdm.s_ith_nn_ind(0)
def test_knn_ind_lut(self):
nn_sdm = eda.SampleDistanceMatrix([[0, 0, 0], [1, 1, 1], [5, 5, 5],
[6, 6, 6], [10, 10, 10],
[20, 20, 20]],
metric='euclidean', use_pdist=False)
with pytest.raises(ValueError) as excinfo:
nn_sdm.s_knn_ind_lut(0)
@pytest.mark.mpl_image_compare
def test_sdm_tsne_feature_gradient_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.tsne_feature_gradient_plot(
'5', figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_tsne_feature_gradient_plus10_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.tsne_feature_gradient_plot(
'5', transform=lambda x: x + 10, figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_tsne_feature_gradient_plot_sslabs(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
sdm.tsne_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
transform=lambda x: np.log(x+1),
figsize=(10, 10), s=50)
fig = sdm.tsne_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_tsne_feature_gradient_plot_sslabs_empty(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.tsne_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[],
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
def test_sdm_tsne_feature_gradient_plot_sslabs_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
# Mismatch labels
with pytest.raises(ValueError) as excinfo:
sdm.tsne_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[11],
figsize=(10, 10), s=50)
with pytest.raises(ValueError) as excinfo:
sdm.tsne_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=['i'],
figsize=(10, 10), s=50)
# labels not provided
with pytest.raises(ValueError) as excinfo:
sdm.tsne_feature_gradient_plot(
'5', selected_labels=[11], figsize=(10, 10), s=50)
def test_sdm_tsne_feature_gradient_plot_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x, sids=sids, fids=fids, use_pdist=False)
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot('5', transform=2)
# wrong labels size
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[])
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[1])
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[2])
# wrong gradient length
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot([0, 1])
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot(-1)
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot(5)
with pytest.raises(ValueError):
sdm.tsne_feature_gradient_plot('123')
@pytest.mark.mpl_image_compare
def test_sdm_tsne_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
g = x_sorted[:, 5]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
return sdm.tsne_plot(g, figsize=(10, 10), s=50)
@pytest.mark.mpl_image_compare
def test_sdm_pca_feature_gradient_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.pca_feature_gradient_plot(
'5', figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_pca_feature_gradient_plus10_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.pca_feature_gradient_plot(
'5', transform=lambda x: x + 10, figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_pca_feature_gradient_plot_sslabs(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
sdm.pca_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
transform=lambda x: np.log(x+1),
figsize=(10, 10), s=50)
fig = sdm.pca_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_pca_feature_gradient_plot_sslabs_empty(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.pca_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[],
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
def test_sdm_pca_feature_gradient_plot_sslabs_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
# Mismatch labels
with pytest.raises(ValueError) as excinfo:
sdm.pca_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[11],
figsize=(10, 10), s=50)
with pytest.raises(ValueError) as excinfo:
sdm.pca_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=['i'],
figsize=(10, 10), s=50)
# labels not provided
with pytest.raises(ValueError) as excinfo:
sdm.pca_feature_gradient_plot(
'5', selected_labels=[11], figsize=(10, 10), s=50)
def test_sdm_pca_feature_gradient_plot_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x, sids=sids, fids=fids, use_pdist=False)
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot('5', transform=2)
# wrong labels size
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[])
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[1])
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[2])
# wrong gradient length
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot([0, 1])
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot(-1)
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot(5)
with pytest.raises(ValueError):
sdm.pca_feature_gradient_plot('123')
@pytest.mark.mpl_image_compare
def test_sdm_pca_plot(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
g = x_sorted[:, 5]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
return sdm.pca_plot(gradient=g, figsize=(10, 10), s=50)
def test_pca_dim(self):
np.random.seed(123)
x5k = np.random.normal(size=5000)
sdm = eda.SampleDistanceMatrix(
x5k.reshape(20, -1), use_pdist=False)
assert sdm._pca_x.shape == (20, 20)
def test_pca_var_explained(self):
np.random.seed(123)
x5k = np.random.normal(size=5000)
sdm = eda.SampleDistanceMatrix(x5k.reshape(20, -1), use_pdist=False)
assert sdm._skd_pca.explained_variance_.shape == (20,)
assert sdm._skd_pca.explained_variance_ratio_.shape == (20,)
@pytest.mark.mpl_image_compare
def test_sdm_nopdist_umap_feature_gradient_plot_npd(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.umap_feature_gradient_plot(
'5', figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_nopdist_umap_feature_gradient_plus10_plot_npd(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.umap_feature_gradient_plot(
'5', transform=lambda x: x + 10, figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_nopdist_umap_feature_gradient_plot_npd_sslabs(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
sdm.umap_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
transform=lambda x: np.log(x+1),
figsize=(10, 10), s=50)
fig = sdm.umap_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels='a',
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
@pytest.mark.mpl_image_compare
def test_sdm_nopdist_umap_feature_gradient_plot_npd_sslabs_empty(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
fig = sdm.umap_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[],
figsize=(10, 10), s=50)
np.testing.assert_equal(sdm._x, x_sorted)
np.testing.assert_equal(sdm._sids, sids)
np.testing.assert_equal(sdm._fids, fids)
return fig
def test_sdm_umap_feature_gradient_plot_npd_sslabs_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
# Mismatch labels
with pytest.raises(ValueError) as excinfo:
sdm.umap_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=[11],
figsize=(10, 10), s=50)
with pytest.raises(ValueError) as excinfo:
sdm.umap_feature_gradient_plot(
'5', labels=list('abcdefgh'), selected_labels=['i'],
figsize=(10, 10), s=50)
# labels not provided
with pytest.raises(ValueError) as excinfo:
sdm.umap_feature_gradient_plot(
'5', selected_labels=[11], figsize=(10, 10), s=50)
def test_sdm_umap_feature_gradient_plot_npd_wrong_args(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
sdm = eda.SampleDistanceMatrix(
x, sids=sids, fids=fids, use_pdist=False)
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot('5', transform=2)
# wrong labels size
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[])
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[1])
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot('5', figsize=(10, 10),
s=50, labels=[2])
# wrong gradient length
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot([0, 1])
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot(11)
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot(-1)
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot(5)
with pytest.raises(ValueError):
sdm.umap_feature_gradient_plot('123')
@pytest.mark.mpl_image_compare
def test_sdm_nopdist_umap_plot_npd(self):
sids = list(range(8))
fids = [str(i) for i in range(10)]
np.random.seed(123)
x = np.random.ranf(80).reshape(8, -1)
x_sorted = x[np.argsort(x[:, 5])]
g = x_sorted[:, 5]
sdm = eda.SampleDistanceMatrix(
x_sorted, sids=sids, fids=fids, use_pdist=False)
return sdm.umap_plot(gradient=g, figsize=(10, 10), s=50)
def test_umap_dim(self):
np.random.seed(123)
x5k = np.random.normal(size=5000)
sdm = eda.SampleDistanceMatrix(x5k.reshape(20, -1), use_pdist=False)
assert sdm._umap_x.shape == (20, 2)
def test_s_knn_connectivity_matrix(self):
    """Exercise s_knn_connectivity_matrix across backends and metrics.

    Covers the brute-force and hnsw backends, with and without PCA,
    call-time metric overrides, and the argument-validation paths.
    """
    nn_sdm = eda.SampleDistanceMatrix(
        [[0], [1], [5]], metric='euclidean', use_pdist=False)
    # k=1: each row holds the euclidean distance to its nearest neighbor
    np.testing.assert_allclose(
        [[0, 1, 0], [1, 0, 0], [0, 4, 0]],
        nn_sdm.s_knn_connectivity_matrix(1).toarray())
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=False, use_pca=False).shape == (3, 3)
    # index_params/query_params rejected here — presumably only valid
    # with the hnsw backend (TODO confirm against implementation)
    with pytest.raises(ValueError):
        assert nn_sdm.s_knn_connectivity_matrix(
            1, use_hnsw=False, use_pca=False,
            index_params={})
    # k must be >= 1
    with pytest.raises(ValueError):
        assert nn_sdm.s_knn_connectivity_matrix(0)
    with pytest.raises(ValueError):
        assert nn_sdm.s_knn_connectivity_matrix(
            1, use_hnsw=False, use_pca=False,
            index_params=None, query_params={}).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=False).shape == (3, 3)
    # hnsw can only handle vectors with more than one non-0 elements.
    nn_sdm = eda.SampleDistanceMatrix(
        [[1, 2, 3], [2, 0, 0], [6, 0, 0]],
        metric='cosine', use_pdist=False)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=False).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=False, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=True, index_params={},
        query_params={}, verbose=True).shape == (3, 3)
    # same combinations with a euclidean-metric matrix
    nn_sdm = eda.SampleDistanceMatrix(
        [[1, 2, 3], [2, 0, 0], [6, 0, 0]],
        metric='euclidean', use_pdist=False)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=False).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=False, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=True, index_params={},
        query_params={}, verbose=True).shape == (3, 3)
    nn_sdm = eda.SampleDistanceMatrix(
        [[1, 2, 3], [2, 0, 0], [6, 0, 0]],
        metric='euclidean', use_pdist=False)
    # metric can be overridden per call
    assert nn_sdm.s_knn_connectivity_matrix(
        1, metric='cosine', use_hnsw=True, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, use_hnsw=True, use_pca=False).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, metric='cosine', use_hnsw=False, use_pca=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, metric='cosine', use_hnsw=True, use_pca=True, index_params={},
        query_params={}, verbose=True).shape == (3, 3)
    # 'correlation' metric is rejected by the hnsw backend
    with pytest.raises(ValueError):
        assert nn_sdm.s_knn_connectivity_matrix(
            1, metric='correlation', use_hnsw=True, use_pca=False,
            index_params={}, query_params={},
            verbose=True).shape == (3, 3)
    with pytest.raises(ValueError):
        assert nn_sdm.s_knn_connectivity_matrix(
            1, metric='correlation', use_hnsw=True, use_pca=True,
            index_params={}, query_params={},
            verbose=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, metric='cosine', use_hnsw=False, use_pca=True,
        verbose=True).shape == (3, 3)
    assert nn_sdm.s_knn_connectivity_matrix(
        1, metric='cosine', use_hnsw=False, use_pca=False,
        verbose=True).shape == (3, 3)
    # sparse input with explicit nmslib-style index/query parameters
    np.random.seed(123)
    x5k = spsp.csr_matrix(np.random.normal(size=5000))
    nn_sdm = eda.SampleDistanceMatrix(x5k.reshape(1000, 5), use_pdist=False)
    index_params = {
        "efConstruction": 5,
        "M": 5,
        "delaunay_type": 2,
        "post": 0,
        "indexThreadQty": 1
    }
    query_params = {
        "efSearch": 5
    }
    nn_sdm._s_knns_hnsw(1, metric='cosine',
                        index_params=index_params,
                        query_params=query_params)
    # k must be >= 1 for the hnsw helper as well
    with pytest.raises(ValueError):
        nn_sdm._s_knns_hnsw(
            0, use_pca=False, index_params={})
@pytest.mark.mpl_image_compare
def test_s_knn_graph_grad_lab(self):
    """s_knn_graph with both gradient and labels; layout is cached.

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    # repeated calls hit the cached layout rather than adding entries
    sdm.s_knn_graph(5, figsize=(5, 5))
    sdm.s_knn_graph(5, figsize=(5, 5), fa2_kwargs={})
    sdm.s_knn_graph(5, figsize=(5, 5), nx_draw_kwargs={})
    assert len(sdm._knn_ng_lut) == 1
    gradient = np.array([1] * 10 + [10] * 20)
    # BUG FIX: the original chained assignment ``labs = gradient = ...``
    # clobbered the gradient defined on the line above; labels now get
    # their own array so gradient and labels are distinct.
    labs = np.array([1] * 10 + [2] * 20)
    return sdm.s_knn_graph(5, gradient=gradient, labels=labs,
                           figsize=(5, 5),
                           alpha=0.8, random_state=123)
@pytest.mark.mpl_image_compare
def test_s_knn_graph_grad_lab_same_marker(self):
    """s_knn_graph with gradient and labels but a single marker style.

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    gradient = np.array([1] * 10 + [10] * 20)
    # BUG FIX: ``labs = gradient = ...`` previously overwrote the
    # gradient above; labels are now assigned independently.
    labs = np.array([1] * 10 + [2] * 20)
    return sdm.s_knn_graph(5, gradient=gradient, labels=labs,
                           different_label_markers=False,
                           figsize=(5, 5),
                           alpha=0.8, random_state=123)
@pytest.mark.mpl_image_compare
def test_s_knn_graph_grad_nolab(self):
    """s_knn_graph colored by a gradient, without labels.

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    # use cache
    sdm.s_knn_graph(5, figsize=(5, 5))
    sdm.s_knn_graph(5, figsize=(5, 5), fa2_kwargs={})
    sdm.s_knn_graph(5, figsize=(5, 5), nx_draw_kwargs={})
    assert len(sdm._knn_ng_lut) == 1
    gradient = np.array([1] * 10 + [10] * 20)
    return sdm.s_knn_graph(5, gradient=gradient, figsize=(5, 5),
                           alpha=0.8, random_state=123)
@pytest.mark.mpl_image_compare
def test_s_knn_graph_nograd_nolab(self):
    """s_knn_graph with neither gradient nor labels.

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    # use cache
    sdm.s_knn_graph(5, figsize=(5, 5))
    sdm.s_knn_graph(5, figsize=(5, 5), fa2_kwargs={})
    sdm.s_knn_graph(5, figsize=(5, 5), nx_draw_kwargs={})
    assert len(sdm._knn_ng_lut) == 1
    return sdm.s_knn_graph(5, figsize=(5, 5),
                           alpha=0.8, random_state=123)
@pytest.mark.mpl_image_compare
def test_s_knn_graph_nograd_lab(self):
    """s_knn_graph with labels only (distinct markers per label).

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    # use cache
    sdm.s_knn_graph(5, figsize=(5, 5))
    sdm.s_knn_graph(5, figsize=(5, 5), fa2_kwargs={})
    sdm.s_knn_graph(5, figsize=(5, 5), nx_draw_kwargs={})
    assert len(sdm._knn_ng_lut) == 1
    labs = np.array([1] * 10 + [2] * 20)
    return sdm.s_knn_graph(5, labels=labs, figsize=(5, 5),
                           alpha=0.8, random_state=123)
@pytest.mark.mpl_image_compare
def test_s_knn_graph_nograd_lab_same_marker(self):
    """s_knn_graph with labels only and a single marker style.

    Returns the figure for pytest-mpl baseline comparison.
    """
    np.random.seed(123)
    x = np.concatenate((np.random.normal(0, 1, 10),
                        np.random.normal(20, 1, 20))).reshape(30, -1)
    sdm = eda.SampleDistanceMatrix(x, metric='euclidean', use_pdist=False)
    sdm.s_knn_graph(5, figsize=(5, 5))
    assert (5, 1) in sdm._knn_ng_lut
    assert len(sdm._knn_ng_lut) == 1
    # use cache
    sdm.s_knn_graph(5, figsize=(5, 5))
    sdm.s_knn_graph(5, figsize=(5, 5), fa2_kwargs={})
    sdm.s_knn_graph(5, figsize=(5, 5), nx_draw_kwargs={})
    assert len(sdm._knn_ng_lut) == 1
    labs = np.array([1] * 10 + [2] * 20)
    return sdm.s_knn_graph(5, labels=labs, figsize=(5, 5),
                           different_label_markers=False,
                           alpha=0.8, random_state=123)
def test_knn_ind_lut(self):
    """s_knn_ind_lut returns the k nearest-neighbor indices per sample."""
    nn_sdm = eda.SampleDistanceMatrix([[0, 0, 0], [1, 1, 1], [5, 5, 5],
                                       [6, 6, 6], [10, 10, 10],
                                       [20, 20, 20]],
                                      metric='euclidean', use_pdist=False)
    # k=0 yields an empty neighbor list for every sample
    assert nn_sdm.s_knn_ind_lut(0) == dict(zip(range(6), [[]]*6))
    assert (nn_sdm.s_knn_ind_lut(1) ==
            dict(zip(range(6), [[1], [0], [3], [2], [3], [4]])))
    assert (nn_sdm.s_knn_ind_lut(2) ==
            dict(zip(range(6), [[1, 2], [0, 2], [3, 1],
                                [2, 4], [3, 2], [4, 3]])))
    assert (nn_sdm.s_knn_ind_lut(3) ==
            dict(zip(range(6), [[1, 2, 3], [0, 2, 3], [3, 1, 0],
                                [2, 4, 1], [3, 2, 1], [4, 3, 2]])))
    # k = n - 1 is the maximum valid k
    nn_sdm.s_knn_ind_lut(5)
    nn_sdm_npd = eda.SampleDistanceMatrix([[x] for x in range(20)],
                                          metric='euclidean',
                                          use_pdist=False)
    # BUG FIX: nn_sdm_pd was previously constructed with use_pdist=False,
    # identical to nn_sdm_npd, so the comparison below was vacuous. Use
    # the pairwise-distance path as the "_pd" name indicates.
    nn_sdm_pd = eda.SampleDistanceMatrix([[x] for x in range(20)],
                                         metric='euclidean',
                                         use_pdist=True)
    assert nn_sdm_npd.s_knn_ind_lut(0) == dict(zip(range(20), [[]]*20))
    assert (nn_sdm_npd.s_knn_ind_lut(10) == nn_sdm_pd.s_knn_ind_lut(10))
def test_knn_ind_lut_wrong_args(self):
    """s_knn_ind_lut rejects negative, non-integer, and too-large k.

    Fixes over the previous version: removed the duplicated ``(7)``
    case and the unused ``as excinfo`` bindings.
    """
    nn_sdm = eda.SampleDistanceMatrix([[0, 0, 0], [1, 1, 1], [5, 5, 5],
                                       [6, 6, 6], [10, 10, 10],
                                       [20, 20, 20]],
                                      metric='euclidean', use_pdist=False)
    # negative, fractional, or >= n values of k are all invalid
    for wrong_k in (-1, -0.5, 6, 6.5, 7):
        with pytest.raises(ValueError):
            nn_sdm.s_knn_ind_lut(wrong_k)
| 40.908377
| 80
| 0.574412
| 6,362
| 46,881
| 4.012575
| 0.041339
| 0.035255
| 0.056409
| 0.068239
| 0.923809
| 0.899561
| 0.884362
| 0.874804
| 0.865128
| 0.844641
| 0
| 0.049318
| 0.29673
| 46,881
| 1,145
| 81
| 40.944105
| 0.724962
| 0.016446
| 0
| 0.750776
| 0
| 0
| 0.020013
| 0
| 0
| 0
| 0
| 0
| 0.193382
| 1
| 0.053775
| false
| 0
| 0.009307
| 0
| 0.088935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0bce78965ea682b392f31d75633061fe1344627f
| 147,024
|
py
|
Python
|
test/messenger_test.py
|
AustinHellerRepo/SocketQueuedMessageFramework
|
da17577feb24fba68913f54be4e5eca6f35c9b43
|
[
"MIT"
] | 1
|
2021-12-16T01:36:31.000Z
|
2021-12-16T01:36:31.000Z
|
test/messenger_test.py
|
AustinHellerRepo/SocketQueuedMessageFramework
|
da17577feb24fba68913f54be4e5eca6f35c9b43
|
[
"MIT"
] | null | null | null |
test/messenger_test.py
|
AustinHellerRepo/SocketQueuedMessageFramework
|
da17577feb24fba68913f54be4e5eca6f35c9b43
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import unittest
from src.austin_heller_repo.socket_queued_message_framework import ClientMessenger, ServerMessenger, ClientServerMessage, ClientServerMessageTypeEnum, Structure, StructureStateEnum, StructureFactory, StructureTransitionException, StructureInfluence, SourceTypeEnum, ClientMessengerFactory, ServerMessengerFactory
from austin_heller_repo.socket import ClientSocketFactory, ServerSocketFactory, ReadWriteSocketClosedException
from austin_heller_repo.common import HostPointer
from austin_heller_repo.kafka_manager import KafkaSequentialQueueFactory, KafkaManager, KafkaWrapper, KafkaManagerFactory
from austin_heller_repo.threading import start_thread, Semaphore, SingletonMemorySequentialQueueFactory
from typing import List, Tuple, Dict, Callable, Type
import uuid
import time
from datetime import datetime
from abc import ABC, abstractmethod
import multiprocessing as mp
import matplotlib.pyplot as plt
import math
# Global switches for the tests in this module.
is_socket_debug_active = False  # verbose logging inside socket factories
is_client_messenger_debug_active = False  # verbose ClientMessenger logging
is_server_messenger_debug_active = False  # verbose ServerMessenger logging
is_kafka_debug_active = False  # verbose Kafka manager logging
# when True, server messengers use the Kafka-backed sequential queue
# instead of the in-memory singleton queue
is_kafka_sequential_queue = False
# presumably toggles matplotlib plotting in timing tests — confirm usage
is_plotted = False
def get_default_local_host_pointer() -> HostPointer:
    """Return the host pointer the test server listens on locally."""
    local_test_port = 36429
    return HostPointer(
        host_address="0.0.0.0",
        host_port=local_test_port
    )
def get_default_kafka_host_pointer() -> HostPointer:
    """Return the host pointer of the local Kafka broker."""
    kafka_broker_port = 9092
    return HostPointer(
        host_address="0.0.0.0",
        host_port=kafka_broker_port
    )
def get_default_kafka_manager_factory() -> KafkaManagerFactory:
    """Build a KafkaManagerFactory wired to the local Kafka broker.

    Uses single-partition, single-replica topics and tight polling
    intervals suitable for fast tests.
    """
    return KafkaManagerFactory(
        kafka_wrapper=KafkaWrapper(
            host_pointer=get_default_kafka_host_pointer()
        ),
        read_polling_seconds=0,
        is_cancelled_polling_seconds=0.01,
        new_topic_partitions_total=1,
        new_topic_replication_factor=1,
        remove_topic_cluster_propagation_blocking_timeout_seconds=30,
        is_debug=is_kafka_debug_active
    )
def get_default_client_messenger_factory() -> ClientMessengerFactory:
    """Build a ClientMessengerFactory targeting the local test server.

    Messages are (de)serialized via BaseClientServerMessage.
    """
    return ClientMessengerFactory(
        client_socket_factory=ClientSocketFactory(
            to_server_packet_bytes_length=4096,
            is_debug=is_socket_debug_active
        ),
        server_host_pointer=get_default_local_host_pointer(),
        client_server_message_class=BaseClientServerMessage,
        is_debug=is_client_messenger_debug_active
    )
def get_default_server_messenger_factory() -> ServerMessengerFactory:
    """Build a ServerMessengerFactory backed by a ButtonStructure.

    The sequential queue is Kafka-backed when is_kafka_sequential_queue
    is set (a fresh uuid-named topic is created per call); otherwise an
    in-memory singleton queue is used.
    """
    if is_kafka_sequential_queue:
        # create a dedicated topic for this factory instance
        kafka_topic_name = str(uuid.uuid4())
        kafka_manager = get_default_kafka_manager_factory().get_kafka_manager()
        kafka_manager.add_topic(
            topic_name=kafka_topic_name
        ).get_result()
        sequential_queue_factory = KafkaSequentialQueueFactory(
            kafka_manager=kafka_manager,
            kafka_topic_name=kafka_topic_name
        )
    else:
        sequential_queue_factory = SingletonMemorySequentialQueueFactory()
    return ServerMessengerFactory(
        server_socket_factory_and_local_host_pointer_per_source_type={
            BaseSourceTypeEnum.Main: (
                ServerSocketFactory(
                    to_client_packet_bytes_length=4096,
                    listening_limit_total=10,
                    accept_timeout_seconds=10.0,
                    is_debug=is_socket_debug_active
                ),
                get_default_local_host_pointer()
            )
        },
        client_server_message_class=BaseClientServerMessage,
        source_type_enum_class=BaseSourceTypeEnum,
        server_messenger_source_type=BaseSourceTypeEnum.ServerMessenger,
        structure_factory=ButtonStructureFactory(),
        is_debug=is_server_messenger_debug_active
    )
class BaseClientServerMessageTypeEnum(ClientServerMessageTypeEnum):
    """Message-type discriminators for every test message class below."""
    HelloWorld = "hello_world"  # basic test
    Announce = "announce"  # announces name to structure
    AnnounceFailed = "announce_failed"  # announce failed to apply to structure
    PressButton = "press_button"  # structural influence, three presses cause broadcast of transmission to users
    ResetButton = "reset_button"  # structural_influence, resets number of presses and informs button pressers that it was reset
    ResetTransmission = "reset_transmission"  # directed to specific users that pressed the button
    ThreePressesTransmission = "three_presses_transmission"  # broadcasts to all users that the button was pressed three times and then resets the button
    PingRequest = "ping_request"  # pings the server and gets a response
    PingResponse = "ping_response"  # the response from the ping request
    EchoRequest = "echo_request"  # records the messages that should be echoed back
    EchoResponse = "echo_response"  # the response containing the echo message
    ErrorOnGetClientServerMessageType = "error_on_get_client_server_message_type"
    ErrorRequest = "error_request"  # a request that throws an exception as defined in the constructor
    ErrorResponse = "error_response"  # the response that will throw a predefined exception
    PowerButton = "power_button"  # increments a child structure by the number of presses processed by the parent structure
    PowerOverloadTransmission = "power_overload_transmission"  # if the power button is pressed three times at any stage of normal button presses an overload transmission is sent out to all clients involved
    PowerButtonFailed = "power_button_failed"  # power was already overloaded when attempted
    TimerRequest = "timer_request"  # set a timer for a later response
    TimerResponse = "timer_response"  # a response scheduled by the timer_request
class BaseSourceTypeEnum(SourceTypeEnum):
    """Message sources: external clients (Main) or the server itself."""
    Main = "main"
    ServerMessenger = "server_messenger"
class BaseClientServerMessage(ClientServerMessage, ABC):
    """Abstract base for all test messages; binds the shared type enum."""
    @classmethod
    def get_client_server_message_type_class(cls) -> Type[ClientServerMessageTypeEnum]:
        return BaseClientServerMessageTypeEnum
class HelloWorldBaseClientServerMessage(BaseClientServerMessage):
    """Minimal undirected message used by the basic hello-world test."""

    def __init__(self):
        super().__init__(
            destination_uuid=None
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.HelloWorld

    def to_json(self) -> Dict:
        # undirected message: strip the unused destination field
        json_object = super().to_json()
        json_object.pop("destination_uuid")
        return json_object

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class AnnounceBaseClientServerMessage(BaseClientServerMessage):
    """Undirected message announcing a client's name to the structure."""
    def __init__(self, *, name: str):
        super().__init__(
            destination_uuid=None
        )
        self.__name = name
    def get_name(self) -> str:
        return self.__name
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.Announce
    def to_json(self) -> Dict:
        json_object = super().to_json()
        del json_object["destination_uuid"]
        json_object["name"] = self.__name
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        # an announce rejected by the structure is answered with a
        # failure message directed back to the announcing client
        print(f"{datetime.utcnow()}: AnnounceBaseClientServerMessage: get_structural_error_client_server_message_response: structure state: {structure_transition_exception.get_structure_state()}")
        return AnnounceFailedBaseClientServerMessage(
            destination_uuid=destination_uuid
        )
class AnnounceFailedBaseClientServerMessage(BaseClientServerMessage):
    """Directed response telling a client its announce was rejected."""

    def __init__(self, *, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.AnnounceFailed

    def to_json(self) -> Dict:
        return super().to_json()

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class PressButtonBaseClientServerMessage(BaseClientServerMessage):
    """Undirected structural influence: one button press."""

    def __init__(self):
        super().__init__(
            destination_uuid=None
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PressButton

    def to_json(self) -> Dict:
        # undirected message: strip the unused destination field
        json_object = super().to_json()
        json_object.pop("destination_uuid")
        return json_object

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class ResetButtonBaseClientServerMessage(BaseClientServerMessage):
    """Undirected structural influence: reset the button press count."""

    def __init__(self):
        super().__init__(
            destination_uuid=None
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.ResetButton

    def to_json(self) -> Dict:
        # undirected message: strip the unused destination field
        json_object = super().to_json()
        json_object.pop("destination_uuid")
        return json_object

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class ResetTransmissionBaseClientServerMessage(BaseClientServerMessage):
    """Directed notification that the button was reset."""

    def __init__(self, *, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.ResetTransmission

    def to_json(self) -> Dict:
        return super().to_json()

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class ThreePressesTransmissionBaseClientServerMessage(BaseClientServerMessage):
    """Directed broadcast sent after three button presses; carries the
    power state string of the child PowerStructure."""
    def __init__(self, *, power: str, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )
        self.__power = power
    def get_power(self) -> str:
        return self.__power
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.ThreePressesTransmission
    def to_json(self) -> Dict:
        json_object = super().to_json()
        json_object["power"] = self.__power
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class PingRequestBaseClientServerMessage(BaseClientServerMessage):
    """Undirected request pinging the server for a PingResponse."""
    def __init__(self):
        super().__init__(
            destination_uuid=None
        )
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PingRequest
    def to_json(self) -> Dict:
        json_object = super().to_json()
        # undirected message: strip the unused destination field
        del json_object["destination_uuid"]
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class PingResponseBaseClientServerMessage(BaseClientServerMessage):
    """Directed reply to a PingRequest, carrying its sequence index."""
    def __init__(self, *, ping_index: int, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )
        self.__ping_index = ping_index
    def get_ping_index(self) -> int:
        return self.__ping_index
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PingResponse
    def to_json(self) -> Dict:
        json_object = super().to_json()
        json_object["ping_index"] = self.__ping_index
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class EchoRequestBaseClientServerMessage(BaseClientServerMessage):
    """Undirected request asking the server to echo back a message.

    is_ordered is serialized but has no accessor here; presumably it
    controls ordered processing server-side — confirm against Structure.
    """
    def __init__(self, *, message: str, is_ordered: bool):
        super().__init__(
            destination_uuid=None
        )
        self.__message = message
        self.__is_ordered = is_ordered
    def get_message(self) -> str:
        return self.__message
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.EchoRequest
    def to_json(self) -> Dict:
        json_object = super().to_json()
        del json_object["destination_uuid"]
        json_object["message"] = self.__message
        json_object["is_ordered"] = self.__is_ordered
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class EchoResponseBaseClientServerMessage(BaseClientServerMessage):
    """Directed reply carrying the echoed message back to the sender."""
    def __init__(self, *, message: str, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )
        self.__message = message
    def get_message(self) -> str:
        return self.__message
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.EchoResponse
    def to_json(self) -> Dict:
        json_object = super().to_json()
        json_object["message"] = self.__message
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class ErrorRequestBaseClientServerMessage(BaseClientServerMessage):
    """Test message that can raise at chosen points of its lifecycle.

    Each ``*_exception`` argument, when not None, is raised at the
    matching point (constructor, to_json, or the structural-error
    response hook). ``is_constructor_exception_to_set`` is deferred:
    after construction it is moved into ``constructor_exception``, so
    a copy rebuilt from this instance's JSON raises on construction
    while the original instance did not.
    """
    def __init__(self, *, is_constructor_exception_to_set: str = None, constructor_exception: str = None, to_json_exception: str = None, get_structural_error_client_server_message_response_exception: str = None, response_constructor_arguments: Dict = None):
        super().__init__(
            destination_uuid=None
        )
        self.__is_constructor_exception_to_set = is_constructor_exception_to_set
        self.__constructor_exception = constructor_exception
        self.__to_json_exception = to_json_exception
        self.__get_structural_error_client_server_message_response_exception = get_structural_error_client_server_message_response_exception
        self.__response_constructor_arguments = response_constructor_arguments
        if self.__constructor_exception is not None:
            raise Exception(self.__constructor_exception)
        if self.__is_constructor_exception_to_set is not None:
            # defer the exception: the serialized form will carry it as
            # constructor_exception, so deserialization raises
            self.__constructor_exception = self.__is_constructor_exception_to_set
            self.__is_constructor_exception_to_set = None
    def get_response_constructor_arguments(self) -> Dict:
        return self.__response_constructor_arguments
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.ErrorRequest
    def to_json(self) -> Dict:
        if self.__to_json_exception is not None:
            raise Exception(self.__to_json_exception)
        json_object = super().to_json()
        del json_object["destination_uuid"]
        json_object["is_constructor_exception_to_set"] = self.__is_constructor_exception_to_set
        json_object["constructor_exception"] = self.__constructor_exception
        json_object["to_json_exception"] = self.__to_json_exception
        json_object["get_structural_error_client_server_message_response_exception"] = self.__get_structural_error_client_server_message_response_exception
        json_object["response_constructor_arguments"] = self.__response_constructor_arguments
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        if self.__get_structural_error_client_server_message_response_exception is not None:
            raise Exception(self.__get_structural_error_client_server_message_response_exception)
        return None
class ErrorResponseBaseClientServerMessage(BaseClientServerMessage):
    """Directed counterpart of ErrorRequest: raises predefined
    exceptions at chosen lifecycle points.

    ``is_constructor_exception_to_set`` uses the same deferral trick as
    ErrorRequest: it becomes ``constructor_exception`` after
    construction, so a JSON round-tripped copy raises on construction.
    """
    def __init__(self, *, destination_uuid: str, is_constructor_exception_to_set: str = None, constructor_exception: str = None, to_json_exception: str = None, get_structural_error_client_server_message_response_exception: str = None):
        super().__init__(
            destination_uuid=destination_uuid
        )
        self.__is_constructor_exception_to_set = is_constructor_exception_to_set
        self.__constructor_exception = constructor_exception
        self.__to_json_exception = to_json_exception
        self.__get_structural_error_client_server_message_response_exception = get_structural_error_client_server_message_response_exception
        if self.__constructor_exception is not None:
            raise Exception(self.__constructor_exception)
        if self.__is_constructor_exception_to_set is not None:
            # defer the exception to the deserialized copy
            self.__constructor_exception = self.__is_constructor_exception_to_set
            self.__is_constructor_exception_to_set = None
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.ErrorResponse
    def to_json(self) -> Dict:
        if self.__to_json_exception is not None:
            raise Exception(self.__to_json_exception)
        json_object = super().to_json()
        json_object["is_constructor_exception_to_set"] = self.__is_constructor_exception_to_set
        json_object["constructor_exception"] = self.__constructor_exception
        json_object["to_json_exception"] = self.__to_json_exception
        json_object["get_structural_error_client_server_message_response_exception"] = self.__get_structural_error_client_server_message_response_exception
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        if self.__get_structural_error_client_server_message_response_exception is not None:
            raise Exception(self.__get_structural_error_client_server_message_response_exception)
        return None
class PowerButtonBaseClientServerMessage(BaseClientServerMessage):
    """Undirected structural influence pressing the power button."""
    def __init__(self, *, is_anonymous: bool):
        super().__init__(
            destination_uuid=None
        )
        self.__is_anonymous = is_anonymous  # if an overload should not be sent back to them due to this message
    def is_anonymous(self) -> bool:
        return self.__is_anonymous
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PowerButton
    def to_json(self) -> Dict:
        json_object = super().to_json()
        del json_object["destination_uuid"]
        json_object["is_anonymous"] = self.__is_anonymous
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        # pressing while the structure cannot accept it answers with a
        # directed PowerButtonFailed message
        return PowerButtonFailedBaseClientServerMessage(
            destination_uuid=destination_uuid
        )
class PowerOverloadTransmissionBaseClientServerMessage(BaseClientServerMessage):
    """Directed notification that the power structure overloaded."""

    def __init__(self, *, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PowerOverloadTransmission

    def to_json(self) -> Dict:
        return super().to_json()

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class PowerButtonFailedBaseClientServerMessage(BaseClientServerMessage):
    """Directed notification that a power-button press was rejected."""

    def __init__(self, *, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )

    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.PowerButtonFailed

    def to_json(self) -> Dict:
        return super().to_json()

    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class TimerRequestBaseClientServerMessage(BaseClientServerMessage):
    """Undirected request scheduling a TimerResponse after ``seconds``."""
    def __init__(self, *, message: str, seconds: float):
        super().__init__(
            destination_uuid=None
        )
        self.__message = message
        self.__seconds = seconds
    def get_message(self) -> str:
        return self.__message
    def get_seconds(self) -> float:
        return self.__seconds
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.TimerRequest
    def to_json(self) -> Dict:
        json_object = super().to_json()
        del json_object["destination_uuid"]
        json_object["message"] = self.__message
        json_object["seconds"] = self.__seconds
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class TimerResponseBaseClientServerMessage(BaseClientServerMessage):
    """Directed reply delivered when a TimerRequest's timer elapses."""
    def __init__(self, *, message: str, destination_uuid: str):
        super().__init__(
            destination_uuid=destination_uuid
        )
        self.__message = message
    def get_message(self) -> str:
        return self.__message
    @classmethod
    def get_client_server_message_type(cls) -> ClientServerMessageTypeEnum:
        return BaseClientServerMessageTypeEnum.TimerResponse
    def to_json(self) -> Dict:
        json_object = super().to_json()
        json_object["message"] = self.__message
        return json_object
    def get_structural_error_client_server_message_response(self, structure_transition_exception: StructureTransitionException, destination_uuid: str) -> ClientServerMessage:
        return None
class PowerStructureStateEnum(StructureStateEnum):
    """States of PowerStructure as presses accumulate."""
    Underpowered = "underpower"
    Powered = "powered"
    Overpowered = "overpowered"
class PowerStructure(Structure):
    """Child structure counting PowerButton presses.

    Transitions Underpowered -> Powered at 3 presses and Powered ->
    Overpowered at 4, at which point every non-anonymous presser is
    sent a PowerOverloadTransmission.
    """
    def __init__(self):
        super().__init__(
            states=PowerStructureStateEnum,
            initial_state=PowerStructureStateEnum.Underpowered
        )
        self.__power_total = 0  # presses processed so far
        # clients to notify when the structure becomes overpowered
        self.__source_uuids_to_inform_on_power_overload = []  # type: List[str]
        # PowerButton is only registered for the Underpowered and
        # Powered states; pressing while Overpowered has no transition
        # and therefore triggers the structural-error path.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=PowerStructureStateEnum.Underpowered,
            end_structure_state=PowerStructureStateEnum.Underpowered,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=PowerStructureStateEnum.Powered,
            end_structure_state=PowerStructureStateEnum.Powered,
            on_transition=self.__power_button_pressed
        )
    def on_client_connected(self, *, source_uuid: str, source_type: SourceTypeEnum, tag_json: Dict):
        # clients never connect directly to this child structure
        raise Exception(f"Unexpected client connected: {source_uuid}: {source_type}: {tag_json}")
    def add_source_uuid_for_power_overload_transmission(self, *, source_uuid: str):
        # deduplicated registration of clients to notify on overload
        if source_uuid not in self.__source_uuids_to_inform_on_power_overload:
            self.__source_uuids_to_inform_on_power_overload.append(source_uuid)
    def get_power(self) -> str:
        # human-readable power level derived from the press count
        if self.__power_total < 3:
            return "underpowered"
        elif self.__power_total == 3:
            return "powered"
        else:
            return "overpowered"
    def __power_button_pressed(self, structure_influence: StructureInfluence):
        """Transition handler for a PowerButton press from Main."""
        print(f"{datetime.utcnow()}: PowerStructure: __power_button_pressed: start")
        print(f"get state: {self.get_state()}")
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        power_button = structure_influence.get_client_server_message()  # type: PowerButtonBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        if not power_button.is_anonymous():
            # anonymous pressers opt out of overload notifications
            self.add_source_uuid_for_power_overload_transmission(
                source_uuid=source_uuid
            )
        self.__power_total += 1
        if self.__power_total == 3:
            # set the state to "powered"
            self.set_state(
                structure_state=PowerStructureStateEnum.Powered
            )
        elif self.__power_total == 4:
            # set the state to "overpowered"
            # NOTE this will also permit an impossible state change if another power button message is sent
            self.set_state(
                structure_state=PowerStructureStateEnum.Overpowered
            )
            # notify every registered presser exactly once, then clear
            for source_uuid in self.__source_uuids_to_inform_on_power_overload:
                self.send_client_server_message(
                    client_server_message=PowerOverloadTransmissionBaseClientServerMessage(
                        destination_uuid=source_uuid
                    )
                )
            self.__source_uuids_to_inform_on_power_overload.clear()
        print(f"{datetime.utcnow()}: PowerStructure: __power_button_pressed: end")
    def dispose(self):
        # no resources to release
        pass
class ButtonStructureStateEnum(StructureStateEnum):
    """States of ButtonStructure: how many presses are currently accumulated."""
    ZeroPresses = "zero_presses"
    OnePress = "one_press"
    TwoPresses = "two_presses"
    ThreePresses = "three_presses"
class ButtonStructure(Structure):
    """Root structure exercised by the messenger tests.

    Counts button presses (ZeroPresses up to ThreePresses), broadcasts resets
    back to the clients that pressed, answers ping/echo/error/timer requests,
    and forwards power-button presses to a child PowerStructure.
    """

    def __init__(self):
        super().__init__(
            states=ButtonStructureStateEnum,
            initial_state=ButtonStructureStateEnum.ZeroPresses
        )
        # UUIDs of connected "Main" clients.
        self.__main_source_uuids = []  # type: List[str]
        # Clients that pressed since the last reset / three-press broadcast.
        self.__pressed_button_source_uuids = []  # type: List[str]
        # Announced display name per client UUID.
        self.__name_per_client_uuid = {}  # type: Dict[str, str]
        # Lifetime press counter.
        # NOTE(review): never reset, so the "== 3" check in __button_pressed
        # fires at most once per structure instance — confirm intended.
        self.__presses_total = 0
        # Counter used as the ping_index of successive ping responses.
        self.__pings_total = 0
        self.__power_structure = PowerStructure()
        self.register_child_structure(
            structure=self.__power_structure
        )
        # Announce: record the client's name (accepted only at ZeroPresses).
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.Announce,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__name_announced
        )
        # PressButton: advance Zero -> One -> Two -> Three presses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.OnePress,
            on_transition=self.__button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.TwoPresses,
            on_transition=self.__button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PressButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.ThreePresses,
            on_transition=self.__button_pressed
        )
        # ResetButton: return to ZeroPresses from any state below ThreePresses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ResetButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__button_reset
        )
        # ThreePressesTransmission originates from the server messenger itself
        # and completes the cycle back to ZeroPresses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ThreePressesTransmission,
            from_source_type=BaseSourceTypeEnum.ServerMessenger,
            start_structure_state=ButtonStructureStateEnum.ThreePresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__three_presses_transmission_sent
        )
        # Ping/Echo/Error requests: serviced only while at ZeroPresses.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PingRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__ping_requested
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.EchoRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__echo_requested
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.ErrorRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__error_requested
        )
        # PowerButton: delegated to the child PowerStructure; this structure's
        # own state is unchanged (accepted at Zero/One/Two presses).
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.OnePress,
            end_structure_state=ButtonStructureStateEnum.OnePress,
            on_transition=self.__power_button_pressed
        )
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.PowerButton,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.TwoPresses,
            end_structure_state=ButtonStructureStateEnum.TwoPresses,
            on_transition=self.__power_button_pressed
        )
        # TimerRequest: schedule a delayed TimerResponse; state unchanged.
        self.add_transition(
            client_server_message_type=BaseClientServerMessageTypeEnum.TimerRequest,
            from_source_type=BaseSourceTypeEnum.Main,
            start_structure_state=ButtonStructureStateEnum.ZeroPresses,
            end_structure_state=ButtonStructureStateEnum.ZeroPresses,
            on_transition=self.__timer_requested
        )

    def on_client_connected(self, *, source_uuid: str, source_type: SourceTypeEnum, tag_json: Dict):
        """Track Main-type clients; any other source type is unexpected."""
        if source_type == BaseSourceTypeEnum.Main:
            self.__main_source_uuids.append(source_uuid)
        else:
            raise Exception(f"Unexpected client connected: {source_uuid}: {source_type}: {tag_json}")

    def __name_announced(self, structure_influence: StructureInfluence):
        """Record the announced display name for the sending client."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        announce = structure_influence.get_client_server_message()  # type: AnnounceBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        self.__name_per_client_uuid[source_uuid] = announce.get_name()

    def __button_pressed(self, structure_influence: StructureInfluence):
        """Count a press; on the third press reply to that presser."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        source_uuid = structure_influence.get_source_uuid()
        if source_uuid not in self.__pressed_button_source_uuids:
            self.__pressed_button_source_uuids.append(source_uuid)
        # Log the press under the announced name when one is known.
        if source_uuid in self.__name_per_client_uuid:
            print(f"button pressed by {self.__name_per_client_uuid[source_uuid]}")
        else:
            print(f"button pressed by {source_uuid}")
        self.__presses_total += 1
        if self.__presses_total == 3:
            # Reply to the third presser, including the current power level.
            self.send_client_server_message(
                client_server_message=ThreePressesTransmissionBaseClientServerMessage(
                    destination_uuid=source_uuid,
                    power=self.__power_structure.get_power()
                )
            )

    def __button_reset(self, structure_influence: StructureInfluence):
        """Broadcast a reset to every client that pressed, then clear the list."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        for source_uuid in self.__pressed_button_source_uuids:
            client_server_message = ResetTransmissionBaseClientServerMessage(
                destination_uuid=source_uuid
            )
            self.send_client_server_message(
                client_server_message=client_server_message
            )
        self.__pressed_button_source_uuids.clear()

    def __three_presses_transmission_sent(self, structure_influence: StructureInfluence):
        """After the server's own transmission, forget who pressed."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.ServerMessenger:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        self.__pressed_button_source_uuids.clear()

    def __ping_requested(self, structure_influence: StructureInfluence):
        """Reply with the current ping index, then increment it."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        source_uuid = structure_influence.get_source_uuid()
        self.send_client_server_message(
            client_server_message=PingResponseBaseClientServerMessage(
                destination_uuid=source_uuid,
                ping_index=self.__pings_total
            )
        )
        self.__pings_total += 1

    def __echo_requested(self, structure_influence: StructureInfluence):
        """Echo the request's message straight back to the sender."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        echo_request = structure_influence.get_client_server_message()  # type: EchoRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        message = echo_request.get_message()
        self.send_client_server_message(
            client_server_message=EchoResponseBaseClientServerMessage(
                message=message,
                destination_uuid=source_uuid
            )
        )

    def __error_requested(self, structure_influence: StructureInfluence):
        """Build an ErrorResponse from request-supplied constructor arguments."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        error_request = structure_influence.get_client_server_message()  # type: ErrorRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        constructor_arguments = error_request.get_response_constructor_arguments()
        if constructor_arguments is None:
            constructor_arguments = {}
        # The destination always routes back to the requesting client,
        # overriding anything the request supplied.
        constructor_arguments["destination_uuid"] = source_uuid
        self.send_client_server_message(
            client_server_message=ErrorResponseBaseClientServerMessage(
                **constructor_arguments
            )
        )

    def __power_button_pressed(self, structure_influence: StructureInfluence):
        """Forward the power-button press to the child PowerStructure."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        self.__power_structure.update_structure(
            structure_influence=structure_influence
        )

    def __timer_requested(self, structure_influence: StructureInfluence):
        """Start a thread that answers with a TimerResponse after the delay."""
        if structure_influence.get_source_type() != BaseSourceTypeEnum.Main:
            raise Exception(f"Unexpected source type: {structure_influence.get_source_type()}.")
        timer_request = structure_influence.get_client_server_message()  # type: TimerRequestBaseClientServerMessage
        source_uuid = structure_influence.get_source_uuid()
        def timer_thread_method():
            # Deliver the response after the requested number of seconds.
            nonlocal timer_request
            nonlocal source_uuid
            time.sleep(timer_request.get_seconds())
            self.send_client_server_message(
                client_server_message=TimerResponseBaseClientServerMessage(
                    destination_uuid=source_uuid,
                    message=timer_request.get_message()
                )
            )
        start_thread(timer_thread_method)

    def dispose(self):
        # Release the child structure's resources.
        self.__power_structure.dispose()
class ButtonStructureFactory(StructureFactory):
    """Factory producing a fresh, independent ButtonStructure per request."""

    def __init__(self):
        super().__init__()

    def get_structure(self) -> Structure:
        """Create and return a new ButtonStructure instance."""
        return ButtonStructure()
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
##############################################################################################
class MessengerTest(unittest.TestCase):
def setUp(self) -> None:
    """Reset shared infrastructure before each test.

    When the Kafka-backed sequential queue is in use, every existing topic
    is removed so each test starts from an empty queue.
    """
    print(f"{datetime.utcnow()}: setUp: start")
    if is_kafka_sequential_queue:
        kafka_manager = get_default_kafka_manager_factory().get_kafka_manager()
        print(f"setUp: initialized: {datetime.utcnow()}")
        topics = kafka_manager.get_topics().get_result()  # type: List[str]
        print(f"setUp: get_topics: {datetime.utcnow()}")
        for topic in topics:
            print(f"setUp: topic: {topic}: {datetime.utcnow()}")
            async_handle = kafka_manager.remove_topic(
                topic_name=topic
            )
            print(f"setUp: async: {topic}: {datetime.utcnow()}")
            # Block until the removal completes before moving on.
            async_handle.get_result()
            print(f"setUp: result: {topic}: {datetime.utcnow()}")
        # Give the broker a moment to settle after topic removal.
        time.sleep(1)
    print(f"{datetime.utcnow()}: setUp: end")
def test_initialize_client_messenger(self):
    """A client messenger can be constructed and disposed cleanly."""
    messenger = get_default_client_messenger_factory().get_client_messenger()
    self.assertIsNotNone(messenger)
    messenger.dispose()
def test_initialize_server_messenger(self):
    """A server messenger can be constructed."""
    messenger = get_default_server_messenger_factory().get_server_messenger()
    self.assertIsNotNone(messenger)
def test_server_messenger_start_and_stop(self):
    """The server messenger starts and stops cleanly with no clients attached."""
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(3)
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(5)
def test_connect_client_to_server_and_client_disposes_first(self):
    """Client sends one message, then disposes before the server stops."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    client_messenger.send_to_server(
        client_server_message=HelloWorldBaseClientServerMessage()
    )
    time.sleep(1)
    # Client tears down first; the server must tolerate the disconnect.
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    time.sleep(1)
def test_connect_client_to_server_and_server_stops_first(self):
    """Client sends one message; the server stops before the client disposes."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    client_messenger.send_to_server(
        client_server_message=HelloWorldBaseClientServerMessage()
    )
    time.sleep(1)
    # Server tears down first; the client must tolerate the closed socket.
    server_messenger.stop_receiving_from_clients()
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
def test_connect_client_to_server_client_receives_and_client_disposes_first(self):
    """Client registers a receive callback, then disposes before the server stops."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    time.sleep(1)
    def callback(client_server_message: ClientServerMessage):
        # HelloWorld is not expected to produce any server reply.
        raise Exception("Unexpected response")
    found_exception = None  # type: Optional[Exception]
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    time.sleep(1)
    client_messenger.send_to_server(
        client_server_message=HelloWorldBaseClientServerMessage()
    )
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    time.sleep(1)
    # Surface any exception captured on the receive thread.
    if found_exception is not None:
        raise found_exception
def test_connect_client_to_server_client_receives_and_server_stops_first(self):
    """Server stopping while the client is receiving surfaces a socket-closed error."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    time.sleep(1)
    def callback(client_server_message: ClientServerMessage):
        # HelloWorld is not expected to produce any server reply.
        raise Exception("Unexpected response")
    found_exception = None  # type: Optional[Exception]
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    time.sleep(1)
    client_messenger.send_to_server(
        client_server_message=HelloWorldBaseClientServerMessage()
    )
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    # The server's shutdown should have closed the client's read socket.
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
def test_press_button_three_times(self):
    """Send three presses and wait for the ThreePressesTransmission reply."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ThreePressesTransmissionBaseClientServerMessage)
    found_exception = None  # type: Optional[Exception]
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Test Name"
        )
    )
    print(f"{datetime.utcnow()}: sending first press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending second press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending third press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # Exactly one ThreePressesTransmission should have arrived.
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_one_client_sends_two_presses_then_reset(self):
    """Send two presses, then a reset, and wait for the ResetTransmission reply."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
    found_exception = None  # type: Optional[Exception]
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Test Name"
        )
    )
    print(f"{datetime.utcnow()}: sending first press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending second press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending reset")
    client_messenger.send_to_server(
        client_server_message=ResetButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # The presser should receive exactly one ResetTransmission.
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_two_clients_each_send_one_press_then_reset(self):
    """Both pressers receive the ResetTransmission after the first client resets."""
    first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    first_client_messenger.connect_to_server()
    second_client_messenger.connect_to_server()
    # Shared across both callbacks: total resets received.
    callback_total = 0
    def first_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
    first_found_exception = None  # type: Optional[Exception]
    def first_on_exception(exception: Exception):
        nonlocal first_found_exception
        first_found_exception = exception
    first_client_messenger.receive_from_server(
        callback=first_callback,
        on_exception=first_on_exception
    )
    def second_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
    second_found_exception = None  # type: Optional[Exception]
    def second_on_exception(exception: Exception):
        nonlocal second_found_exception
        second_found_exception = exception
    second_client_messenger.receive_from_server(
        callback=second_callback,
        on_exception=second_on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    first_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    print(f"{datetime.utcnow()}: sending second announcement")
    second_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Second"
        )
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending first press")
    first_client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending second press")
    second_client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: sending reset")
    first_client_messenger.send_to_server(
        client_server_message=ResetButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    first_client_messenger.dispose()
    second_client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # Each presser should have received exactly one ResetTransmission.
    self.assertEqual(2, callback_total)
    self.assertIsNone(first_found_exception)
    self.assertIsNone(second_found_exception)
def test_two_clients_each_send_one_press_then_third_client_reset(self):
    """A non-pressing third client triggers the reset; only pressers are notified."""
    first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    third_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    first_client_messenger.connect_to_server()
    second_client_messenger.connect_to_server()
    third_client_messenger.connect_to_server()
    # Shared across all callbacks: total messages received.
    callback_total = 0
    def first_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
    first_found_exception = None  # type: Optional[Exception]
    def first_on_exception(exception: Exception):
        nonlocal first_found_exception
        first_found_exception = exception
    first_client_messenger.receive_from_server(
        callback=first_callback,
        on_exception=first_on_exception
    )
    def second_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ResetTransmissionBaseClientServerMessage)
    second_found_exception = None  # type: Optional[Exception]
    def second_on_exception(exception: Exception):
        nonlocal second_found_exception
        second_found_exception = exception
    second_client_messenger.receive_from_server(
        callback=second_callback,
        on_exception=second_on_exception
    )
    def third_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: third_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        # The third client never pressed, so no reset should route to it.
        raise Exception(f"Third client should not receive a message.")
    third_found_exception = None  # type: Optional[Exception]
    def third_on_exception(exception: Exception):
        nonlocal third_found_exception
        third_found_exception = exception
    third_client_messenger.receive_from_server(
        callback=third_callback,
        on_exception=third_on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    first_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    print(f"{datetime.utcnow()}: sending second announcement")
    second_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Second"
        )
    )
    print(f"{datetime.utcnow()}: sending third announcement")
    third_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Third"
        )
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending first press")
    first_client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending second press")
    second_client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending reset")
    third_client_messenger.send_to_server(
        client_server_message=ResetButtonBaseClientServerMessage()
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    first_client_messenger.dispose()
    second_client_messenger.dispose()
    third_client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertIsNone(first_found_exception)
    self.assertIsNone(second_found_exception)
    self.assertIsNone(third_found_exception)
    # Only the two pressers should have received the ResetTransmission.
    self.assertEqual(2, callback_total)
def test_client_disconnects_before_receiving_intended_message(self):
    """The first client sends a press, disconnects, then the second client resets.

    The server messenger should detect that the first client disconnected and
    release its socket gracefully; nobody receives the ResetTransmission.
    """
    first_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    second_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    first_client_messenger.connect_to_server()
    second_client_messenger.connect_to_server()
    callback_total = 0
    def first_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: first_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        raise Exception("This client should have been disposed of already.")
    first_found_exception = None  # type: Optional[Exception]
    def first_on_exception(exception: Exception):
        nonlocal first_found_exception
        first_found_exception = exception
    first_client_messenger.receive_from_server(
        callback=first_callback,
        on_exception=first_on_exception
    )
    def second_callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: second_callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        raise Exception("This client should not be receiving a message.")
    second_found_exception = None  # type: Optional[Exception]
    def second_on_exception(exception: Exception):
        nonlocal second_found_exception
        second_found_exception = exception
    second_client_messenger.receive_from_server(
        callback=second_callback,
        on_exception=second_on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    first_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending second announcement")
    second_client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Second"
        )
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending first press")
    first_client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(1)
    # Drop the presser before the reset is broadcast to it.
    print(f"{datetime.utcnow()}: disposing first client")
    first_client_messenger.dispose()
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending reset")
    second_client_messenger.send_to_server(
        client_server_message=ResetButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    second_client_messenger.dispose()
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    time.sleep(1)
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # No client should have received anything.
    self.assertEqual(0, callback_total)
    self.assertIsNone(first_found_exception)
    self.assertIsNone(second_found_exception)
def test_ping(self):
    """A single ping request is answered with ping_index 0."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        ping_response_base_client_server_message = client_server_message  # type: PingResponseBaseClientServerMessage
        # First ping ever, so the server-side counter starts at zero.
        self.assertEqual(0, ping_response_base_client_server_message.get_ping_index())
    found_exception = None  # type: Optional[Exception]
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: sending ping")
    client_messenger.send_to_server(
        client_server_message=PingRequestBaseClientServerMessage()
    )
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    # Exactly one ping response should have arrived, with no errors.
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_using_threading(self):
        """Throughput check: blast 1000 ping requests from a worker thread and
        print send/receive timing for the first and last message.

        Asserts every ping got exactly one response, responses arrived in
        order, and the receive callback never reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        client_messenger = get_default_client_messenger_factory().get_client_messenger()
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before the client connects.
        time.sleep(1)
        client_messenger.connect_to_server()
        expected_pings_total = 1000
        callback_total = 0
        expected_ping_index = 0
        first_message_datetime = None # type: datetime
        last_message_datetime = None # type: datetime
        def callback(client_server_message: ClientServerMessage):
            # Counts responses and records first/last arrival timestamps.
            # NOTE(review): unlike the burst variants below, no semaphore guards
            # these shared counters — presumably callbacks run serially; confirm.
            nonlocal callback_total
            nonlocal expected_ping_index
            nonlocal first_message_datetime
            nonlocal last_message_datetime
            nonlocal expected_pings_total
            #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
            callback_total += 1
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
            ping_response_base_client_server_message = client_server_message # type: PingResponseBaseClientServerMessage
            # Responses must arrive in send order.
            self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
            expected_ping_index += 1
            if expected_ping_index == 1:
                first_message_datetime = datetime.utcnow()
            if expected_ping_index == expected_pings_total:
                last_message_datetime = datetime.utcnow()
        found_exception = None # type: Exception
        def on_exception(exception: Exception):
            # Stash any receive-side failure for the final assertion.
            nonlocal found_exception
            found_exception = exception
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
        print(f"{datetime.utcnow()}: sending first announcement")
        client_messenger.send_to_server(
            client_server_message=AnnounceBaseClientServerMessage(
                name="First"
            )
        )
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: sending first press")
        sent_first_ping_datetime = None # type: datetime
        sent_last_ping_datetime = None # type: datetime
        def ping_thread_method():
            # Sends all pings back-to-back, timestamping the first and last
            # sends for the latency report below.
            nonlocal client_messenger
            nonlocal expected_pings_total
            nonlocal sent_first_ping_datetime
            nonlocal sent_last_ping_datetime
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: waiting for messages")
        # NOTE(review): no timeout — this loops forever if any response is lost.
        while last_message_datetime is None:
            time.sleep(1)
        time.sleep(1)
        print(f"{datetime.utcnow()}: disposing")
        client_messenger.dispose()
        print(f"{datetime.utcnow()}: disposed")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertEqual(expected_pings_total, callback_total)
        print(f"Sent first message datetime: {sent_first_ping_datetime}")
        print(f"Received first message datetime: {first_message_datetime}")
        print(f"Diff: {(first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
        print(f"Sent last message datetime: {sent_last_ping_datetime}")
        print(f"Received last message datetime: {last_message_datetime}")
        print(f"Diff: {(last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
        seconds_total = (last_message_datetime - first_message_datetime).total_seconds()
        messages_per_second = expected_pings_total / seconds_total
        print(f"Messages per seconds: {messages_per_second}")
        print(f"Seconds per message: {1.0 / messages_per_second}")
        self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_burst(self):
        """Throughput check: blast 1000 ping requests back-to-back from a
        worker thread and print send/receive timing statistics.

        Passes when every response arrives (the wait loop exits) and the
        receive callback never reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        expected_pings_total = 1000
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, burst-send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal found_exception
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None # type: datetime
            received_last_message_datetime = None # type: datetime
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
                ping_response_base_client_server_message = client_server_message # type: PingResponseBaseClientServerMessage
                # NOTE(review): this ordering assert reads expected_ping_index
                # before the semaphore below is acquired — racy if callbacks overlap.
                self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_pings_delayed(self):
        """Sustained-rate check: send 5000 pings (10 s x 500 msg/s) with a small
        pacing delay between sends, then report per-message send->receive
        latency statistics (min/max/average), optionally plotting timelines.

        Passes when every response arrives and the receive callback never
        reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        test_seconds = 10
        test_messages_per_second = 500
        expected_pings_total = test_seconds * test_messages_per_second
        # The 0.6 factor under-sleeps, presumably to compensate for per-send
        # overhead — the actual achieved send rate is measured and printed below.
        delay_between_sending_message_seconds = (1.0 / test_messages_per_second) * 0.6
        #expected_pings_total = 1000
        #delay_between_sending_message_seconds = 0.0025
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, paced send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal delay_between_sending_message_seconds
            nonlocal found_exception
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            received_datetimes = [] # type: List[datetime]
            sent_datetimes = [] # type: List[datetime]
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_datetimes
                nonlocal callback_semaphore
                #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
                ping_response_base_client_server_message = client_server_message # type: PingResponseBaseClientServerMessage
                #self.assertEqual(expected_ping_index, ping_response_base_client_server_message.get_ping_index())
                callback_semaphore.acquire()
                expected_ping_index += 1
                received_datetimes.append(datetime.utcnow())
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            print(f"{datetime.utcnow()}: starting to send messages")
            sent_datetimes.append(datetime.utcnow())
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            time.sleep(delay_between_sending_message_seconds)
            # Middle expected_pings_total - 2 sends, each followed by the pacing delay.
            for index in range(expected_pings_total - 2):
                sent_datetimes.append(datetime.utcnow())
                client_messenger.send_to_server(
                    client_server_message=PingRequestBaseClientServerMessage()
                )
                time.sleep(delay_between_sending_message_seconds)
            sent_datetimes.append(datetime.utcnow())
            client_messenger.send_to_server(
                client_server_message=PingRequestBaseClientServerMessage()
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while len(received_datetimes) != expected_pings_total:
                time.sleep(1)
                print(f"len(received_datetimes): {len(received_datetimes)}")
            time.sleep(1)
            self.assertEqual(expected_pings_total, len(sent_datetimes))
            self.assertEqual(expected_pings_total, len(received_datetimes))
            # Pair each send with the same-index receive — presumably responses
            # arrive in order (the ordering assert above is commented out).
            diff_seconds_totals = [] # type: List[float]
            for sent_datetime, received_datetime in zip(sent_datetimes, received_datetimes):
                seconds_total = (received_datetime - sent_datetime).total_seconds()
                diff_seconds_totals.append(seconds_total)
            print(f"Time to send {(sent_datetimes[-1] - sent_datetimes[0]).total_seconds()} seconds")
            print(f"Messages per second to send: {expected_pings_total / (sent_datetimes[-1] - sent_datetimes[0]).total_seconds()}")
            print(f"Time to receive {(received_datetimes[-1] - received_datetimes[0]).total_seconds()} seconds")
            print(f"Messages per second to receive: {expected_pings_total / (received_datetimes[-1] - received_datetimes[0]).total_seconds()}")
            print(f"Min diff seconds {min(diff_seconds_totals)} at {diff_seconds_totals.index(min(diff_seconds_totals))}")
            print(f"Max diff seconds {max(diff_seconds_totals)} at {diff_seconds_totals.index(max(diff_seconds_totals))}")
            print(f"Ave diff seconds {sum(diff_seconds_totals)/expected_pings_total}")
            if is_plotted:
                plt.scatter(sent_datetimes, range(len(sent_datetimes)), s=1, c="red")
                plt.scatter(received_datetimes, range(len(received_datetimes)), s=1, c="blue")
                plt.show()
            # Re-report with the first `cutoff` warm-up messages excluded.
            # NOTE(review): .index() locates the sliced min/max in the FULL list,
            # so the reported position can hit an earlier duplicate value.
            cutoff = 150
            print(f"Min diff seconds {min(diff_seconds_totals[cutoff:])} at {diff_seconds_totals.index(min(diff_seconds_totals[cutoff:]))}")
            print(f"Max diff seconds {max(diff_seconds_totals[cutoff:])} at {diff_seconds_totals.index(max(diff_seconds_totals[cutoff:]))}")
            print(f"Ave diff seconds {sum(diff_seconds_totals[cutoff:]) / (expected_pings_total - cutoff)}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_0B(self):
        """Throughput check: blast 1000 ordered echo requests with an empty
        (0-byte) payload and print send/receive timing statistics.

        Passes when every EchoResponse arrives and the receive callback never
        reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = ""
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, burst-send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None # type: datetime
            received_last_message_datetime = None # type: datetime
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_1KB(self):
        """Throughput check: blast 1000 ordered echo requests with a 1 KiB
        payload ("12345678" * 128 = 1024 chars) and print timing statistics.

        Passes when every EchoResponse arrives and the receive callback never
        reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, burst-send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None # type: datetime
            received_last_message_datetime = None # type: datetime
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_5KB(self):
        """Throughput check: blast 1000 ordered echo requests with a 5 KiB
        payload ("12345678" * 128 * 5 = 5120 chars) and print timing statistics.

        Passes when every EchoResponse arrives and the receive callback never
        reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128 * 5
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, burst-send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None # type: datetime
            received_last_message_datetime = None # type: datetime
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
    def test_single_client_quickly_echos_burst_10KB(self):
        """Throughput check: blast 1000 ordered echo requests with a 10 KiB
        payload ("12345678" * 128 * 10 = 10240 chars) and print timing statistics.

        Passes when every EchoResponse arrives and the receive callback never
        reported an exception.
        """
        # spam pings and detect timing differences between sends and receives
        server_messenger = get_default_server_messenger_factory().get_server_messenger()
        server_messenger.start_receiving_from_clients()
        # Give the server a moment to start listening before any client connects.
        time.sleep(1)
        expected_pings_total = 1000
        message_contents = "12345678" * 128 * 10
        print(f"{datetime.utcnow()}: sending first press")
        found_exception = None # type: Exception
        def ping_thread_method():
            # Runs the entire client lifecycle (connect, announce, burst-send,
            # wait, report, dispose) on a dedicated thread.
            nonlocal expected_pings_total
            nonlocal found_exception
            nonlocal message_contents
            client_messenger = get_default_client_messenger_factory().get_client_messenger()
            client_messenger.connect_to_server()
            expected_ping_index = 0
            received_first_message_datetime = None # type: datetime
            received_last_message_datetime = None # type: datetime
            # Serializes the bookkeeping below — assumes callbacks may overlap;
            # TODO confirm the callback threading model.
            callback_semaphore = Semaphore()
            def callback(client_server_message: ClientServerMessage):
                nonlocal expected_pings_total
                nonlocal expected_ping_index
                nonlocal received_first_message_datetime
                nonlocal received_last_message_datetime
                nonlocal callback_semaphore
                # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
                self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
                callback_semaphore.acquire()
                expected_ping_index += 1
                if expected_ping_index == 1:
                    received_first_message_datetime = datetime.utcnow()
                if expected_ping_index == expected_pings_total:
                    received_last_message_datetime = datetime.utcnow()
                callback_semaphore.release()
            def on_exception(exception: Exception):
                # Stash any receive-side failure for the final assertion.
                nonlocal found_exception
                found_exception = exception
            client_messenger.receive_from_server(
                callback=callback,
                on_exception=on_exception
            )
            print(f"{datetime.utcnow()}: sending first announcement")
            client_messenger.send_to_server(
                client_server_message=AnnounceBaseClientServerMessage(
                    name="First"
                )
            )
            sent_first_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            # First and last sends are timed individually; this loop fills in
            # the middle expected_pings_total - 2 sends.
            for index in range(expected_pings_total - 2):
                client_messenger.send_to_server(
                    client_server_message=EchoRequestBaseClientServerMessage(
                        message=message_contents,
                        is_ordered=True
                    )
                )
            sent_last_ping_datetime = datetime.utcnow()
            client_messenger.send_to_server(
                client_server_message=EchoRequestBaseClientServerMessage(
                    message=message_contents,
                    is_ordered=True
                )
            )
            print(f"{datetime.utcnow()}: waiting for messages")
            # NOTE(review): no timeout — this loops forever if any response is lost.
            while received_last_message_datetime is None:
                time.sleep(1)
            time.sleep(1)
            print(f"Sent first message datetime: {sent_first_ping_datetime}")
            print(f"Received first message datetime: {received_first_message_datetime}")
            print(f"Diff: {(received_first_message_datetime - sent_first_ping_datetime).total_seconds()} seconds")
            print(f"Sent last message datetime: {sent_last_ping_datetime}")
            print(f"Received last message datetime: {received_last_message_datetime}")
            print(f"Diff: {(received_last_message_datetime - sent_last_ping_datetime).total_seconds()} seconds")
            seconds_total = (sent_last_ping_datetime - sent_first_ping_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to send all messages: {seconds_total}")
            print(f"Sent messages per seconds: {messages_per_second}")
            print(f"Seconds per sent message: {1.0 / messages_per_second}")
            seconds_total = (received_last_message_datetime - received_first_message_datetime).total_seconds()
            messages_per_second = expected_pings_total / seconds_total
            print(f"Seconds to receive all messages: {seconds_total}")
            print(f"Received messages per seconds: {messages_per_second}")
            print(f"Seconds per received message: {1.0 / messages_per_second}")
            print(f"{datetime.utcnow()}: disposing")
            client_messenger.dispose()
            print(f"{datetime.utcnow()}: disposed")
        ping_thread = start_thread(ping_thread_method)
        ping_thread.join()
        time.sleep(0.1)
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
        self.assertIsNone(found_exception)
def test_client_attempts_message_impossible_for_structure_state_but_exception_in_callback(self):
# attempt to reset the presses without first pressing the button
client_messenger = get_default_client_messenger_factory().get_client_messenger()
server_messenger = get_default_server_messenger_factory().get_server_messenger()
server_messenger.start_receiving_from_clients()
time.sleep(1)
client_messenger.connect_to_server()
callback_total = 0
expected_exception = Exception(f"Client should not receive any messages as part of this test.")
def callback(client_server_message: ClientServerMessage):
nonlocal callback_total
nonlocal expected_exception
print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
callback_total += 1
raise expected_exception
found_exception = None # type: Exception
def on_exception(exception: Exception):
nonlocal found_exception
found_exception = exception
client_messenger.receive_from_server(
callback=callback,
on_exception=on_exception
)
print(f"{datetime.utcnow()}: sending press")
client_messenger.send_to_server(
client_server_message=PressButtonBaseClientServerMessage()
)
time.sleep(1)
print(f"{datetime.utcnow()}: sending announcement")
client_messenger.send_to_server(
client_server_message=AnnounceBaseClientServerMessage(
name="Test Name"
)
)
print(f"{datetime.utcnow()}: waiting for messages")
time.sleep(1)
print(f"{datetime.utcnow()}: disposing")
client_messenger.dispose()
print(f"{datetime.utcnow()}: disposed")
print(f"{datetime.utcnow()}: stopping")
server_messenger.stop_receiving_from_clients()
print(f"{datetime.utcnow()}: stopped")
time.sleep(1)
self.assertEqual(1, callback_total)
self.assertIsNotNone(found_exception)
self.assertEqual(expected_exception, found_exception)
def test_client_attempts_message_impossible_for_structure_state(self):
    """Send a message that is invalid for the server structure's current
    state and verify the client receives exactly one
    AnnounceFailedBaseClientServerMessage, with no exception captured.
    """
    # attempt to reset the presses without first pressing the button
    # NOTE(review): the comment above looks stale -- this test presses the
    # button and then announces; no reset is sent. Confirm original intent.
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # Every message received must be the announce-failure response.
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, AnnounceFailedBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop for the final assert.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending press")
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="Test Name"
        )
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_client_attempts_message_impossible_for_child_structure_state(self):
    """Announce, then press the power button four times; the sequence is
    expected to produce exactly one
    PowerOverloadTransmissionBaseClientServerMessage back to the client
    (asserted via callback_total), with no exception captured.
    """
    # call power 4 times
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(power_overload_transmission: PowerOverloadTransmissionBaseClientServerMessage):
        # The only expected server message is the power-overload transmission.
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(power_overload_transmission, PowerOverloadTransmissionBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop for the final assert.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    # The four identical power presses were copy-pasted; this loop emits
    # byte-identical console output and performs the same sends and sleeps.
    for ordinal in ("first", "second", "third", "fourth"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(
            client_server_message=PowerButtonBaseClientServerMessage(
                is_anonymous=False
            )
        )
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_order_of_messages(self):
    """Send many ordered echo requests from a single client and verify the
    responses come back in exactly the order they were sent.

    The callback tracks the last in-order index seen; the first out-of-order
    response records the last good index into failed_at_message_index, which
    must still be None at the end.
    """
    # send multiple messages from the same client to the server, expecting the response order to be the same
    messages_total = 1000
    print(f"{datetime.utcnow()}: setting up server")
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up client")
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            # record only the FIRST point at which ordering broke
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # keep only the first exception observed
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    # TODO determine why the first thread to spawn as a part of the connect_to_server process does not die
    client_messenger.connect_to_server()
    time.sleep(1)
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    time.sleep(1)
    for message_index in range(messages_total):
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing client messenger: start")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposing client messenger: end")
    time.sleep(1)
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients(): start")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients(): end")
    if found_exception is not None:
        raise found_exception
    self.assertIsNone(failed_at_message_index)
    # fixed: was print(f"end") -- f-string with no placeholders (ruff F541)
    print("end")
def test_two_clients_becoming_out_of_sync(self):
    """Alternate ordered echo requests between two clients while shrinking
    the inter-message delay by 10% each round, until either an out-of-order
    response is observed or the minimum delay is reached.

    Passes when the delay at which disorder first appeared (or the final
    delay, if none appeared) is below the accepted threshold.
    """
    # as the delay between two different clients send messages shrinks, how often are the messages received in the wrong order
    current_delay_between_messages_seconds = 1
    delay_percentage_decrease_delta = 0.1  # 10% reduction per message
    minimum_delay_between_messages_seconds = 0.0001  # hard floor for the loop
    accepted_delay_between_messages_that_could_result_in_disorder = 0.001
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up clients")
    time.sleep(1)
    client_messengers = []  # type: List[ClientMessenger]
    client_messengers.append(get_default_client_messenger_factory().get_client_messenger())
    client_messengers.append(get_default_client_messenger_factory().get_client_messenger())
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        # Shared by both clients: responses from either client feed the same
        # ordering check, so disorder between clients is detected here.
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        #print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            # record only the FIRST point at which ordering broke
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # keep only the first exception observed
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    for client_messenger in client_messengers:
        client_messenger.connect_to_server()
        client_messenger.receive_from_server(
            callback=callback,
            on_exception=on_exception
        )
    print(f"{datetime.utcnow()}: sending messages")
    client_messengers_index = 0
    message_index = 0
    # first message sent outside the loop so the loop body can sleep first
    client_messengers[client_messengers_index].send_to_server(
        client_server_message=EchoRequestBaseClientServerMessage(
            message=str(message_index),
            is_ordered=True
        )
    )
    message_index += 1
    while minimum_delay_between_messages_seconds < current_delay_between_messages_seconds and failed_at_message_index is None:
        time.sleep(current_delay_between_messages_seconds)
        # round-robin between the two clients
        client_messengers_index += 1
        if client_messengers_index == len(client_messengers):
            client_messengers_index = 0
        client_messengers[client_messengers_index].send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
        message_index += 1
        # shrink the delay geometrically for the next round
        current_delay_between_messages_seconds -= current_delay_between_messages_seconds * delay_percentage_decrease_delta
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing")
    for client_messenger in client_messengers:
        client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    if found_exception is not None:
        raise found_exception
    print(f"{datetime.utcnow()}: last successful index {failed_at_message_index} with delay of {current_delay_between_messages_seconds} seconds")
    self.assertLess(current_delay_between_messages_seconds, accepted_delay_between_messages_that_could_result_in_disorder)
def test_dispose_client_too_quickly_before_receiving_all_messages(self):
    """Send 1000 ordered echo requests and dispose the client immediately,
    without waiting for the responses; verifies no out-of-order response was
    seen and re-raises any exception captured by the receive loop.
    """
    # a thread seems to remain alive when this happens
    # NOTE: the client_socket read only gets to 988 before it stops reading
    messages_total = 1000
    print(f"{datetime.utcnow()}: setting up server")
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    print(f"{datetime.utcnow()}: setting up client")
    time.sleep(1)
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    callback_total = 0
    last_message_index = -1
    failed_at_message_index = None  # type: int
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        nonlocal last_message_index
        nonlocal failed_at_message_index
        # print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        self.assertIsInstance(client_server_message, EchoResponseBaseClientServerMessage)
        echo_response_client_server_message = client_server_message  # type: EchoResponseBaseClientServerMessage
        if int(echo_response_client_server_message.get_message()) == last_message_index + 1:
            # correct message received
            last_message_index += 1
        else:
            # record only the FIRST point at which ordering broke
            if failed_at_message_index is None:
                failed_at_message_index = last_message_index
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # keep only the first exception observed
        nonlocal found_exception
        if found_exception is None:
            found_exception = exception
    print(f"{datetime.utcnow()}: connecting to server")
    client_messenger.connect_to_server()
    print(f"{datetime.utcnow()}: receiving from server")
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for message_index in range(messages_total):
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(message_index),
                is_ordered=True
            )
        )
    # deliberately no sleep here -- disposing before responses finish is the
    # whole point of this test
    print(f"{datetime.utcnow()}: immediately disposing")
    client_messenger.dispose()
    server_messenger.stop_receiving_from_clients()
    if found_exception is not None:
        raise found_exception
    self.assertIsNone(failed_at_message_index)
def test_parse_client_server_message_raises_exception_when_receiving_in_server_messenger(self):
    """Send an ErrorRequest crafted so that server-side message construction
    raises. The exception is expected to be re-raised from
    stop_receiving_from_clients, and the client's receive loop is expected to
    observe the closed connection as a ReadWriteSocketClosedException.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture the exception surfaced by the receive loop for the asserts.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    # unique text lets the assert below confirm the exception round-tripped
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            is_constructor_exception_to_set=expected_exception
        )
    )
    print(f"{datetime.utcnow()}: wait for messages")
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    # the server-side exception is expected to surface here
    with self.assertRaises(Exception) as assertedException:
        server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(assertedException.exception))
    # the server encountered an exception, closing the connection
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
def test_getting_json_of_client_server_message_raises_exception_when_sending_to_server_messenger(self):
    """Send a message whose to_json raises: the failure is expected to occur
    client-side inside send_to_server, so the server keeps running normally
    and no exception reaches the receive loop.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        callback_total += 1
        self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop; re-raised at the end.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    # unique text lets the assert below confirm the exception round-tripped
    expected_exception = f"test exception: {uuid.uuid4()}"
    # serialization failure is expected synchronously, inside the send call
    with self.assertRaises(Exception) as assertedException:
        client_messenger.send_to_server(
            client_server_message=ErrorRequestBaseClientServerMessage(
                to_json_exception=expected_exception
            )
        )
    self.assertEqual(expected_exception, str(assertedException.exception))
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    if found_exception is not None:
        raise found_exception
def test_getting_structural_error_client_server_message_response_from_client_server_message_raises_exception_when_processing_in_server_messenger_but_succeeds(self):
    """Trigger an exception while the server builds the structural-error
    response, then verify the server survives it: a later ping must still be
    answered and no exception may reach the client's receive loop.

    The callback expects an ErrorResponse first, then a PingResponse.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        # first response is the error reply, the second is the ping reply
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop; re-raised at the end.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            get_structural_error_client_server_message_response_exception=expected_exception
        )
    )
    time.sleep(1)
    # the server should still answer this ping despite the earlier exception
    client_messenger.send_to_server(
        client_server_message=PingRequestBaseClientServerMessage()
    )
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_getting_structural_error_client_server_message_response_from_client_server_message_raises_exception_when_processing_in_server_messenger_and_causes_exception(self):
    """Unlike the sibling *_but_succeeds test, here a press is sent first so
    the structural-error exception is expected to close the connection: a
    subsequent send raises ReadWriteSocketClosedException, the receive loop
    captures the closed socket, and stop_receiving re-raises the original
    server-side exception.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        # first response is the error reply, any later one a ping reply
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture the exception surfaced by the receive loop for the asserts.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    expected_exception = f"test exception: {uuid.uuid4()}"
    # put the structure into a state (button pressed) before the error request
    client_messenger.send_to_server(
        client_server_message=PressButtonBaseClientServerMessage()
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending ErrorRequestBaseClientServerMessage")
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            get_structural_error_client_server_message_response_exception=expected_exception
        )
    )
    time.sleep(1)
    print(f"{datetime.utcnow()}: sending PingRequestBaseClientServerMessage")
    # the connection is now closed, so this send must fail
    with self.assertRaises(ReadWriteSocketClosedException):
        client_messenger.send_to_server(
            client_server_message=PingRequestBaseClientServerMessage()
        )
    print(f"{datetime.utcnow()}: waiting")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing client_messenger")
    client_messenger.dispose()
    time.sleep(1)
    print(f"{datetime.utcnow()}: server_messenger.stop_receiving_from_clients()")
    # the server-side exception is expected to surface here
    with self.assertRaises(Exception) as assertedException:
        server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(assertedException.exception))
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
def test_parse_client_server_message_in_response_raises_exception_when_parsing_in_server_messenger(self):
    """Request a response whose constructor raises when it is parsed; the
    exception is expected to surface through the client's on_exception
    handler with the exact message that was requested.
    """
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        print(f"{datetime.utcnow()}: callback: client_server_message: {client_server_message.to_json()}")
        # first response is the error reply, any later one a ping reply
        if callback_total == 0:
            self.assertIsInstance(client_server_message, ErrorResponseBaseClientServerMessage)
        else:
            self.assertIsInstance(client_server_message, PingResponseBaseClientServerMessage)
        callback_total += 1
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture the exception surfaced by the receive loop for the assert.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending error messages")
    # unique text lets the assert below confirm the exception round-tripped
    expected_exception = f"test exception: {uuid.uuid4()}"
    client_messenger.send_to_server(
        client_server_message=ErrorRequestBaseClientServerMessage(
            response_constructor_arguments={
                "is_constructor_exception_to_set": expected_exception
            }
        )
    )
    time.sleep(1)
    client_messenger.send_to_server(
        client_server_message=PingRequestBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting")
    time.sleep(1)
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    self.assertEqual(expected_exception, str(found_exception))
# TODO create more server-side ErrorResponse tests
def test_unordered_client_server_messages_100m_10s(self):
    """100 echo messages in alternating subsets of 10: even-numbered subsets
    are sent with is_ordered=True, odd-numbered ones unordered. The callback
    verifies that each stream's indexes advance sequentially, jumping over
    the other stream's subset at each boundary.

    Fix: three per-message debug prints (index / subset_index /
    previous_subset_index) were left active here while the sibling 100m_1s
    and 1000m_1s tests have the identical lines commented out; they are now
    commented out for consistency and to cut per-run console noise.
    """
    messages_total = 100
    message_subset_length = 10
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # primed so that index 0, which begins a "new" ordered subset, passes
    # the jump check below
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        #print(f"index: {index}")
        # 0 => this index belongs to an ordered subset, 1 => unordered
        subset_index = int(index / message_subset_length) % 2
        #print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        #print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                # first index of a new ordered subset: must skip exactly one
                # unordered subset
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                # first index of a new unordered subset: must skip exactly
                # one ordered subset
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop; re-raised at the end.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_unordered_client_server_messages_100m_1s(self):
    """100 echo messages with subset length 1, i.e. strictly alternating
    ordered/unordered per message. The callback verifies that each stream's
    indexes advance sequentially, jumping over the other stream's single
    message at every step.
    """
    messages_total = 100
    message_subset_length = 1
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # primed so that index 0, which begins a "new" ordered subset, passes
    # the jump check below
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        #print(f"index: {index}")
        # 0 => this index belongs to an ordered subset, 1 => unordered
        subset_index = int(index / message_subset_length) % 2
        #print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        #print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                # first index of a new ordered subset: must skip exactly one
                # unordered subset
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                # first index of a new unordered subset: must skip exactly
                # one ordered subset
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop; re-raised at the end.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_unordered_client_server_messages_1000m_1s(self):
    """1000 echo messages with subset length 1, i.e. strictly alternating
    ordered/unordered per message -- the higher-volume variant of
    test_unordered_client_server_messages_100m_1s.
    """
    messages_total = 1000
    message_subset_length = 1
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    client_messenger.connect_to_server()
    callback_total = 0
    # primed so that index 0, which begins a "new" ordered subset, passes
    # the jump check below
    previous_ordered_index = -1 - message_subset_length
    previous_unordered_index = -1
    is_printing = False
    def callback(echo_response: EchoResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal previous_ordered_index
        nonlocal previous_unordered_index
        nonlocal is_printing
        nonlocal message_subset_length
        #print(f"{datetime.utcnow()}: callback: echo_response: {echo_response.to_json()}")
        self.assertIsInstance(echo_response, EchoResponseBaseClientServerMessage)
        callback_total += 1
        index = int(echo_response.get_message())
        #print(f"index: {index}")
        # 0 => this index belongs to an ordered subset, 1 => unordered
        subset_index = int(index / message_subset_length) % 2
        #print(f"subset_index: {subset_index}")
        previous_subset_index = math.floor((index - 1) / message_subset_length) % 2
        #print(f"previous_subset_index: {previous_subset_index}")
        if subset_index == 0:
            if previous_subset_index != subset_index:
                # first index of a new ordered subset: must skip exactly one
                # unordered subset
                if previous_ordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            else:
                if previous_ordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found ordered index: {index}")
            previous_ordered_index = index
        else:
            if previous_subset_index != subset_index:
                # first index of a new unordered subset: must skip exactly
                # one ordered subset
                if previous_unordered_index + message_subset_length + 1 != index:
                    raise Exception(f"Failed to jump to next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            else:
                if previous_unordered_index + 1 != index:
                    raise Exception(f"Failed to find next index at index: {index}")
                else:
                    if is_printing:
                        print(f"{datetime.utcnow()}: found unordered index: {index}")
            previous_unordered_index = index
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop; re-raised at the end.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending messages")
    for index in range(messages_total):
        subset_index = int(index / message_subset_length) % 2
        client_messenger.send_to_server(
            client_server_message=EchoRequestBaseClientServerMessage(
                message=str(index),
                is_ordered=(subset_index == 0)
            )
        )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(5)
    print(f"{datetime.utcnow()}: disposing client")
    client_messenger.dispose()
    time.sleep(1)
    server_messenger.stop_receiving_from_clients()
    # the server encountered an exception but did not close the connect due to it and is still receiving requests
    if found_exception is not None:
        raise found_exception
def test_child_structure_power_once_then_reset(self):
    """Announce, press the power button once, then reset: no message is
    expected back from the server (callback_total must stay 0) and no
    exception may reach the receive loop.
    """
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        # Any delivery here fails the final assertEqual(0, callback_total).
        nonlocal callback_total
        callback_total += 1
        print(f"received callback: {client_server_message}")
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop for the final assert.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    print(f"{datetime.utcnow()}: sending first power")
    client_messenger.send_to_server(
        client_server_message=PowerButtonBaseClientServerMessage(
            is_anonymous=False
        )
    )
    print(f"{datetime.utcnow()}: sending reset")
    client_messenger.send_to_server(
        client_server_message=ResetButtonBaseClientServerMessage()
    )
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(0, callback_total)
    self.assertIsNone(found_exception)
def test_child_structure_power_four_times(self):
    """Announce, then press the power button four times; the sequence is
    expected to produce exactly one
    PowerOverloadTransmissionBaseClientServerMessage back to the client
    (asserted via callback_total), with no exception captured.
    """
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)  # allow the server to begin listening before connecting
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(power_overload_transmission: PowerOverloadTransmissionBaseClientServerMessage):
        # The only expected server message is the power-overload transmission.
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(power_overload_transmission, PowerOverloadTransmissionBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        # Capture any exception from the receive loop for the final assert.
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(
        callback=callback,
        on_exception=on_exception
    )
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(
        client_server_message=AnnounceBaseClientServerMessage(
            name="First"
        )
    )
    time.sleep(0.1)
    # The four identical power presses were copy-pasted; this loop emits
    # byte-identical console output and performs the same sends and sleeps.
    for ordinal in ("first", "second", "third", "fourth"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(
            client_server_message=PowerButtonBaseClientServerMessage(
                is_anonymous=False
            )
        )
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: disposing")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: disposed")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(1, callback_total)
    self.assertIsNone(found_exception)
def test_child_structure_power_two_times_anonymous_underpowered_and_power_attempt_with_failure(self):
    """Two anonymous power presses leave the structure "underpowered";
    a second client's three button presses observe that state, and a
    further anonymous power attempt fails."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(client_server_message, PowerButtonFailedBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(client_server_message=AnnounceBaseClientServerMessage(name="First"))
    time.sleep(0.1)
    # Two anonymous power presses, paced 100 ms apart.
    for ordinal in ("first", "second"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=True))
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    press_client_messenger.connect_to_server()
    def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
        self.assertEqual("underpowered", three_presses_transmission.get_power())
    press_client_messenger.receive_from_server(callback=press_callback, on_exception=on_exception)
    time.sleep(0.1)
    # Three button presses from the second client trigger the transmission.
    for ordinal in ("first", "second", "third"):
        print(f"{datetime.utcnow()}: {ordinal} press: start")
        press_client_messenger.send_to_server(client_server_message=PressButtonBaseClientServerMessage())
        print(f"{datetime.utcnow()}: {ordinal} press: end")
        time.sleep(0.1)
    # A third anonymous power press is expected to fail back to the sender.
    print(f"{datetime.utcnow()}: third power: start")
    client_messenger.send_to_server(client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=True))
    print(f"{datetime.utcnow()}: third power: end")
    time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: dispose client_messenger: start")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose client_messenger: end")
    print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
    press_client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(2, callback_total)
    if found_exception is not None:
        raise found_exception
def test_child_structure_power_three_times_anonymous_powered(self):
    """Three anonymous power presses leave the structure "powered"; the
    powering client itself must receive nothing back."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        raise Exception("This client should not be receiving messages.")
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(client_server_message=AnnounceBaseClientServerMessage(name="First"))
    time.sleep(0.1)
    # Three anonymous power presses, paced 100 ms apart.
    for ordinal in ("first", "second", "third"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=True))
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    press_client_messenger.connect_to_server()
    def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
        self.assertEqual("powered", three_presses_transmission.get_power())
    press_client_messenger.receive_from_server(callback=press_callback, on_exception=on_exception)
    time.sleep(0.1)
    # Three button presses from the second client trigger the transmission.
    for ordinal in ("first", "second", "third"):
        print(f"{datetime.utcnow()}: {ordinal} press: start")
        press_client_messenger.send_to_server(client_server_message=PressButtonBaseClientServerMessage())
        print(f"{datetime.utcnow()}: {ordinal} press: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: dispose client_messenger: start")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose client_messenger: end")
    print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
    press_client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(1, callback_total)
    if found_exception is not None:
        raise found_exception
def test_child_structure_power_four_times_anonymous_overpowered(self):
    """Four anonymous power presses push the structure to "overpowered";
    the powering client itself must receive nothing back."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(client_server_message: ClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        raise Exception("This client should not be receiving messages.")
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    print(f"{datetime.utcnow()}: sending first announcement")
    client_messenger.send_to_server(client_server_message=AnnounceBaseClientServerMessage(name="First"))
    time.sleep(0.1)
    # Four anonymous power presses, paced 100 ms apart.
    for ordinal in ("first", "second", "third", "fourth"):
        print(f"{datetime.utcnow()}: {ordinal} power: start")
        client_messenger.send_to_server(client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=True))
        print(f"{datetime.utcnow()}: {ordinal} power: end")
        time.sleep(0.1)
    press_client_messenger = get_default_client_messenger_factory().get_client_messenger()
    press_client_messenger.connect_to_server()
    def press_callback(three_presses_transmission: ThreePressesTransmissionBaseClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(three_presses_transmission, ThreePressesTransmissionBaseClientServerMessage)
        self.assertEqual("overpowered", three_presses_transmission.get_power())
    press_client_messenger.receive_from_server(callback=press_callback, on_exception=on_exception)
    time.sleep(0.1)
    # Three button presses from the second client trigger the transmission.
    for ordinal in ("first", "second", "third"):
        print(f"{datetime.utcnow()}: {ordinal} press: start")
        press_client_messenger.send_to_server(client_server_message=PressButtonBaseClientServerMessage())
        print(f"{datetime.utcnow()}: {ordinal} press: end")
        time.sleep(0.1)
    print(f"{datetime.utcnow()}: waiting for messages")
    time.sleep(1)
    print(f"{datetime.utcnow()}: dispose client_messenger: start")
    client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose client_messenger: end")
    print(f"{datetime.utcnow()}: dispose press_client_messenger: start")
    press_client_messenger.dispose()
    print(f"{datetime.utcnow()}: dispose press_client_messenger: end")
    print(f"{datetime.utcnow()}: stopping")
    server_messenger.stop_receiving_from_clients()
    print(f"{datetime.utcnow()}: stopped")
    time.sleep(1)
    self.assertEqual(1, callback_total)
    if found_exception is not None:
        raise found_exception
def test_child_structure_power_five_times_anonymous_impossible_state(self):
    """A fifth anonymous power press goes past the last valid state and
    must come back as a PowerButtonFailed message."""
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    def callback(power_button_failed: PowerButtonFailedBaseClientServerMessage):
        nonlocal callback_total
        callback_total += 1
        self.assertIsInstance(power_button_failed, PowerButtonFailedBaseClientServerMessage)
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    try:
        print(f"{datetime.utcnow()}: sending first announcement")
        client_messenger.send_to_server(client_server_message=AnnounceBaseClientServerMessage(name="First"))
        time.sleep(0.1)
        # Five anonymous power presses, paced 100 ms apart; only the
        # fifth is expected to fail.
        for ordinal in ("first", "second", "third", "fourth", "fifth"):
            print(f"{datetime.utcnow()}: {ordinal} power: start")
            client_messenger.send_to_server(client_server_message=PowerButtonBaseClientServerMessage(is_anonymous=True))
            print(f"{datetime.utcnow()}: {ordinal} power: end")
            time.sleep(0.1)
        print(f"{datetime.utcnow()}: waiting for messages")
        time.sleep(1)
    finally:
        # Always tear down the client and the server, even on failure.
        print(f"{datetime.utcnow()}: dispose client_messenger: start")
        client_messenger.dispose()
        print(f"{datetime.utcnow()}: dispose client_messenger: end")
        print(f"{datetime.utcnow()}: stopping")
        server_messenger.stop_receiving_from_clients()
        print(f"{datetime.utcnow()}: stopped")
        time.sleep(1)
    self.assertEqual(1, callback_total)
    if found_exception is not None:
        raise found_exception
def test_timer_request_1s(self):
    """A 1-second timer request echoes its message back exactly once."""
    def log(text):
        # Timestamped progress output, same format as the sibling tests.
        print(f"{datetime.utcnow()}: {text}")
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    expected_message = str(uuid.uuid4())
    def callback(timer_response: TimerResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal expected_message
        callback_total += 1
        log("received message")
        self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
        self.assertEqual(expected_message, timer_response.get_message())
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    time.sleep(0.1)
    log("sending message")
    client_messenger.send_to_server(client_server_message=TimerRequestBaseClientServerMessage(message=expected_message, seconds=1.0))
    log("waiting for messages")
    time.sleep(5)
    log("dispose client_messenger: start")
    client_messenger.dispose()
    log("dispose client_messenger: end")
    log("stopping")
    server_messenger.stop_receiving_from_clients()
    log("stopped")
    time.sleep(5)
    self.assertEqual(1, callback_total)
    if found_exception is not None:
        raise found_exception
def test_timer_request_after_client_disposed(self):
    """If the client is disposed before a 3-second timer fires, the
    response must never be delivered (callback stays at zero)."""
    def log(text):
        # Timestamped progress output, same format as the sibling tests.
        print(f"{datetime.utcnow()}: {text}")
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    expected_message = str(uuid.uuid4())
    def callback(timer_response: TimerResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal expected_message
        callback_total += 1
        log("received message")
        self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
        self.assertEqual(expected_message, timer_response.get_message())
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    time.sleep(0.1)
    log("sending message")
    client_messenger.send_to_server(client_server_message=TimerRequestBaseClientServerMessage(message=expected_message, seconds=3.0))
    log("waiting for messages")
    time.sleep(0.5)
    # Dispose well before the 3-second timer can fire.
    log("dispose client_messenger: start")
    client_messenger.dispose()
    log("dispose client_messenger: end")
    time.sleep(4.0)
    log("stopping")
    server_messenger.stop_receiving_from_clients()
    log("stopped")
    time.sleep(5)
    self.assertEqual(0, callback_total)
    if found_exception is not None:
        raise found_exception
def test_timer_request_after_server_stopped(self):
    """If the server stops before a 10-second timer fires, the client
    never gets a response and instead observes the socket closing."""
    def log(text):
        # Timestamped progress output, same format as the sibling tests.
        print(f"{datetime.utcnow()}: {text}")
    client_messenger = get_default_client_messenger_factory().get_client_messenger()
    server_messenger = get_default_server_messenger_factory().get_server_messenger()
    server_messenger.start_receiving_from_clients()
    time.sleep(1)
    client_messenger.connect_to_server()
    callback_total = 0
    expected_message = str(uuid.uuid4())
    def callback(timer_response: TimerResponseBaseClientServerMessage):
        nonlocal callback_total
        nonlocal expected_message
        callback_total += 1
        log("received message")
        self.assertIsInstance(timer_response, TimerResponseBaseClientServerMessage)
        self.assertEqual(expected_message, timer_response.get_message())
    found_exception = None  # type: Exception
    def on_exception(exception: Exception):
        nonlocal found_exception
        found_exception = exception
    client_messenger.receive_from_server(callback=callback, on_exception=on_exception)
    time.sleep(0.1)
    log("sending message")
    client_messenger.send_to_server(client_server_message=TimerRequestBaseClientServerMessage(message=expected_message, seconds=10.0))
    log("waiting for messages")
    time.sleep(0.5)
    # Stop the server long before the 10-second timer can fire.
    log("stopping server")
    server_messenger.stop_receiving_from_clients()
    log("stopped server")
    time.sleep(12.0)
    log("dispose client_messenger: start")
    client_messenger.dispose()
    log("dispose client_messenger: end")
    time.sleep(5)
    self.assertEqual(0, callback_total)
    # The receive loop should have surfaced the closed socket.
    self.assertIsInstance(found_exception, ReadWriteSocketClosedException)
# TODO determine where the lingering thread is (2021-12-09)
| 30.121696
| 312
| 0.773758
| 17,000
| 147,024
| 6.312353
| 0.030176
| 0.024825
| 0.063564
| 0.064486
| 0.888091
| 0.870338
| 0.859948
| 0.847982
| 0.834116
| 0.820287
| 0
| 0.005348
| 0.126224
| 147,024
| 4,880
| 313
| 30.127869
| 0.829971
| 0.044027
| 0
| 0.759248
| 0
| 0.001213
| 0.163056
| 0.035729
| 0
| 0
| 0
| 0.000205
| 0.03305
| 1
| 0.074894
| false
| 0.002426
| 0.004548
| 0.014857
| 0.118557
| 0.134324
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0bea0580eced9d7ca4edc03687de7f17026a4300
| 11,168
|
py
|
Python
|
testapp/tests/test_migrations.py
|
18F/django-pg-fts
|
db1d5ed05215408068c6eab06dff668d19c50c36
|
[
"BSD-2-Clause-FreeBSD"
] | 22
|
2015-01-06T08:17:28.000Z
|
2021-10-03T11:41:22.000Z
|
testapp/tests/test_migrations.py
|
18F/django-pg-fts
|
db1d5ed05215408068c6eab06dff668d19c50c36
|
[
"BSD-2-Clause-FreeBSD"
] | 8
|
2015-01-06T09:21:56.000Z
|
2019-01-19T17:57:16.000Z
|
testapp/tests/test_migrations.py
|
dvdmgl/django-pg-fts
|
38e2353dc1eec3ecbd10f6c8624c30ebd779cf8a
|
[
"BSD-2-Clause-FreeBSD"
] | 19
|
2015-01-24T10:09:35.000Z
|
2019-05-15T18:15:55.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
from pg_fts.introspection import PgFTSIntrospection
from django.test import (override_settings, override_system_checks,
TestCase, TransactionTestCase)
from django.utils import six
from django.core.management import call_command
try:
from django.db.backends import TableInfo
table_info = True
except:
table_info = False
__all__ = ('FTSTestBase', 'CreateOperationTestSQL',
'TransactionsMigrationsTest')
class FTSTestBase(TransactionTestCase):
    """TransactionTestCase with assertion helpers for full-text-search
    database objects: indexes, triggers, functions and tables."""

    introspection = PgFTSIntrospection()

    def assertIndexExists(self, index, table):
        """Assert that *index* is among the indexes of *table*."""
        with connection.cursor() as cursor:
            indexes = connection.introspection.get_indexes(cursor, table)
        self.assertIn(index, indexes)

    def assertIndexNotExists(self, index, table):
        """Assert that *index* is not among the indexes of *table*."""
        with connection.cursor() as cursor:
            indexes = connection.introspection.get_indexes(cursor, table)
        self.assertNotIn(index, indexes)

    def assertTriggerExists(self, trigger):
        """Assert that *trigger* exists in the database."""
        with connection.cursor() as cursor:
            triggers = self.introspection.get_trigger_list(cursor)
        self.assertIn(trigger, triggers)

    def assertTriggerNotExists(self, trigger):
        """Assert that *trigger* does not exist in the database."""
        with connection.cursor() as cursor:
            triggers = self.introspection.get_trigger_list(cursor)
        self.assertNotIn(trigger, triggers)

    def assertFunctionExists(self, function):
        """Assert that *function* exists in the database."""
        with connection.cursor() as cursor:
            functions = self.introspection.get_functions_list(cursor)
        self.assertIn(function, functions)

    def assertFunctionNotExists(self, function):
        """Assert that *function* does not exist in the database."""
        with connection.cursor() as cursor:
            functions = self.introspection.get_functions_list(cursor)
        self.assertNotIn(function, functions)

    def assertTableExists(self, table):
        """Assert that *table* is present in the database."""
        if table_info:
            # Newer Django returns TableInfo tuples from get_table_list.
            table = TableInfo(name=table, type='t')
        with connection.cursor() as cursor:
            tables = connection.introspection.get_table_list(cursor)
        self.assertIn(table, tables)

    def assertTableNotExists(self, table):
        """Assert that *table* is absent from the database."""
        if table_info:
            # Newer Django returns TableInfo tuples from get_table_list.
            table = TableInfo(name=table, type='t')
        with connection.cursor() as cursor:
            tables = connection.introspection.get_table_list(cursor)
        self.assertNotIn(table, tables)
class CreateOperationTestSQL(TestCase):
    """Assert the SQL emitted by ``sqlmigrate`` for the FTS operations,
    for a single-dictionary setup (``migrations_index``) and a
    multi-dictionary setup (``migrations_multidict``).

    Changes from the original: the repeated StringIO/call_command
    boilerplate is factored into :meth:`_sqlmigrate`, and a duplicated
    identical assertion in ``test_sql_fts_index_multi`` was removed
    (it asserted the exact same string twice).
    """

    def _sqlmigrate(self, migration):
        """Run ``sqlmigrate testapp <migration>`` and return its output."""
        stdout = six.StringIO()
        call_command('sqlmigrate', 'testapp', migration, stdout=stdout)
        return stdout.getvalue()

    # single dictionary

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_index"})
    def test_sql_migrate_creates_vector_field(self):
        sql = self._sqlmigrate('0002')
        self.assertIn('"tsvector" tsvector null', sql.lower())
        self.assertIn(
            "UPDATE testapp_tsvectormodel SET tsvector = setweight(to_tsvector('english', COALESCE(title, '')), 'D') || setweight(to_tsvector('english', COALESCE(body, '')), 'D');",
            sql)

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_index"})
    def test_sql_fts_index(self):
        self.assertIn(
            ('CREATE INDEX testapp_tsvectormodel_tsvector ON '
             'testapp_tsvectormodel USING gin(tsvector);'),
            self._sqlmigrate('0003'))

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_index"})
    def test_sql_fts_trigger(self):
        # Compare whitespace-insensitively: join on split strips all runs
        # of whitespace from both the expected and the generated SQL.
        self.assertIn(''.join(
            """
CREATE FUNCTION testapp_tsvectormodel_tsvector_update() RETURNS TRIGGER AS $$
BEGIN
IF TG_OP = 'INSERT' THEN
new.tsvector = setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'D') || setweight(to_tsvector('english', COALESCE(NEW.body, '')), 'D');
END IF;
IF TG_OP = 'UPDATE' THEN
IF NEW.title <> OLD.title OR NEW.body <> OLD.body THEN
new.tsvector = setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'D') || setweight(to_tsvector('english', COALESCE(NEW.body, '')), 'D');
ELSE
new.tsvector = old.tsvector;
END IF;
END IF;
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER testapp_tsvectormodel_tsvector_update BEFORE INSERT OR UPDATE ON testapp_tsvectormodel
FOR EACH ROW EXECUTE PROCEDURE testapp_tsvectormodel_tsvector_update();
""".split()),
            ''.join(self._sqlmigrate('0004').split())
        )

    # multiple dictionaries

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_multidict"})
    def test_sql_migrate_creates_vector_field_multi(self):
        sql = self._sqlmigrate('0002')
        self.assertIn('"tsvector" tsvector null', sql.lower())
        self.assertIn(
            "UPDATE testapp_tsvectormodel SET tsvector = setweight(to_tsvector(dictionary::regconfig, COALESCE(title, '')), 'D') || setweight(to_tsvector(dictionary::regconfig, COALESCE(body, '')), 'D');",
            sql)

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_multidict"})
    def test_sql_fts_index_multi(self):
        # The original asserted this exact string twice; once suffices.
        self.assertIn(
            ('CREATE INDEX testapp_tsvectormodel_tsvector ON '
             'testapp_tsvectormodel USING gin(tsvector);'),
            self._sqlmigrate('0003'))

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_multidict"})
    def test_sql_fts_trigger_multi(self):
        # Compare whitespace-insensitively, as in test_sql_fts_trigger.
        self.assertIn(''.join(
            """
CREATE FUNCTION testapp_tsvectormodel_tsvector_update() RETURNS TRIGGER AS $$
BEGIN
IF TG_OP = 'INSERT' THEN
new.tsvector = setweight(to_tsvector(NEW.dictionary::regconfig, COALESCE(NEW.title, '')), 'D') || setweight(to_tsvector(NEW.dictionary::regconfig, COALESCE(NEW.body, '')), 'D');
END IF;
IF TG_OP = 'UPDATE' THEN
IF NEW.dictionary <> OLD.dictionary OR NEW.title <> OLD.title OR NEW.body <> OLD.body THEN
new.tsvector = setweight(to_tsvector(NEW.dictionary::regconfig, COALESCE(NEW.title, '')), 'D') || setweight(to_tsvector(NEW.dictionary::regconfig, COALESCE(NEW.body, '')), 'D');
ELSE
new.tsvector = old.tsvector;
END IF;
END IF;
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER testapp_tsvectormodel_tsvector_update BEFORE INSERT OR UPDATE ON testapp_tsvectormodel
FOR EACH ROW EXECUTE PROCEDURE testapp_tsvectormodel_tsvector_update();
""".split()),
            ''.join(self._sqlmigrate('0004').split())
        )
class TransactionsMigrationsTest(FTSTestBase):
    """Apply the testapp migrations forwards to 0005 and back to 0001,
    asserting index/trigger/function/table existence at each step.

    The two test methods were byte-identical duplicates; the shared body
    now lives in :meth:`_assert_forwards_backwards`.
    """
    # available_apps = ["testapp"]

    def _assert_forwards_backwards(self):
        """Migrate 0002→0005 and back to 0001, checking DB state."""
        stdout = six.StringIO()
        call_command('migrate', 'testapp', '0002', stdout=stdout)
        self.assertTableExists('testapp_tsvectormodel')
        self.assertIndexNotExists('tsvector',
                                  'testapp_tsvectormodel')
        call_command('migrate', 'testapp', '0003', stdout=stdout)
        self.assertIndexExists('tsvector', 'testapp_tsvectormodel')
        self.assertFunctionNotExists('testapp_tsvectormodel_tsvector_update')
        self.assertTriggerNotExists('testapp_tsvectormodel_tsvector_update')
        call_command('migrate', 'testapp', '0004', stdout=stdout)
        self.assertFunctionExists('testapp_tsvectormodel_tsvector_update')
        self.assertTriggerExists('testapp_tsvectormodel_tsvector_update')
        call_command('migrate', 'testapp', '0005', stdout=stdout)
        self.assertFunctionNotExists('testapp_tsvectormodel_tsvector_update')
        self.assertTriggerNotExists('testapp_tsvectormodel_tsvector_update')
        self.assertTableNotExists('testapp_tsvectormodel')
        # and now backwards
        call_command('migrate', 'testapp', '0004', stdout=stdout)
        self.assertFunctionExists('testapp_tsvectormodel_tsvector_update')
        self.assertTriggerExists('testapp_tsvectormodel_tsvector_update')
        self.assertTableExists('testapp_tsvectormodel')
        call_command('migrate', 'testapp', '0003', stdout=stdout)
        self.assertFunctionNotExists('testapp_tsvectormodel_tsvector_update')
        self.assertTriggerNotExists('testapp_tsvectormodel_tsvector_update')
        call_command('migrate', 'testapp', '0002', stdout=stdout)
        self.assertIndexNotExists('tsvector',
                                  'testapp_tsvectormodel')
        call_command('migrate', 'testapp', '0001', stdout=stdout)

    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_index"})
    def test_migrate_forwards_backwards(self):
        self._assert_forwards_backwards()

    # NOTE(review): this looks like a copy-paste of the test above — the
    # "_multi" suffix suggests MIGRATION_MODULES was meant to point at
    # "testapp.migrations_multidict" (as the *_multi sqlmigrate tests do).
    # Left unchanged pending confirmation that migrations_multidict has a
    # 0005 migration; behavior is preserved.
    @override_system_checks([])
    @override_settings(MIGRATION_MODULES={"testapp": "testapp.migrations_index"})
    def test_migrate_forwards_backwards_multi(self):
        self._assert_forwards_backwards()
| 46.533333
| 205
| 0.682217
| 1,089
| 11,168
| 6.770432
| 0.124885
| 0.130205
| 0.110132
| 0.119897
| 0.892174
| 0.888919
| 0.877255
| 0.842669
| 0.778652
| 0.764139
| 0
| 0.009987
| 0.202006
| 11,168
| 239
| 206
| 46.728033
| 0.817325
| 0.009402
| 0
| 0.730994
| 0
| 0.011696
| 0.263294
| 0.171259
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.093567
| false
| 0
| 0.040936
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0bfe81e2476359c7bcfaaab3cefa0860351c9b7f
| 42
|
py
|
Python
|
storybro/__init__.py
|
MikkoMMM/storybro
|
aa55e5fb0f21bd58fd396a9ca004b1e16e7d6f92
|
[
"MIT"
] | 67
|
2019-12-29T20:17:59.000Z
|
2022-02-10T11:03:31.000Z
|
storybro/__init__.py
|
MikkoMMM/storybro
|
aa55e5fb0f21bd58fd396a9ca004b1e16e7d6f92
|
[
"MIT"
] | 20
|
2019-12-29T18:33:58.000Z
|
2020-01-21T09:15:56.000Z
|
storybro/__init__.py
|
MikkoMMM/storybro
|
aa55e5fb0f21bd58fd396a9ca004b1e16e7d6f92
|
[
"MIT"
] | 13
|
2020-01-02T04:17:59.000Z
|
2021-11-08T01:52:56.000Z
|
from .cli import ep
def main():
    """Console-script entry point: delegate to the package's ``ep`` CLI."""
    ep()
| 8.4
| 19
| 0.571429
| 7
| 42
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 42
| 4
| 20
| 10.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0457fcf114f059d35c0d816567d276a1426f1809
| 3,174
|
py
|
Python
|
tests/data/docstring_no_string_normalization.py
|
AppliedIntuition/black
|
bc0c5ca9d41956bf5ed9fc851202579f9e52a338
|
[
"MIT"
] | 28
|
2018-09-23T16:01:51.000Z
|
2022-03-27T18:20:08.000Z
|
tests/data/docstring_no_string_normalization.py
|
AppliedIntuition/black
|
bc0c5ca9d41956bf5ed9fc851202579f9e52a338
|
[
"MIT"
] | 22
|
2018-10-10T11:49:07.000Z
|
2022-03-28T02:16:07.000Z
|
tests/data/docstring_no_string_normalization.py
|
AppliedIntuition/black
|
bc0c5ca9d41956bf5ed9fc851202579f9e52a338
|
[
"MIT"
] | 10
|
2020-09-29T21:52:36.000Z
|
2021-09-29T01:02:11.000Z
|
class ALonelyClass:
'''
A multiline class docstring.
'''
def AnEquallyLonelyMethod(self):
'''
A multiline method docstring'''
pass
def one_function():
'''This is a docstring with a single line of text.'''
pass
def shockingly_the_quotes_are_normalized():
'''This is a multiline docstring.
This is a multiline docstring.
This is a multiline docstring.
'''
pass
def foo():
"""This is a docstring with
some lines of text here
"""
return
def baz():
'''"This" is a string with some
embedded "quotes"'''
return
def poit():
"""
Lorem ipsum dolor sit amet.
Consectetur adipiscing elit:
- sed do eiusmod tempor incididunt ut labore
- dolore magna aliqua
- enim ad minim veniam
- quis nostrud exercitation ullamco laboris nisi
- aliquip ex ea commodo consequat
"""
pass
def under_indent():
"""
These lines are indented in a way that does not
make sense.
"""
pass
def over_indent():
"""
This has a shallow indent
- But some lines are deeper
- And the closing quote is too deep
"""
pass
def single_line():
"""But with a newline after it!
"""
pass
def this():
r"""
'hey ho'
"""
def that():
""" "hey yah" """
def and_that():
"""
"hey yah" """
def and_this():
'''
"hey yah"'''
def believe_it_or_not_this_is_in_the_py_stdlib(): '''
"hey yah"'''
def shockingly_the_quotes_are_normalized_v2():
'''
Docstring Docstring Docstring
'''
pass
# output
class ALonelyClass:
'''
A multiline class docstring.
'''
def AnEquallyLonelyMethod(self):
'''
A multiline method docstring'''
pass
def one_function():
'''This is a docstring with a single line of text.'''
pass
def shockingly_the_quotes_are_normalized():
'''This is a multiline docstring.
This is a multiline docstring.
This is a multiline docstring.
'''
pass
def foo():
"""This is a docstring with
some lines of text here
"""
return
def baz():
'''"This" is a string with some
embedded "quotes"'''
return
def poit():
"""
Lorem ipsum dolor sit amet.
Consectetur adipiscing elit:
- sed do eiusmod tempor incididunt ut labore
- dolore magna aliqua
- enim ad minim veniam
- quis nostrud exercitation ullamco laboris nisi
- aliquip ex ea commodo consequat
"""
pass
def under_indent():
"""
These lines are indented in a way that does not
make sense.
"""
pass
def over_indent():
"""
This has a shallow indent
- But some lines are deeper
- And the closing quote is too deep
"""
pass
def single_line():
"""But with a newline after it!"""
pass
def this():
r"""
'hey ho'
"""
def that():
""" "hey yah" """
def and_that():
"""
"hey yah" """
def and_this():
'''
"hey yah"'''
def believe_it_or_not_this_is_in_the_py_stdlib():
'''
"hey yah"'''
def shockingly_the_quotes_are_normalized_v2():
'''
Docstring Docstring Docstring
'''
pass
| 15.114286
| 57
| 0.596093
| 401
| 3,174
| 4.598504
| 0.249377
| 0.053145
| 0.045553
| 0.052061
| 0.996746
| 0.996746
| 0.996746
| 0.996746
| 0.996746
| 0.996746
| 0
| 0.000894
| 0.295211
| 3,174
| 209
| 58
| 15.186603
| 0.823424
| 0.517013
| 0
| 0.909091
| 0
| 0
| 0.009607
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.545455
| false
| 0.290909
| 0
| 0
| 0.654545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
045e0ee4c926163446f46da681af7c5fb0b2c054
| 170
|
py
|
Python
|
tests/sort_test.py
|
spencerpomme/pyalgolib
|
d055287caa4a779ea833c7efc305cd4f966bd841
|
[
"MIT"
] | null | null | null |
tests/sort_test.py
|
spencerpomme/pyalgolib
|
d055287caa4a779ea833c7efc305cd4f966bd841
|
[
"MIT"
] | null | null | null |
tests/sort_test.py
|
spencerpomme/pyalgolib
|
d055287caa4a779ea833c7efc305cd4f966bd841
|
[
"MIT"
] | null | null | null |
from algorithms.sort import Comparison
from algorithms.sort import NonComparison
if __name__ == "__main__":
Comparison.bubble_sort()
NonComparison.radix_sort()
| 21.25
| 41
| 0.782353
| 19
| 170
| 6.473684
| 0.578947
| 0.227642
| 0.292683
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 170
| 7
| 42
| 24.285714
| 0.842466
| 0
| 0
| 0
| 0
| 0
| 0.047059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f0bc193e8121ee47b05c25fb3510e897a76eec5f
| 11,082
|
py
|
Python
|
tests/models/test_color666_pauli.py
|
dua-arpit/qecsim
|
70ded606a653fd96d517e07fbba15d9b755df752
|
[
"BSD-3-Clause"
] | 35
|
2021-02-08T08:32:54.000Z
|
2022-03-22T05:35:06.000Z
|
tests/models/test_color666_pauli.py
|
dua-arpit/qecsim
|
70ded606a653fd96d517e07fbba15d9b755df752
|
[
"BSD-3-Clause"
] | 2
|
2021-08-05T06:10:35.000Z
|
2021-08-20T12:44:10.000Z
|
tests/models/test_color666_pauli.py
|
dua-arpit/qecsim
|
70ded606a653fd96d517e07fbba15d9b755df752
|
[
"BSD-3-Clause"
] | 7
|
2021-02-11T17:32:47.000Z
|
2021-11-30T12:34:41.000Z
|
import pytest
from qecsim import paulitools as pt
from qecsim.models.color import Color666Code
@pytest.mark.parametrize('size', [
3, 5, 7, 9
])
def test_color666_pauli_properties(size):
lattice = Color666Code(size)
pauli = lattice.new_pauli()
assert pauli.code == lattice
assert isinstance(repr(pauli), str)
assert isinstance(str(pauli), str)
@pytest.mark.parametrize('pauli', [
Color666Code(5).new_pauli(),
Color666Code(5).new_pauli().plaquette('X', (2, 0)).plaquette('Z', (5, 3)),
Color666Code(5).new_pauli().logical_x().plaquette('X', (6, 5)).plaquette('Y', (4, 1)),
Color666Code(5).new_pauli().logical_z().plaquette('Z', (1, 1)).plaquette('Z', (6, 2)),
Color666Code(5).new_pauli().logical_x().plaquette('X', (5, 3)).plaquette('X', (4, 4)),
Color666Code(5).new_pauli().logical_z().plaquette('Z', (4, 1)).plaquette('Z', (3, 2)),
])
def test_color666_pauli_new_to_bsf(pauli):
assert pauli.code.new_pauli(pauli.to_bsf()) == pauli, (
'Conversion to_bsf+from_bsf does not result in equality.')
@pytest.mark.parametrize('pauli', [
Color666Code(5).new_pauli(),
Color666Code(5).new_pauli().plaquette('X', (2, 0)).plaquette('Z', (5, 3)),
Color666Code(5).new_pauli().logical_x().plaquette('X', (6, 5)).plaquette('Y', (4, 1)),
Color666Code(5).new_pauli().logical_z().plaquette('Z', (1, 1)).plaquette('Z', (6, 2)),
Color666Code(5).new_pauli().logical_x().plaquette('X', (5, 3)).plaquette('X', (4, 4)),
Color666Code(5).new_pauli().logical_z().plaquette('Z', (4, 1)).plaquette('Z', (3, 2)),
])
def test_color666_pauli_copy(pauli):
copy_pauli = pauli.copy()
assert copy_pauli == pauli, 'Copy Pauli does not equal original Pauli'
assert copy_pauli.logical_x() != pauli, 'Modified copy Pauli still equals original Pauli'
@pytest.mark.parametrize('pauli, index, expected', [
(Color666Code(5).new_pauli(), (0, 0), 'I'),
(Color666Code(5).new_pauli().site('X', (2, 2)), (2, 2), 'X'),
(Color666Code(5).new_pauli().site('Y', (5, 5)), (5, 5), 'Y'),
(Color666Code(5).new_pauli().site('Z', (4, 3)), (4, 3), 'Z'),
(Color666Code(5).new_pauli().site('X', (2, 2)), (1, 0), 'I'),
(Color666Code(5).new_pauli().site('Y', (5, 5)), (6, 4), 'I'),
(Color666Code(5).new_pauli().site('Z', (4, 3)), (3, 1), 'I'),
])
def test_color666_pauli_operator(pauli, index, expected):
assert pauli.operator(index) == expected
@pytest.mark.parametrize('size, index', [
(5, (2, 0)), # not a site index
(5, (3, 2)), # not a site index
(5, (-1, -1)), # out of bounds
(5, (2, -1)), # out of bounds
(5, (7, 0)), # out of bounds
(5, (6, 7)), # out of bounds
(5, (0, -1)), # out of bounds and not a site index
])
def test_color666_pauli_operator_invalid_index(size, index):
pauli = Color666Code(size).new_pauli()
with pytest.raises(IndexError):
pauli.operator(index)
@pytest.mark.parametrize('pauli, op_counts, message', [
(Color666Code(5).new_pauli().site('I', (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2)),
{'I': 18, 'X': 1, 'Y': 0, 'Z': 0}, 'site failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2)),
{'I': 18, 'X': 0, 'Y': 1, 'Z': 0}, 'site failed.'),
(Color666Code(5).new_pauli().site('Z', (2, 2)),
{'I': 18, 'X': 0, 'Y': 0, 'Z': 1}, 'site failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2)).site('X', (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site self-inverse failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2)).site('Y', (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site self-inverse failed.'),
(Color666Code(5).new_pauli().site('Z', (2, 2)).site('Z', (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site self-inverse failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2)).site('Z', (2, 2)),
{'I': 18, 'X': 0, 'Y': 1, 'Z': 0}, 'site X+Z=Y failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2)).site('Y', (2, 2)),
{'I': 18, 'X': 0, 'Y': 0, 'Z': 1}, 'site X+Y=Z failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2)).site('Z', (2, 2)),
{'I': 18, 'X': 1, 'Y': 0, 'Z': 0}, 'site Y+Z=X failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2)).site('X', (4, 2)),
{'I': 17, 'X': 2, 'Y': 0, 'Z': 0}, 'multi-site failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2)).site('Y', (4, 2)),
{'I': 17, 'X': 0, 'Y': 2, 'Z': 0}, 'multi-site failed.'),
(Color666Code(5).new_pauli().site('Z', (2, 2)).site('Z', (4, 2)),
{'I': 17, 'X': 0, 'Y': 0, 'Z': 2}, 'multi-site failed.'),
(Color666Code(5).new_pauli().site('X', (0, -2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site outside lattice failed.'),
(Color666Code(5).new_pauli().site('X', (0, 1)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site outside lattice failed.'),
(Color666Code(5).new_pauli().site('X', (7, 0)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site outside lattice failed.'),
(Color666Code(5).new_pauli().site('X', (3, 4)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'site outside lattice failed.'),
])
def test_color666_pauli_site(pauli, op_counts, message):
pauli = pt.bsf_to_pauli(pauli.to_bsf())
for op, count in op_counts.items():
assert pauli.count(op) == count, message
@pytest.mark.parametrize('pauli, op_counts, message', [
(Color666Code(5).new_pauli().site('I', (2, 2), (4, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'sites failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2), (4, 2)),
{'I': 17, 'X': 2, 'Y': 0, 'Z': 0}, 'sites failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2), (4, 2)),
{'I': 17, 'X': 0, 'Y': 2, 'Z': 0}, 'sites failed.'),
(Color666Code(5).new_pauli().site('Z', (2, 2), (4, 2)),
{'I': 17, 'X': 0, 'Y': 0, 'Z': 2}, 'sites failed.'),
(Color666Code(5).new_pauli().site('X', (2, 2), (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'sites self-inverse failed.'),
(Color666Code(5).new_pauli().site('Y', (2, 2), (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'sites self-inverse failed.'),
(Color666Code(5).new_pauli().site('Z', (2, 2), (2, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'sites self-inverse failed.'),
])
def test_color666_pauli_sites(pauli, op_counts, message):
pauli = pt.bsf_to_pauli(pauli.to_bsf())
for op, count in op_counts.items():
assert pauli.count(op) == count, message
@pytest.mark.parametrize('size, operator, index', [
(5, 'Z', (1, 1)), # not a site index
(5, 'X', (5, 3)), # not a site index
])
def test_color666_pauli_site_invalid_index(size, operator, index):
pauli = Color666Code(size).new_pauli()
with pytest.raises(IndexError):
pauli.site(operator, index)
@pytest.mark.parametrize('pauli, op_counts, message', [
(Color666Code(5).new_pauli().plaquette('X', (3, 2)),
{'I': 13, 'X': 6, 'Y': 0, 'Z': 0}, 'X plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)).plaquette('X', (3, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'X plaquette self-inverse failed.'),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)).plaquette('X', (5, 3)),
{'I': 11, 'X': 8, 'Y': 0, 'Z': 0}, 'X adjacent plaquettes failed.'),
(Color666Code(5).new_pauli().plaquette('X', (2, 0)),
{'I': 15, 'X': 4, 'Y': 0, 'Z': 0}, 'X boundary plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('X', (4, -2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'X outside lattice plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('Z', (3, 2)),
{'I': 13, 'X': 0, 'Y': 0, 'Z': 6}, 'Z plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('Z', (3, 2)).plaquette('Z', (3, 2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'Z plaquette self-inverse failed.'),
(Color666Code(5).new_pauli().plaquette('Z', (3, 2)).plaquette('Z', (5, 3)),
{'I': 11, 'X': 0, 'Y': 0, 'Z': 8}, 'Z adjacent plaquettes failed.'),
(Color666Code(5).new_pauli().plaquette('Z', (2, 0)),
{'I': 15, 'X': 0, 'Y': 0, 'Z': 4}, 'Z boundary plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('Z', (4, -2)),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'Z outside lattice plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)).plaquette('Z', (3, 2)),
{'I': 13, 'X': 0, 'Y': 6, 'Z': 0}, 'X+Z plaquette failed.'),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)).plaquette('Z', (5, 3)),
{'I': 9, 'X': 4, 'Y': 2, 'Z': 4}, 'X+Z adjacent plaquettes failed.'),
])
def test_color666_pauli_plaquette(pauli, op_counts, message):
pauli = pt.bsf_to_pauli(pauli.to_bsf())
for op, count in op_counts.items():
assert pauli.count(op) == count, message
@pytest.mark.parametrize('size, operator, index', [
(5, 'X', (0, 0)), # not a plaquette index
(5, 'Z', (5, 2)), # not a plaquette index
])
def test_color666_pauli_invalid_plaquette(size, operator, index):
pauli = Color666Code(size).new_pauli()
with pytest.raises(IndexError):
pauli.plaquette(operator, index)
@pytest.mark.parametrize('pauli, op_counts, message', [
(Color666Code(5).new_pauli().logical_x(),
{'I': 14, 'X': 5, 'Y': 0, 'Z': 0}, 'logical_x failed.'),
(Color666Code(5).new_pauli().logical_x().logical_x(),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'logical_x self-inverse failed.'),
(Color666Code(5).new_pauli().logical_z(),
{'I': 14, 'X': 0, 'Y': 0, 'Z': 5}, 'logical_z failed.'),
(Color666Code(5).new_pauli().logical_z().logical_z(),
{'I': 19, 'X': 0, 'Y': 0, 'Z': 0}, 'logical_z self-inverse failed.'),
(Color666Code(5).new_pauli().logical_x().logical_z(),
{'I': 14, 'X': 0, 'Y': 5, 'Z': 0}, 'logical_x_z failed.'),
])
def test_color666_pauli_logical(pauli, op_counts, message):
pauli = pt.bsf_to_pauli(pauli.to_bsf())
for op, count in op_counts.items():
assert pauli.count(op) == count, message
@pytest.mark.parametrize('pauli_1, pauli_2', [
(Color666Code(5).new_pauli(), Color666Code(5).new_pauli()),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)), Color666Code(5).new_pauli().plaquette('X', (3, 2))),
(Color666Code(5).new_pauli().logical_x(), Color666Code(5).new_pauli().logical_x()),
(Color666Code(5).new_pauli().logical_z(), Color666Code(5).new_pauli().logical_z()),
])
def test_color666_pauli_eq(pauli_1, pauli_2):
assert pauli_1 == pauli_2
assert not pauli_1 != pauli_2
@pytest.mark.parametrize('pauli_1, pauli_2', [
(Color666Code(5).new_pauli(), Color666Code(5).new_pauli().plaquette('X', (3, 2))),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)), Color666Code(5).new_pauli().plaquette('Z', (3, 2))),
(Color666Code(5).new_pauli().plaquette('X', (3, 2)), Color666Code(5).new_pauli().plaquette('X', (5, 3))),
(Color666Code(5).new_pauli().logical_x(), Color666Code(5).new_pauli().logical_z()),
(Color666Code(3).new_pauli(), Color666Code(5).new_pauli()),
(Color666Code(3).new_pauli(), None),
])
def test_color666_pauli_ne(pauli_1, pauli_2):
assert pauli_1 != pauli_2
assert not pauli_1 == pauli_2
| 46.563025
| 109
| 0.571197
| 1,714
| 11,082
| 3.573512
| 0.054259
| 0.109714
| 0.201143
| 0.264
| 0.840163
| 0.795102
| 0.787429
| 0.770939
| 0.668898
| 0.642939
| 0
| 0.089098
| 0.166486
| 11,082
| 237
| 110
| 46.759494
| 0.573996
| 0.018228
| 0
| 0.313433
| 0
| 0
| 0.142252
| 0
| 0
| 0
| 0
| 0
| 0.074627
| 1
| 0.064677
| false
| 0
| 0.014925
| 0
| 0.079602
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0c5d97f8457c1def15f6723160f6a7a90cb9991
| 19,603
|
py
|
Python
|
sdk/python/pulumi_databricks/azure_adls_gen2_mount.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/azure_adls_gen2_mount.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/azure_adls_gen2_mount.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AzureAdlsGen2MountArgs', 'AzureAdlsGen2Mount']
@pulumi.input_type
class AzureAdlsGen2MountArgs:
def __init__(__self__, *,
client_id: pulumi.Input[str],
client_secret_key: pulumi.Input[str],
client_secret_scope: pulumi.Input[str],
container_name: pulumi.Input[str],
initialize_file_system: pulumi.Input[bool],
mount_name: pulumi.Input[str],
storage_account_name: pulumi.Input[str],
tenant_id: pulumi.Input[str],
cluster_id: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a AzureAdlsGen2Mount resource.
"""
pulumi.set(__self__, "client_id", client_id)
pulumi.set(__self__, "client_secret_key", client_secret_key)
pulumi.set(__self__, "client_secret_scope", client_secret_scope)
pulumi.set(__self__, "container_name", container_name)
pulumi.set(__self__, "initialize_file_system", initialize_file_system)
pulumi.set(__self__, "mount_name", mount_name)
pulumi.set(__self__, "storage_account_name", storage_account_name)
pulumi.set(__self__, "tenant_id", tenant_id)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if directory is not None:
pulumi.set(__self__, "directory", directory)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: pulumi.Input[str]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="clientSecretKey")
def client_secret_key(self) -> pulumi.Input[str]:
return pulumi.get(self, "client_secret_key")
@client_secret_key.setter
def client_secret_key(self, value: pulumi.Input[str]):
pulumi.set(self, "client_secret_key", value)
@property
@pulumi.getter(name="clientSecretScope")
def client_secret_scope(self) -> pulumi.Input[str]:
return pulumi.get(self, "client_secret_scope")
@client_secret_scope.setter
def client_secret_scope(self, value: pulumi.Input[str]):
pulumi.set(self, "client_secret_scope", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "container_name")
@container_name.setter
def container_name(self, value: pulumi.Input[str]):
pulumi.set(self, "container_name", value)
@property
@pulumi.getter(name="initializeFileSystem")
def initialize_file_system(self) -> pulumi.Input[bool]:
return pulumi.get(self, "initialize_file_system")
@initialize_file_system.setter
def initialize_file_system(self, value: pulumi.Input[bool]):
pulumi.set(self, "initialize_file_system", value)
@property
@pulumi.getter(name="mountName")
def mount_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "mount_name")
@mount_name.setter
def mount_name(self, value: pulumi.Input[str]):
pulumi.set(self, "mount_name", value)
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "storage_account_name")
@storage_account_name.setter
def storage_account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "storage_account_name", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: pulumi.Input[str]):
pulumi.set(self, "tenant_id", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter
def directory(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "directory")
@directory.setter
def directory(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory", value)
@pulumi.input_type
class _AzureAdlsGen2MountState:
def __init__(__self__, *,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering AzureAdlsGen2Mount resources.
"""
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if client_secret_key is not None:
pulumi.set(__self__, "client_secret_key", client_secret_key)
if client_secret_scope is not None:
pulumi.set(__self__, "client_secret_scope", client_secret_scope)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if container_name is not None:
pulumi.set(__self__, "container_name", container_name)
if directory is not None:
pulumi.set(__self__, "directory", directory)
if initialize_file_system is not None:
pulumi.set(__self__, "initialize_file_system", initialize_file_system)
if mount_name is not None:
pulumi.set(__self__, "mount_name", mount_name)
if source is not None:
pulumi.set(__self__, "source", source)
if storage_account_name is not None:
pulumi.set(__self__, "storage_account_name", storage_account_name)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="clientSecretKey")
def client_secret_key(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "client_secret_key")
@client_secret_key.setter
def client_secret_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_key", value)
@property
@pulumi.getter(name="clientSecretScope")
def client_secret_scope(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "client_secret_scope")
@client_secret_scope.setter
def client_secret_scope(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_scope", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "container_name")
@container_name.setter
def container_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "container_name", value)
@property
@pulumi.getter
def directory(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "directory")
@directory.setter
def directory(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory", value)
@property
@pulumi.getter(name="initializeFileSystem")
def initialize_file_system(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "initialize_file_system")
@initialize_file_system.setter
def initialize_file_system(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "initialize_file_system", value)
@property
@pulumi.getter(name="mountName")
def mount_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "mount_name")
@mount_name.setter
def mount_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mount_name", value)
@property
@pulumi.getter
def source(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "source")
@source.setter
def source(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source", value)
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "storage_account_name")
@storage_account_name.setter
def storage_account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_account_name", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
class AzureAdlsGen2Mount(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a AzureAdlsGen2Mount resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AzureAdlsGen2MountArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a AzureAdlsGen2Mount resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param AzureAdlsGen2MountArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AzureAdlsGen2MountArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AzureAdlsGen2MountArgs.__new__(AzureAdlsGen2MountArgs)
if client_id is None and not opts.urn:
raise TypeError("Missing required property 'client_id'")
__props__.__dict__["client_id"] = client_id
if client_secret_key is None and not opts.urn:
raise TypeError("Missing required property 'client_secret_key'")
__props__.__dict__["client_secret_key"] = client_secret_key
if client_secret_scope is None and not opts.urn:
raise TypeError("Missing required property 'client_secret_scope'")
__props__.__dict__["client_secret_scope"] = client_secret_scope
__props__.__dict__["cluster_id"] = cluster_id
if container_name is None and not opts.urn:
raise TypeError("Missing required property 'container_name'")
__props__.__dict__["container_name"] = container_name
__props__.__dict__["directory"] = directory
if initialize_file_system is None and not opts.urn:
raise TypeError("Missing required property 'initialize_file_system'")
__props__.__dict__["initialize_file_system"] = initialize_file_system
if mount_name is None and not opts.urn:
raise TypeError("Missing required property 'mount_name'")
__props__.__dict__["mount_name"] = mount_name
if storage_account_name is None and not opts.urn:
raise TypeError("Missing required property 'storage_account_name'")
__props__.__dict__["storage_account_name"] = storage_account_name
if tenant_id is None and not opts.urn:
raise TypeError("Missing required property 'tenant_id'")
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["source"] = None
super(AzureAdlsGen2Mount, __self__).__init__(
'databricks:index/azureAdlsGen2Mount:AzureAdlsGen2Mount',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret_key: Optional[pulumi.Input[str]] = None,
client_secret_scope: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
directory: Optional[pulumi.Input[str]] = None,
initialize_file_system: Optional[pulumi.Input[bool]] = None,
mount_name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None) -> 'AzureAdlsGen2Mount':
"""
Get an existing AzureAdlsGen2Mount resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AzureAdlsGen2MountState.__new__(_AzureAdlsGen2MountState)
__props__.__dict__["client_id"] = client_id
__props__.__dict__["client_secret_key"] = client_secret_key
__props__.__dict__["client_secret_scope"] = client_secret_scope
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["container_name"] = container_name
__props__.__dict__["directory"] = directory
__props__.__dict__["initialize_file_system"] = initialize_file_system
__props__.__dict__["mount_name"] = mount_name
__props__.__dict__["source"] = source
__props__.__dict__["storage_account_name"] = storage_account_name
__props__.__dict__["tenant_id"] = tenant_id
return AzureAdlsGen2Mount(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="clientSecretKey")
def client_secret_key(self) -> pulumi.Output[str]:
return pulumi.get(self, "client_secret_key")
@property
@pulumi.getter(name="clientSecretScope")
def client_secret_scope(self) -> pulumi.Output[str]:
return pulumi.get(self, "client_secret_scope")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Output[str]:
return pulumi.get(self, "container_name")
@property
@pulumi.getter
def directory(self) -> pulumi.Output[str]:
return pulumi.get(self, "directory")
@property
@pulumi.getter(name="initializeFileSystem")
def initialize_file_system(self) -> pulumi.Output[bool]:
return pulumi.get(self, "initialize_file_system")
@property
@pulumi.getter(name="mountName")
def mount_name(self) -> pulumi.Output[str]:
return pulumi.get(self, "mount_name")
@property
@pulumi.getter
def source(self) -> pulumi.Output[str]:
return pulumi.get(self, "source")
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> pulumi.Output[str]:
return pulumi.get(self, "storage_account_name")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "tenant_id")
| 41.976445
| 134
| 0.656175
| 2,270
| 19,603
| 5.328194
| 0.065639
| 0.089128
| 0.100703
| 0.116412
| 0.831005
| 0.807772
| 0.786441
| 0.746341
| 0.705581
| 0.646135
| 0
| 0.001533
| 0.234505
| 19,603
| 466
| 135
| 42.066524
| 0.804478
| 0.059634
| 0
| 0.663978
| 1
| 0
| 0.12339
| 0.018692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16129
| false
| 0.002688
| 0.013441
| 0.086022
| 0.271505
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50279f70ea923b1f91fa14d3b4b9e8364758d581
| 30,196
|
py
|
Python
|
accountancy/testing/test_integration/test_helpers.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 11
|
2021-01-23T01:09:54.000Z
|
2021-01-25T07:16:30.000Z
|
accountancy/testing/test_integration/test_helpers.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 7
|
2021-04-06T18:19:10.000Z
|
2021-09-22T19:45:03.000Z
|
accountancy/testing/test_integration/test_helpers.py
|
rossm6/accounts
|
74633ce4038806222048d85ef9dfe97a957a6a71
|
[
"MIT"
] | 3
|
2021-01-23T18:55:32.000Z
|
2021-02-16T17:47:59.000Z
|
from datetime import date, datetime, timedelta
from accountancy.helpers import AuditTransaction, get_all_historical_changes
from cashbook.models import CashBook
from contacts.models import Contact
from controls.models import FinancialYear, Period
from django.test import TestCase
from nominals.models import Nominal
from purchases.models import (PurchaseHeader, PurchaseLine, PurchaseMatching,
Supplier)
from vat.models import Vat
DATE_INPUT_FORMAT = '%d-%m-%Y'
MODEL_DATE_INPUT_FORMAT = '%Y-%m-%d'
class GetAllHistoricalChangesTest(TestCase):
def test_create_only(self):
contact = Contact.objects.create(code="1", name="11", email="111")
historical_records = Contact.history.all().order_by("pk")
self.assertEqual(
len(historical_records),
1
)
changes = get_all_historical_changes(historical_records)
self.assertEqual(
len(changes),
1
)
creation_change = changes[0]
self.assertEqual(
creation_change["id"]["old"],
""
)
self.assertEqual(
creation_change["id"]["new"],
str(contact.id)
)
self.assertEqual(
creation_change["code"]["old"],
""
)
self.assertEqual(
creation_change["code"]["new"],
"1"
)
self.assertEqual(
creation_change["name"]["old"],
""
)
self.assertEqual(
creation_change["name"]["new"],
"11"
)
self.assertEqual(
creation_change["meta"]["AUDIT_action"],
"Create"
)
def test_create_and_update(self):
contact = Contact.objects.create(code="1", name="11", email="111")
contact.name = "12"
contact.save()
historical_records = Contact.history.all().order_by("pk")
self.assertEqual(
len(historical_records),
2
)
changes = get_all_historical_changes(historical_records)
self.assertEqual(
len(changes),
2
)
creation_change = changes[0]
update_change = changes[1]
self.assertEqual(
creation_change["id"]["old"],
""
)
self.assertEqual(
creation_change["id"]["new"],
str(contact.id)
)
self.assertEqual(
creation_change["code"]["old"],
""
)
self.assertEqual(
creation_change["code"]["new"],
"1"
)
self.assertEqual(
creation_change["name"]["old"],
""
)
self.assertEqual(
creation_change["name"]["new"],
"11"
)
self.assertEqual(
creation_change["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
update_change["name"]["old"],
"11"
)
self.assertEqual(
update_change["name"]["new"],
"12"
)
self.assertEqual(
update_change["meta"]["AUDIT_action"],
"Update"
)
def test_create_and_update_and_delete(self):
contact = Contact.objects.create(code="1", name="11", email="111")
contact_dict = contact.__dict__.copy()
contact.name = "12"
contact.save()
contact.delete()
historical_records = Contact.history.all().order_by("pk")
self.assertEqual(
len(historical_records),
3
)
changes = get_all_historical_changes(historical_records)
self.assertEqual(
len(changes),
3
)
creation_change = changes[0]
update_change = changes[1]
deleted_change = changes[2]
self.assertEqual(
creation_change["id"]["old"],
""
)
self.assertEqual(
creation_change["id"]["new"],
str(contact_dict["id"])
)
self.assertEqual(
creation_change["code"]["old"],
""
)
self.assertEqual(
creation_change["code"]["new"],
"1"
)
self.assertEqual(
creation_change["name"]["old"],
""
)
self.assertEqual(
creation_change["name"]["new"],
"11"
)
self.assertEqual(
creation_change["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
update_change["name"]["old"],
"11"
)
self.assertEqual(
update_change["name"]["new"],
"12"
)
self.assertEqual(
update_change["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
deleted_change["id"]["old"],
str(contact_dict["id"])
)
self.assertEqual(
deleted_change["id"]["new"],
""
)
self.assertEqual(
deleted_change["code"]["old"],
contact_dict["code"]
)
self.assertEqual(
deleted_change["code"]["new"],
""
)
self.assertEqual(
deleted_change["name"]["old"],
"12"
)
self.assertEqual(
deleted_change["name"]["new"],
""
)
self.assertEqual(
deleted_change["email"]["old"],
contact_dict["email"]
)
self.assertEqual(
deleted_change["email"]["new"],
""
)
self.assertEqual(
deleted_change["customer"]["old"],
str(contact_dict["customer"])
)
self.assertEqual(
deleted_change["customer"]["new"],
""
)
self.assertEqual(
deleted_change["supplier"]["old"],
str(contact_dict["supplier"])
)
self.assertEqual(
deleted_change["supplier"]["new"],
""
)
self.assertEqual(
deleted_change["meta"]["AUDIT_action"],
"Delete"
)
class AuditTransactionTest(TestCase):
"""
Test with PL header, line, matching
"""
@classmethod
def setUpTestData(cls):
cls.date = datetime.now().strftime(DATE_INPUT_FORMAT)
cls.due_date = (datetime.now() + timedelta(days=31)
).strftime(DATE_INPUT_FORMAT)
cls.model_date = datetime.now().strftime(MODEL_DATE_INPUT_FORMAT)
cls.model_due_date = (datetime.now() + timedelta(days=31)
).strftime(MODEL_DATE_INPUT_FORMAT)
fy = FinancialYear.objects.create(financial_year=2020)
cls.fy = fy
cls.period = Period.objects.create(
fy=fy, period="01", fy_and_period="202001", month_start=date(2020, 1, 31))
def test_no_lines(self):
cash_book = CashBook.objects.create(
nominal=None,
name="current"
)
supplier = Supplier.objects.create(
code="1",
name="2",
email="3"
)
h = PurchaseHeader.objects.create(
type="pp", # payment
date=date.today(),
goods=120,
vat=0,
total=120,
ref="123",
cash_book=cash_book,
supplier=supplier,
paid=0,
due=0,
period=self.period
)
self.assertEqual(
len(PurchaseHeader.history.all()),
1
)
h.ref = "1234" # update the header
h.save()
h.refresh_from_db()
self.assertEqual(
len(PurchaseHeader.history.all()),
2
)
audit_transaction = AuditTransaction(
h, PurchaseHeader, PurchaseLine, PurchaseMatching)
self.assertEqual(
len(audit_transaction.audit_header_history),
2
)
self.assertEqual(
len(audit_transaction.audit_lines_history),
0
)
self.assertEqual(
len(audit_transaction.audit_matches_history),
0
)
all_changes = audit_transaction.get_historical_changes()
self.assertEqual(
len(all_changes),
2
)
self.assertTrue(
all_changes[0]["meta"]["AUDIT_date"] < all_changes[1]["meta"]["AUDIT_date"]
)
create = all_changes[0]
self.assertEqual(
create["id"]["old"],
"",
)
self.assertEqual(
create["id"]["new"],
str(h.id),
)
self.assertEqual(
create["ref"]["old"],
"",
)
self.assertEqual(
create["ref"]["new"],
"123",
)
self.assertEqual(
create["goods"]["old"],
"",
)
self.assertEqual(
create["goods"]["new"],
str(h.goods * -1), # payment ui value is positive
)
self.assertEqual(
create["vat"]["old"],
"",
)
self.assertEqual(
create["vat"]["new"],
str(h.vat),
)
self.assertEqual(
create["total"]["old"],
"",
)
self.assertEqual(
create["total"]["new"],
str(h.total * -1), # payment ui value is positive
)
self.assertEqual(
create["paid"]["old"],
"",
)
self.assertEqual(
create["paid"]["new"],
str(h.paid),
)
self.assertEqual(
create["due"]["old"],
"",
)
self.assertEqual(
create["due"]["new"],
str(h.due),
)
self.assertEqual(
create["date"]["old"],
"",
)
self.assertEqual(
create["date"]["new"],
str(h.date),
)
self.assertEqual(
create["due_date"]["old"],
"",
)
self.assertEqual(
create["due_date"]["new"],
str(h.due_date),
)
self.assertEqual(
create["period_id"]["old"],
"",
)
self.assertEqual(
create["period_id"]["new"],
str(self.period.pk),
)
self.assertEqual(
create["status"]["old"],
"",
)
self.assertEqual(
create["status"]["new"],
str(h.status),
)
self.assertEqual(
create["type"]["old"],
"",
)
self.assertEqual(
create["type"]["new"],
str(h.type),
)
self.assertEqual(
create["cash_book_id"]["old"],
"",
)
self.assertEqual(
create["cash_book_id"]["new"],
str(h.cash_book_id),
)
self.assertEqual(
create["supplier_id"]["old"],
"",
)
self.assertEqual(
create["supplier_id"]["new"],
str(h.supplier_id),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"header"
)
update = all_changes[1]
self.assertEqual(
update["ref"]["old"],
"123",
)
self.assertEqual(
update["ref"]["new"],
h.ref,
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"header"
)
def test_lines(self):
# same as above except for change a line
# above has no lines
cash_book = CashBook.objects.create(
nominal=None,
name="current"
)
supplier = Supplier.objects.create(
code="1",
name="2",
email="3"
)
h = PurchaseHeader.objects.create(
type="pi", # payment
date=date.today(),
goods=100,
vat=20,
total=120,
ref="123",
cash_book=cash_book,
supplier=supplier,
paid=0,
due=0,
period=self.period
)
nominal = Nominal.objects.create(
name="something",
parent=None
)
vat_code = Vat.objects.create(
code="1",
name="2",
rate=20
)
l = PurchaseLine.objects.create(
nominal=nominal,
goods=100,
vat=20,
vat_code=vat_code,
description="123",
line_no=1,
header=h
)
self.assertEqual(
len(PurchaseHeader.history.all()),
1
)
h.ref = "1234" # update the header
h.save()
h.refresh_from_db()
l.description = "12345"
l.save()
l.refresh_from_db()
self.assertEqual(
len(PurchaseHeader.history.all()),
2
)
audit_transaction = AuditTransaction(
h, PurchaseHeader, PurchaseLine, PurchaseMatching)
self.assertEqual(
len(audit_transaction.audit_header_history),
2
)
self.assertEqual(
len(audit_transaction.audit_lines_history),
2
)
self.assertEqual(
len(audit_transaction.audit_matches_history),
0
)
all_changes = audit_transaction.get_historical_changes()
self.assertEqual(
len(all_changes),
4
)
self.assertTrue(
all_changes[0]["meta"]["AUDIT_date"] < all_changes[1]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[1]["meta"]["AUDIT_date"] < all_changes[2]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[2]["meta"]["AUDIT_date"] < all_changes[3]["meta"]["AUDIT_date"]
)
create = all_changes[0]
self.assertEqual(
create["id"]["old"],
"",
)
self.assertEqual(
create["id"]["new"],
str(h.id),
)
self.assertEqual(
create["ref"]["old"],
"",
)
self.assertEqual(
create["ref"]["new"],
"123",
)
self.assertEqual(
create["goods"]["old"],
"",
)
self.assertEqual(
create["goods"]["new"],
str(h.goods),
)
self.assertEqual(
create["vat"]["old"],
"",
)
self.assertEqual(
create["vat"]["new"],
str(h.vat),
)
self.assertEqual(
create["total"]["old"],
"",
)
self.assertEqual(
create["total"]["new"],
str(h.total),
)
self.assertEqual(
create["paid"]["old"],
"",
)
self.assertEqual(
create["paid"]["new"],
str(h.paid),
)
self.assertEqual(
create["due"]["old"],
"",
)
self.assertEqual(
create["due"]["new"],
str(h.due),
)
self.assertEqual(
create["date"]["old"],
"",
)
self.assertEqual(
create["date"]["new"],
str(h.date),
)
self.assertEqual(
create["due_date"]["old"],
"",
)
self.assertEqual(
create["due_date"]["new"],
str(h.due_date),
)
self.assertEqual(
create["period_id"]["old"],
"",
)
self.assertEqual(
create["period_id"]["new"],
str(self.period.pk),
)
self.assertEqual(
create["status"]["old"],
"",
)
self.assertEqual(
create["status"]["new"],
str(h.status),
)
self.assertEqual(
create["type"]["old"],
"",
)
self.assertEqual(
create["type"]["new"],
str(h.type),
)
self.assertEqual(
create["cash_book_id"]["old"],
"",
)
self.assertEqual(
create["cash_book_id"]["new"],
str(h.cash_book_id),
)
self.assertEqual(
create["supplier_id"]["old"],
"",
)
self.assertEqual(
create["supplier_id"]["new"],
str(h.supplier_id),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"header"
)
update = all_changes[2]
self.assertEqual(
update["ref"]["old"],
"123",
)
self.assertEqual(
update["ref"]["new"],
h.ref,
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"header"
)
# now for the line change
create = all_changes[1]
self.assertEqual(
create["id"]["old"],
"",
)
self.assertEqual(
create["id"]["new"],
str(l.id),
)
self.assertEqual(
create["description"]["old"],
"",
)
self.assertEqual(
create["description"]["new"],
"123",
)
self.assertEqual(
create["goods"]["old"],
""
)
self.assertEqual(
create["goods"]["new"],
str(l.goods),
)
self.assertEqual(
create["vat"]["old"],
"",
)
self.assertEqual(
create["vat"]["new"],
str(l.vat),
)
self.assertEqual(
create["line_no"]["old"],
"",
)
self.assertEqual(
create["line_no"]["new"],
str(l.line_no),
)
self.assertEqual(
create["nominal_id"]["old"],
"",
)
self.assertEqual(
create["nominal_id"]["new"],
str(l.nominal.pk),
)
self.assertEqual(
create["vat_code_id"]["old"],
"",
)
self.assertEqual(
create["vat_code_id"]["new"],
str(l.vat_code.pk),
)
self.assertEqual(
create["header_id"]["old"],
"",
)
self.assertEqual(
create["header_id"]["new"],
str(l.header.pk),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"line"
)
update = all_changes[3]
self.assertEqual(
update["description"]["old"],
"123",
)
self.assertEqual(
update["description"]["new"],
l.description,
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"line"
)
def test_matching(self):
# same as above except for change a line
# above has no lines
cash_book = CashBook.objects.create(
nominal=None,
name="current"
)
supplier = Supplier.objects.create(
code="1",
name="2",
email="3"
)
to_match_against = PurchaseHeader.objects.create(
type="pi", # payment
date=date.today(),
goods=-100,
vat=-20,
total=-120,
ref="123",
cash_book=cash_book,
supplier=supplier,
paid=0,
due=0,
period=self.period
)
h = PurchaseHeader.objects.create(
type="pi", # payment
date=date.today(),
goods=100,
vat=20,
total=120,
ref="123",
cash_book=cash_book,
supplier=supplier,
paid=0,
due=0,
period=self.period
)
nominal = Nominal.objects.create(
name="something",
parent=None
)
vat_code = Vat.objects.create(
code="1",
name="2",
rate=20
)
l = PurchaseLine.objects.create(
nominal=nominal,
goods=100,
vat=20,
vat_code=vat_code,
description="123",
line_no=1,
header=h
)
match = PurchaseMatching.objects.create(
matched_by=h,
matched_to=to_match_against,
period=self.period,
value=-100
)
self.assertEqual(
len(PurchaseHeader.history.all()),
2
)
self.assertEqual(
len(PurchaseMatching.history.all()),
1
)
h.ref = "1234" # update the header
h.save()
h.refresh_from_db()
l.description = "12345"
l.save()
l.refresh_from_db()
match.value = -120
match.save()
match.refresh_from_db()
audit_transaction = AuditTransaction(
h, PurchaseHeader, PurchaseLine, PurchaseMatching)
self.assertEqual(
len(audit_transaction.audit_header_history),
2
)
self.assertEqual(
len(audit_transaction.audit_lines_history),
2
)
self.assertEqual(
len(audit_transaction.audit_matches_history),
2
)
all_changes = audit_transaction.get_historical_changes()
self.assertEqual(
len(all_changes),
6
)
self.assertTrue(
all_changes[0]["meta"]["AUDIT_date"] <= all_changes[1]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[1]["meta"]["AUDIT_date"] <= all_changes[2]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[2]["meta"]["AUDIT_date"] <= all_changes[3]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[3]["meta"]["AUDIT_date"] <= all_changes[4]["meta"]["AUDIT_date"]
)
self.assertTrue(
all_changes[4]["meta"]["AUDIT_date"] <= all_changes[5]["meta"]["AUDIT_date"]
)
create = all_changes[0]
self.assertEqual(
create["id"]["old"],
"",
)
self.assertEqual(
create["id"]["new"],
str(h.id),
)
self.assertEqual(
create["ref"]["old"],
"",
)
self.assertEqual(
create["ref"]["new"],
"123",
)
self.assertEqual(
create["goods"]["old"],
"",
)
self.assertEqual(
create["goods"]["new"],
str(h.goods),
)
self.assertEqual(
create["vat"]["old"],
"",
)
self.assertEqual(
create["vat"]["new"],
str(h.vat),
)
self.assertEqual(
create["total"]["old"],
"",
)
self.assertEqual(
create["total"]["new"],
str(h.total),
)
self.assertEqual(
create["paid"]["old"],
"",
)
self.assertEqual(
create["paid"]["new"],
str(h.paid),
)
self.assertEqual(
create["due"]["old"],
"",
)
self.assertEqual(
create["due"]["new"],
str(h.due),
)
self.assertEqual(
create["date"]["old"],
"",
)
self.assertEqual(
create["date"]["new"],
str(h.date),
)
self.assertEqual(
create["due_date"]["old"],
"",
)
self.assertEqual(
create["due_date"]["new"],
str(h.due_date),
)
self.assertEqual(
create["period_id"]["old"],
"",
)
self.assertEqual(
create["period_id"]["new"],
str(self.period.pk),
)
self.assertEqual(
create["status"]["old"],
"",
)
self.assertEqual(
create["status"]["new"],
str(h.status),
)
self.assertEqual(
create["type"]["old"],
"",
)
self.assertEqual(
create["type"]["new"],
str(h.type),
)
self.assertEqual(
create["cash_book_id"]["old"],
"",
)
self.assertEqual(
create["cash_book_id"]["new"],
str(h.cash_book_id),
)
self.assertEqual(
create["supplier_id"]["old"],
"",
)
self.assertEqual(
create["supplier_id"]["new"],
str(h.supplier_id),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"header"
)
update = all_changes[3]
self.assertEqual(
update["ref"]["old"],
"123",
)
self.assertEqual(
update["ref"]["new"],
h.ref,
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"header"
)
# now for the line change
create = all_changes[1]
self.assertEqual(
create["id"]["old"],
"",
)
self.assertEqual(
create["id"]["new"],
str(l.id),
)
self.assertEqual(
create["description"]["old"],
"",
)
self.assertEqual(
create["description"]["new"],
"123",
)
self.assertEqual(
create["goods"]["old"],
""
)
self.assertEqual(
create["goods"]["new"],
str(l.goods),
)
self.assertEqual(
create["vat"]["old"],
"",
)
self.assertEqual(
create["vat"]["new"],
str(l.vat),
)
self.assertEqual(
create["line_no"]["old"],
"",
)
self.assertEqual(
create["line_no"]["new"],
str(l.line_no),
)
self.assertEqual(
create["nominal_id"]["old"],
"",
)
self.assertEqual(
create["nominal_id"]["new"],
str(l.nominal.pk),
)
self.assertEqual(
create["vat_code_id"]["old"],
"",
)
self.assertEqual(
create["vat_code_id"]["new"],
str(l.vat_code.pk),
)
self.assertEqual(
create["header_id"]["old"],
"",
)
self.assertEqual(
create["header_id"]["new"],
str(l.header.pk),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"line"
)
update = all_changes[4]
self.assertEqual(
update["description"]["old"],
"123",
)
self.assertEqual(
update["description"]["new"],
l.description,
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"line"
)
create = all_changes[2]
self.assertEqual(
create["matched_by_id"]["old"],
"",
)
self.assertEqual(
create["matched_by_id"]["new"],
str(match.matched_by_id),
)
self.assertEqual(
create["matched_to_id"]["old"],
"",
)
self.assertEqual(
create["matched_to_id"]["new"],
str(match.matched_to_id),
)
self.assertEqual(
create["value"]["old"],
"",
)
self.assertEqual(
create["value"]["new"],
"-100.00",
)
self.assertEqual(
create["period_id"]["old"],
"",
)
self.assertEqual(
create["period_id"]["new"],
str(self.period.pk),
)
self.assertEqual(
create["meta"]["AUDIT_action"],
"Create"
)
self.assertEqual(
create["meta"]["transaction_aspect"],
"match"
)
update = all_changes[5]
self.assertEqual(
update["value"]["old"],
"-100.00"
)
self.assertEqual(
update["value"]["new"],
"-120.00"
)
self.assertEqual(
update["meta"]["AUDIT_action"],
"Update"
)
self.assertEqual(
update["meta"]["transaction_aspect"],
"match"
)
| 25.676871
| 88
| 0.440588
| 2,506
| 30,196
| 5.166002
| 0.061053
| 0.25954
| 0.220609
| 0.114939
| 0.896648
| 0.843504
| 0.826278
| 0.818091
| 0.805268
| 0.801406
| 0
| 0.016328
| 0.421943
| 30,196
| 1,175
| 89
| 25.698723
| 0.725351
| 0.011392
| 0
| 0.674296
| 0
| 0
| 0.091577
| 0
| 0
| 0
| 0
| 0
| 0.205106
| 1
| 0.006162
| false
| 0
| 0.007923
| 0
| 0.015845
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
503f64c1b012c8d743dd5c774fa6555365d46deb
| 24
|
py
|
Python
|
python/testData/copyPaste/ReplaceSelection.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/copyPaste/ReplaceSelection.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/copyPaste/ReplaceSelection.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
a = 1
b = 1
a = 1
b = 1
| 4.8
| 5
| 0.333333
| 8
| 24
| 1
| 0.375
| 0.5
| 0.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.5
| 24
| 4
| 6
| 6
| 0.333333
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
4a0a559463a85757a42ba71a9dedca02c59e4314
| 121
|
py
|
Python
|
tests/Usage.py
|
astronalta/gamepython
|
3927dfbb0ae9706cd99d4f15ea792b30512dd4c1
|
[
"MIT"
] | null | null | null |
tests/Usage.py
|
astronalta/gamepython
|
3927dfbb0ae9706cd99d4f15ea792b30512dd4c1
|
[
"MIT"
] | null | null | null |
tests/Usage.py
|
astronalta/gamepython
|
3927dfbb0ae9706cd99d4f15ea792b30512dd4c1
|
[
"MIT"
] | null | null | null |
import os
os.system(os.path.join("..", "bin", "jogo") + " --help")
os.system(os.path.join("..", "bin", "jogo") + " -h")
| 24.2
| 56
| 0.520661
| 18
| 121
| 3.5
| 0.5
| 0.253968
| 0.31746
| 0.444444
| 0.793651
| 0.793651
| 0.793651
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 4
| 57
| 30.25
| 0.59434
| 0
| 0
| 0
| 0
| 0
| 0.231405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
4a0abdb3269da3835943a0f144f3b4cc4938b15c
| 75,134
|
py
|
Python
|
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
blankpsl = """
A P P : C o u r i e r
T Y P E : S c h e m e L o g i c E d i t o r
F O R M A T : 1 . 0
V E R S I O N : 4 . 0 0
D O M A I N : 0 0 S e t t i n g s
S U B D O M A I N : 0 P S L S e t t i n g G r p 1
M O D E L : P 1 4 2 1 1 7 B 4 M 0 4 3 0 J
R E F E R E N C E :
D D B D E S C R I P T I O N F I L E :
F i l e c r e a t e d f r o m t e m p l a t e o n T h u r s d a y , D e c e m b e r 2 0 , 2 0 1 8 Â X @ +++++++++++++++++!!!!!!!!!!!!!!!!++++++++++++++++""""""""EEEEEEEEEEEEEEE+++++++++++++++++FFFFFFFFGGGGGGGGGGGGGGGG((((((((((((((((I$$$$$$$$$$$$$$$$$+$$$+$CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC+CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$,,,,,,,,,,,,,,,,,+C$,$$,,$$CCCCCC$$$$$$$$$$$$CCC$$$$$$CCCC$$++++++++CCC$CC$$C+$CCC$$$$++++C+++++++++++++++++++++++CCCCCCCCCC$$$$$$$$CC++++++CCCCCC$C$$$C$$$$$CCC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$CCCC$$$$+++++++$$$$$$$$$$$$$$$$$++++++++++++++--------------------------------////////////////////////////////////////////////////////////////11111111RRRRRRRR+++++++++++JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ+PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++C$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$CC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++Output R1 Output R2 Output R3 Output R4 Output R5 Output R6 Output R7 Output R8 Output R9 Output R10 Output R11 Output R12 Output R13 Output R14 Output R15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Input L1 Input L2 Input L3 Input L4 Input L5 Input L6 Input L7 Input L8 Input L9 Input L10 Input 
L11 Input L12 Input L13 Input L14 Input L15 Input L16 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED 1 LED 2 LED 3 LED 4 LED 5 LED 6 LED 7 LED 8 Relay Cond 1 Relay Cond 2 Relay Cond 3 Relay Cond 4 Relay Cond 5 Relay Cond 6 Relay Cond 7 Relay Cond 8 Relay Cond 9 Relay Cond 10 Relay Cond 11 Relay Cond 12 Relay Cond 13 Relay Cond 14 Relay Cond 15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED Cond IN 1 LED Cond IN 2 LED Cond IN 3 LED Cond IN 4 LED Cond IN 5 LED Cond IN 6 LED Cond IN 7 LED Cond IN 8 Timer in 1 Timer in 2 Timer in 3 Timer in 4 Timer in 5 Timer in 6 Timer in 7 Timer in 8 Timer in 9 Timer in 10 Timer in 11 Timer in 12 Timer in 13 Timer in 14 Timer in 15 Timer in 16 Timer out 1 Timer out 2 Timer out 3 Timer out 4 Timer out 5 Timer out 6 Timer out 7 Timer out 8 Timer out 9 Timer out 10 Timer out 11 Timer out 12 Timer out 13 Timer out 14 Timer out 15 Timer out 16 Fault REC TRIG SG-opto Invalid Prot'n Disabled F out of Range VT Fail Alarm CT Fail Alarm CB Fail Alarm I^ Maint Alarm I^ Lockout AlarmCB Ops Maint CB Ops Lockout CB Op Time MaintCB Op Time Lock Fault Freq Lock CB Status Alarm Man CB Trip FailMan CB Cls Fail Man CB UnhealthyUnused AR Lockout AR CB Unhealthy AR No Sys Check Unused UV Block SR User Alarm 1 SR User Alarm 2 SR User Alarm 3 SR User Alarm 4 SR User Alarm 5 SR User Alarm 6 SR User Alarm 7 SR User Alarm 8 SR User Alarm 9 SR User Alarm 10SR User Alarm 11SR User Alarm 12SR User Alarm 13SR User Alarm 14SR User Alarm 15SR User Alarm 16SR User Alarm 17MR User Alarm 18MR User Alarm 19MR User Alarm 20MR User Alarm 21MR User Alarm 22MR User Alarm 23MR User Alarm 24MR User Alarm 25MR User Alarm 26MR User Alarm 27MR User Alarm 28MR User Alarm 29MR User Alarm 30MR User Alarm 31MR User Alarm 32MR User Alarm 33MR User Alarm 34MR User Alarm 35I>1 Timer Block I>2 Timer Block I>3 Timer Block I>4 Timer Block Unused 
IN1>1 Timer Blk IN1>2 Timer Blk IN1>3 Timer Blk IN1>4 Timer Blk IN2>1 Timer Blk IN2>2 Timer Blk IN2>3 Timer Blk IN2>4 Timer Blk ISEF>1 Timer BlkISEF>2 Timer BlkISEF>3 Timer BlkISEF>4 Timer BlkVN>1 Timer Blk VN>2 Timer Blk V<1 Timer Block V<2 Timer Block V>1 Timer Block V>2 Timer Block CLP Initiate Ext. Trip 3ph CB Aux 3ph(52-A)CB Aux 3ph(52-B)CB Healthy MCB/VTS Init Trip CB Init Close CB Reset Close Dly Reset Relays/LEDReset Thermal Reset Lockout Reset CB Data Block AR Live Line Mode Auto Mode Telecontrol ModeI>1 Trip I>1 Trip A I>1 Trip B I>1 Trip C I>2 Trip I>2 Trip A I>2 Trip B I>2 Trip C I>3 Trip I>3 Trip A I>3 Trip B I>3 Trip C I>4 Trip I>4 Trip A I>4 Trip B I>4 Trip C Unused Broken Line TripIN1>1 Trip IN1>2 Trip IN1>3 Trip IN1>4 Trip IN2>1 Trip IN2>2 Trip IN2>3 Trip IN2>4 Trip ISEF>1 Trip ISEF>2 Trip ISEF>3 Trip ISEF>4 Trip IREF> Trip VN>1 Trip VN>2 Trip Thermal Trip V2> Trip V<1 Trip V<1 Trip A/AB V<1 Trip B/BC V<1 Trip C/CA V<2 Trip V<2 Trip A/AB V<2 Trip B/BC V<2 Trip C/CA V>1 Trip V>1 Trip A/AB V>1 Trip B/BC V>1 Trip C/CA V>2 Trip V>2 Trip A/AB V>2 Trip B/BC V>2 Trip C/CA Any Start I>1 Start I>1 Start A I>1 Start B I>1 Start C I>2 Start I>2 Start A I>2 Start B I>2 Start C I>3 Start I>3 Start A I>3 Start B I>3 Start C I>4 Start I>4 Start A I>4 Start B I>4 Start C VCO Start AB VCO Start BC VCO Start CA Unused IN1>1 Start IN1>2 Start IN1>3 Start IN1>4 Start IN2>1 Start IN2>2 Start IN2>3 Start IN2>4 Start ISEF>1 Start ISEF>2 Start ISEF>3 Start ISEF>4 Start VN>1 Start VN>2 Start Thermal Alarm V2> Start V<1 Start V<1 Start A/AB V<1 Start B/BC V<1 Start C/CA V<2 Start V<2 Start A/AB V<2 Start B/BC V<2 Start C/CA V>1 Start V>1 Start A/AB V>1 Start B/BC V>1 Start C/CA V>2 Start V>2 Start A/AB V>2 Start B/BC V>2 Start C/CA CLP Operation I> BlockStart IN/SEF>Blk StartVTS Fast Block VTS Slow Block CTS Block Bfail1 Trip 3ph Bfail2 Trip 3ph Control Trip Control Close Close in Prog Block Main Prot Block SEF Prot AR In Progress AR In Service Seq Counter = 0 Seq Counter = 
1 Seq Counter = 2 Seq Counter = 3 Seq Counter = 4 Successful CloseDead T in Prog Protection LocktReset Lckout AlmAuto Close AR Trip Test IA< Start IB< Start IC< Start IN< Start ISEF< Start CB Open 3 ph CB Closed 3 ph All Poles Dead Any Pole Dead Pole Dead A Pole Dead B Pole Dead C VTS Acc Ind VTS Volt Dep VTS IA> VTS IB> VTS IC> VTS VA> VTS VB> VTS VC> VTS I2> VTS V2> VTS IA delta> VTS IB delta> VTS IC delta> CBF SEF Trip CBF Non I Trip CBF SEF Trip-1 CBF Non I Trip-1Unused AR Sys Checks OKLockout Alarm Pre-Lockout Freq High Freq Low Stop Freq Track Start N Field volts failFreq Not Found F<1 Timer Block F<2 Timer Block F<3 Timer Block F<4 Timer Block F>1 Timer Block F>2 Timer Block F<1 Start F<2 Start F<3 Start F<4 Start F>1 Start F>2 Start F<1 Trip F<2 Trip F<3 Trip F<4 Trip F>1 Trip F>2 Trip YN> Timer Block GN> Timer Block BN> Timer Block YN> Start GN> Start BN> Start YN> Trip GN> Trip BN> Trip Ext AR Prot TripExt AR Prot StrtTest Mode Inhibit SEF Live Line Dead Line Unused Unused Unused Unused Unused Unused Unused Unused DAR Complete CB in Service AR Restart AR In Progress 1DeadTime EnabledDT OK To Start DT Complete Reclose Checks Circuits OK Unused AR SysChecks OK AR Init TripTest103 MonitorBlock103 CommandBlockISEF>1 Start 2 ISEF>2 Start 2 ISEF>3 Start 2 ISEF>4 Start 2 Unused Unused Unused Unused Time Synch Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused External Trip A External Trip B External Trip C External Trip EFExternal TripSEFI2> Inhibit I2>1 Tmr Blk I2>2 Tmr Blk I2>3 Tmr Blk I2>4 Tmr Blk I2>1 Start I2>2 Start I2>3 Start I2>4 Start I2>1 Trip I2>2 Trip I2>3 Trip I2>4 Trip V2> Accelerate Trip LED TriggerUnused Unused Unused Unused Unused Unused Blk Rmt. CB Ops SG Select x1 SG Select 1x IN1> Inhibit IN2> Inhibit AR Skip Shot 1 Logic 0 Ref. 
Inh Reclaim TimeReclaim In Prog Reclaim CompleteBrokenLine StartTrip Command In Trip Command OutIA2H Start IB2H Start IC2H Start I2H Any Start RP1 Read Only RP2 Read Only NIC Read Only Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Power>1 3PhStartPower>1 A Start Power>1 B Start Power>1 C Start Power>2 3PhStartPower>2 A Start Power>2 B Start Power>2 C Start Power<1 3PhStartPower<1 A Start Power<1 B Start Power<1 C Start Power<2 3PhStartPower<2 A Start Power<2 B Start Power<2 C Start Power>1 3Ph TripPower>1 A Trip Power>1 B Trip Power>1 C Trip Power>2 3Ph TripPower>2 A Trip Power>2 B Trip Power>2 C Trip Power<1 3Ph TripPower<1 A Trip Power<1 B Trip Power<1 C Trip Power<2 3Ph TripPower<2 A Trip Power<2 B Trip Power<2 C Trip Power>1 Block Power>2 Block Power<1 Block Power<2 Block SensP1 Start A 
SensP2 Start A SensP1 Trip A SensP2 Trip A Unused Unused Unused Unused Unused Unused Unused Battery Fail Rear Comm 2 FailGOOSE IED AbsentNIC Not Fitted NIC No Response NIC Fatal Error NIC Soft. ReloadBad TCP/IP Cfg. Bad OSI Config. NIC Link Fail NIC SW Mis-MatchIP Addr ConflictIM Loopback IM Msg Fail IM DCD Fail IM Chan Fail Backup Setting Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Control Input 1 Control Input 2 Control Input 3 Control Input 4 Control Input 5 Control Input 6 Control Input 7 Control Input 8 Control Input 9 Control Input 10Control Input 11Control Input 12Control Input 13Control Input 14Control Input 15Control Input 16Control Input 17Control Input 18Control Input 19Control Input 20Control Input 21Control Input 22Control Input 23Control Input 24Control Input 25Control Input 26Control Input 27Control Input 28Control Input 29Control Input 30Control Input 31Control Input 32Virtual Input 1 Virtual Input 2 Virtual Input 3 Virtual Input 4 Virtual Input 5 Virtual Input 6 Virtual Input 7 Virtual Input 8 Virtual Input 9 Virtual Input 10Virtual Input 11Virtual Input 12Virtual Input 13Virtual Input 14Virtual Input 15Virtual Input 16Virtual Input 17Virtual Input 18Virtual Input 19Virtual Input 20Virtual Input 21Virtual Input 22Virtual Input 23Virtual Input 24Virtual Input 25Virtual Input 26Virtual Input 27Virtual Input 28Virtual Input 29Virtual Input 30Virtual Input 31Virtual Input 32Virtual Input 33Virtual Input 34Virtual Input 35Virtual Input 36Virtual Input 37Virtual Input 38Virtual Input 39Virtual Input 40Virtual Input 41Virtual Input 42Virtual Input 43Virtual Input 44Virtual Input 45Virtual Input 46Virtual Input 47Virtual Input 48Virtual Input 49Virtual Input 50Virtual Input 51Virtual Input 52Virtual Input 53Virtual Input 54Virtual Input 55Virtual Input 56Virtual Input 57Virtual Input 58Virtual Input 59Virtual Input 60Virtual Input 61Virtual Input 62Virtual Input 63Virtual Input 64InterMiCOM in 1 
InterMiCOM in 2 InterMiCOM in 3 InterMiCOM in 4 InterMiCOM in 5 InterMiCOM in 6 InterMiCOM in 7 InterMiCOM in 8 InterMiCOM out 1InterMiCOM out 2InterMiCOM out 3InterMiCOM out 4InterMiCOM out 5InterMiCOM out 6InterMiCOM out 7InterMiCOM out 8Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused PSL Internal 001PSL Internal 002PSL Internal 003PSL Internal 004PSL Internal 005PSL Internal 006PSL Internal 007PSL Internal 008PSL Internal 009PSL Internal 010PSL Internal 011PSL Internal 012PSL Internal 013PSL Internal 014PSL Internal 015PSL Internal 016PSL Internal 017PSL Internal 018PSL Internal 019PSL Internal 020PSL Internal 021PSL Internal 022PSL Internal 023PSL Internal 024PSL Internal 025PSL Internal 026PSL Internal 027PSL Internal 028PSL Internal 029PSL Internal 030PSL Internal 031PSL Internal 032PSL Internal 033PSL Internal 034PSL Internal 035PSL Internal 036PSL Internal 037PSL Internal 038PSL Internal 039PSL Internal 040PSL Internal 041PSL Internal 042PSL Internal 043PSL Internal 044PSL Internal 045PSL Internal 046PSL Internal 047PSL Internal 048PSL Internal 049PSL Internal 050PSL Internal 051PSL Internal 052PSL Internal 053PSL Internal 054PSL Internal 055PSL Internal 056PSL Internal 057PSL Internal 058PSL Internal 059PSL Internal 060PSL Internal 061PSL Internal 062PSL Internal 063PSL Internal 064PSL Internal 065PSL Internal 066PSL Internal 067PSL Internal 068PSL Internal 069PSL Internal 070PSL Internal 071PSL Internal 072PSL Internal 073PSL Internal 074PSL Internal 075PSL Internal 076PSL Internal 077PSL Internal 078PSL Internal 079PSL Internal 080PSL Internal 081PSL Internal 082PSL Internal 083PSL Internal 084PSL Internal 085PSL Internal 086PSL Internal 087PSL Internal 088PSL Internal 089PSL Internal 090PSL Internal 091PSL Internal 092PSL Internal 093PSL Internal 094PSL Internal 095PSL Internal 096PSL Internal 097PSL Internal 098PSL Internal 099PSL Internal 100Unused Virtual Output 1Virtual Output 2Virtual Output 3Virtual Output 4Virtual 
Output 5Virtual Output 6Virtual Output 7Virtual Output 8Virtual Output 9Virtual Output10Virtual Output11Virtual Output12Virtual Output13Virtual Output14Virtual Output15Virtual Output16Virtual Output17Virtual Output18Virtual Output19Virtual Output20Virtual Output21Virtual Output22Virtual Output23Virtual Output24Virtual Output25Virtual Output26Virtual Output27Virtual Output28Virtual Output29Virtual Output30Virtual Output31Virtual Output32Quality VIP 1 Quality VIP 2 Quality VIP 3 Quality VIP 4 Quality VIP 5 Quality VIP 6 Quality VIP 7 Quality VIP 8 Quality VIP 9 Quality VIP 10 Quality VIP 11 Quality VIP 12 Quality VIP 13 Quality VIP 14 Quality VIP 15 Quality VIP 16 Quality VIP 17 Quality VIP 18 Quality VIP 19 Quality VIP 20 Quality VIP 21 Quality VIP 22 Quality VIP 23 Quality VIP 24 Quality VIP 25 Quality VIP 26 Quality VIP 27 Quality VIP 28 Quality VIP 29 Quality VIP 30 Quality VIP 31 Quality VIP 32 Quality VIP 33 Quality VIP 34 Quality VIP 35 Quality VIP 36 Quality VIP 37 Quality VIP 38 Quality VIP 39 Quality VIP 40 Quality VIP 41 Quality VIP 42 Quality VIP 43 Quality VIP 44 Quality VIP 45 Quality VIP 46 Quality VIP 47 Quality VIP 48 Quality VIP 49 Quality VIP 50 Quality VIP 51 Quality VIP 52 Quality VIP 53 Quality VIP 54 Quality VIP 55 Quality VIP 56 Quality VIP 57 Quality VIP 58 Quality VIP 59 Quality VIP 60 Quality VIP 61 Quality VIP 62 Quality VIP 63 Quality VIP 64 PubPres VIP 1 PubPres VIP 2 PubPres VIP 3 PubPres VIP 4 PubPres VIP 5 PubPres VIP 6 PubPres VIP 7 PubPres VIP 8 PubPres VIP 9 PubPres VIP 10 PubPres VIP 11 PubPres VIP 12 PubPres VIP 13 PubPres VIP 14 PubPres VIP 15 PubPres VIP 16 PubPres VIP 17 PubPres VIP 18 PubPres VIP 19 PubPres VIP 20 PubPres VIP 21 PubPres VIP 22 PubPres VIP 23 PubPres VIP 24 PubPres VIP 25 PubPres VIP 26 PubPres VIP 27 PubPres VIP 28 PubPres VIP 29 PubPres VIP 30 PubPres VIP 31 PubPres VIP 32 PubPres VIP 33 PubPres VIP 34 PubPres VIP 35 PubPres VIP 36 PubPres VIP 37 PubPres VIP 38 PubPres VIP 39 PubPres VIP 40 PubPres VIP 41 
PubPres VIP 42 PubPres VIP 43 PubPres VIP 44 PubPres VIP 45 PubPres VIP 46 PubPres VIP 47 PubPres VIP 48 PubPres VIP 49 PubPres VIP 50 PubPres VIP 51 PubPres VIP 52 PubPres VIP 53 PubPres VIP 54 PubPres VIP 55 PubPres VIP 56 PubPres VIP 57 PubPres VIP 58 PubPres VIP 59 PubPres VIP 60 PubPres VIP 61 PubPres VIP 62 PubPres VIP 63 PubPres VIP 64 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Adv Freq Inh Stg1 f+t Sta Stg1 f+t Trp Stg1 f+df/dt TrpStg1 df/dt+t StaStg1 df/dt+t TrpStg1 f+Df/Dt StaStg1 f+Df/Dt TrpStg1 Block Unused Unused Stg1 Restore ClsStg1 Restore StaUnused Unused Stg2 f+t Sta Stg2 f+t Trp Stg2 f+df/dt TrpStg2 df/dt+t StaStg2 df/dt+t TrpStg2 f+Df/Dt StaStg2 f+Df/Dt TrpStg2 Block Unused Unused Stg2 Restore ClsStg2 Restore StaUnused Unused Stg3 f+t Sta Stg3 f+t Trp Stg3 f+df/dt TrpStg3 df/dt+t StaStg3 df/dt+t TrpStg3 f+Df/Dt StaStg3 f+Df/Dt TrpStg3 Block Unused Unused Stg3 Restore ClsStg3 Restore StaUnused Unused Stg4 f+t Sta Stg4 f+t Trp Stg4 f+df/dt TrpStg4 df/dt+t StaStg4 df/dt+t TrpStg4 f+Df/Dt StaStg4 f+Df/Dt TrpStg4 Block Unused Unused Stg4 Restore ClsStg4 Restore StaUnused Unused Stg5 f+t Sta Stg5 f+t Trp Stg5 f+df/dt TrpStg5 df/dt+t StaStg5 df/dt+t TrpStg5 f+Df/Dt StaStg5 f+Df/Dt TrpStg5 Block Unused Unused Stg5 Restore ClsStg5 Restore StaUnused Unused Stg6 f+t Sta Stg6 f+t Trp Stg6 f+df/dt TrpStg6 df/dt+t StaStg6 df/dt+t TrpStg6 
f+Df/Dt StaStg6 f+Df/Dt TrpStg6 Block Unused Unused Stg6 Restore ClsStg6 Restore StaUnused Unused Stg7 f+t Sta Stg7 f+t Trp Stg7 f+df/dt TrpStg7 df/dt+t StaStg7 df/dt+t TrpStg7 f+Df/Dt StaStg7 f+Df/Dt TrpStg7 Block Unused Unused Stg7 Restore ClsStg7 Restore StaUnused Unused Stg8 f+t Sta Stg8 f+t Trp Stg8 f+df/dt TrpStg8 df/dt+t StaStg8 df/dt+t TrpStg8 f+Df/Dt StaStg8 f+Df/Dt TrpStg8 Block Unused Unused Stg8 Restore ClsStg8 Restore StaUnused Unused Stg9 f+t Sta Stg9 f+t Trp Stg9 f+df/dt TrpStg9 df/dt+t StaStg9 df/dt+t TrpStg9 f+Df/Dt StaStg9 f+Df/Dt TrpStg9 Block Unused Unused Stg9 Restore ClsStg9 Restore StaRestore Reset Reset Stats Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused ÿÿ CODStringProperty T ÿþÿ € ^ ÿþÿ ÿÿ CODBoolProperty | ÿÿ CODEditProperties 2 € ( ÿÿ CODDWordProperty r ÿÿÿ € h € r ÿÿÿ ”
) w i n s p o o l \ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 I P _ 1 0 . 3 2 . 1 2 9 . 1 2 9 Ü ¸\ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 Ü ¸Sÿ€ š4d X X A 4 PRIVâ0 ''' ' (ü ¼ P4 (ˆ þr”
ÿ ÿ ( SMTJ X e r o x W o r k C e n t r e 7 6 6 5 r e v 2 P S InputSlot *UseFormTrayTable PageSize A4 PageRegion LeadingEdge Resolution 600x600dpi Duplex DuplexNoTumble Collate True StapleLocation None XrxInputSlot True Rotation True ¼ 9XRX MOCX ^ l „ 4 š x l ³ e wœ ‘ i j o p q r ’ “ ” ȶ ÉØ ñ ò ß à û f — ¡ ˜c ¢c ™ £ – Ño Òê
Û áo âê
ã ê´ ëö Ö8 ×z º¼ »þ Î Ð Í Ï ƒ † ‡
‰ Œ Š ‹ Ê z@ | } Ë X YZ / Z [ \ ] % ( & ' ! 3 0 0 1 3 1 2 ú MSCF à , 90 R T P9e //Uncompressed-Data// yòói TCKãbb``ìsdHaÈeÈdÈâb††"†D ™¤GÁH ŒŒŒm6ö¹9
e©EÅ™ùy¶J†zJ
©yÉù)™yé¶J¡!nºJöv¼\6Å™i
@ÅV9‰ ©Ô<ÝÐ`%Œ>XÊ ÿÿÿÿ TCOM5 > > > > > > b R t ÿÿÿ
"""
oneconnection = """
A P P : C o u r i e r
T Y P E : S c h e m e L o g i c E d i t o r
F O R M A T : 1 . 0
V E R S I O N : 4 . 0 0
D O M A I N : 0 0 S e t t i n g s
S U B D O M A I N : 0 P S L S e t t i n g G r p 1
M O D E L : P 1 4 2 1 1 7 B 4 M 0 4 3 0 J
R E F E R E N C E :
D D B D E S C R I P T I O N F I L E :
F i l e c r e a t e d f r o m t e m p l a t e o n T h u r s d a y , D e c e m b e r 2 0 , 2 0 1 8 Â X @ +++++++++++++++++!!!!!!!!!!!!!!!!++++++++++++++++""""""""EEEEEEEEEEEEEEE+++++++++++++++++FFFFFFFFGGGGGGGGGGGGGGGG((((((((((((((((I$$$$$$$$$$$$$$$$$+$$$+$CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC+CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$,,,,,,,,,,,,,,,,,+C$,$$,,$$CCCCCC$$$$$$$$$$$$CCC$$$$$$CCCC$$++++++++CCC$CC$$C+$CCC$$$$++++C+++++++++++++++++++++++CCCCCCCCCC$$$$$$$$CC++++++CCCCCC$C$$$C$$$$$CCC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$CCCC$$$$+++++++$$$$$$$$$$$$$$$$$++++++++++++++--------------------------------////////////////////////////////////////////////////////////////11111111RRRRRRRR+++++++++++JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ+PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++C$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$CC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++Output R1 Output R2 Output R3 Output R4 Output R5 Output R6 Output R7 Output R8 Output R9 Output R10 Output R11 Output R12 Output R13 Output R14 Output R15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Input L1 Input L2 Input L3 Input L4 Input L5 Input L6 Input L7 Input L8 Input L9 Input L10 Input 
L11 Input L12 Input L13 Input L14 Input L15 Input L16 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED 1 LED 2 LED 3 LED 4 LED 5 LED 6 LED 7 LED 8 Relay Cond 1 Relay Cond 2 Relay Cond 3 Relay Cond 4 Relay Cond 5 Relay Cond 6 Relay Cond 7 Relay Cond 8 Relay Cond 9 Relay Cond 10 Relay Cond 11 Relay Cond 12 Relay Cond 13 Relay Cond 14 Relay Cond 15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED Cond IN 1 LED Cond IN 2 LED Cond IN 3 LED Cond IN 4 LED Cond IN 5 LED Cond IN 6 LED Cond IN 7 LED Cond IN 8 Timer in 1 Timer in 2 Timer in 3 Timer in 4 Timer in 5 Timer in 6 Timer in 7 Timer in 8 Timer in 9 Timer in 10 Timer in 11 Timer in 12 Timer in 13 Timer in 14 Timer in 15 Timer in 16 Timer out 1 Timer out 2 Timer out 3 Timer out 4 Timer out 5 Timer out 6 Timer out 7 Timer out 8 Timer out 9 Timer out 10 Timer out 11 Timer out 12 Timer out 13 Timer out 14 Timer out 15 Timer out 16 Fault REC TRIG SG-opto Invalid Prot'n Disabled F out of Range VT Fail Alarm CT Fail Alarm CB Fail Alarm I^ Maint Alarm I^ Lockout AlarmCB Ops Maint CB Ops Lockout CB Op Time MaintCB Op Time Lock Fault Freq Lock CB Status Alarm Man CB Trip FailMan CB Cls Fail Man CB UnhealthyUnused AR Lockout AR CB Unhealthy AR No Sys Check Unused UV Block SR User Alarm 1 SR User Alarm 2 SR User Alarm 3 SR User Alarm 4 SR User Alarm 5 SR User Alarm 6 SR User Alarm 7 SR User Alarm 8 SR User Alarm 9 SR User Alarm 10SR User Alarm 11SR User Alarm 12SR User Alarm 13SR User Alarm 14SR User Alarm 15SR User Alarm 16SR User Alarm 17MR User Alarm 18MR User Alarm 19MR User Alarm 20MR User Alarm 21MR User Alarm 22MR User Alarm 23MR User Alarm 24MR User Alarm 25MR User Alarm 26MR User Alarm 27MR User Alarm 28MR User Alarm 29MR User Alarm 30MR User Alarm 31MR User Alarm 32MR User Alarm 33MR User Alarm 34MR User Alarm 35I>1 Timer Block I>2 Timer Block I>3 Timer Block I>4 Timer Block Unused 
IN1>1 Timer Blk IN1>2 Timer Blk IN1>3 Timer Blk IN1>4 Timer Blk IN2>1 Timer Blk IN2>2 Timer Blk IN2>3 Timer Blk IN2>4 Timer Blk ISEF>1 Timer BlkISEF>2 Timer BlkISEF>3 Timer BlkISEF>4 Timer BlkVN>1 Timer Blk VN>2 Timer Blk V<1 Timer Block V<2 Timer Block V>1 Timer Block V>2 Timer Block CLP Initiate Ext. Trip 3ph CB Aux 3ph(52-A)CB Aux 3ph(52-B)CB Healthy MCB/VTS Init Trip CB Init Close CB Reset Close Dly Reset Relays/LEDReset Thermal Reset Lockout Reset CB Data Block AR Live Line Mode Auto Mode Telecontrol ModeI>1 Trip I>1 Trip A I>1 Trip B I>1 Trip C I>2 Trip I>2 Trip A I>2 Trip B I>2 Trip C I>3 Trip I>3 Trip A I>3 Trip B I>3 Trip C I>4 Trip I>4 Trip A I>4 Trip B I>4 Trip C Unused Broken Line TripIN1>1 Trip IN1>2 Trip IN1>3 Trip IN1>4 Trip IN2>1 Trip IN2>2 Trip IN2>3 Trip IN2>4 Trip ISEF>1 Trip ISEF>2 Trip ISEF>3 Trip ISEF>4 Trip IREF> Trip VN>1 Trip VN>2 Trip Thermal Trip V2> Trip V<1 Trip V<1 Trip A/AB V<1 Trip B/BC V<1 Trip C/CA V<2 Trip V<2 Trip A/AB V<2 Trip B/BC V<2 Trip C/CA V>1 Trip V>1 Trip A/AB V>1 Trip B/BC V>1 Trip C/CA V>2 Trip V>2 Trip A/AB V>2 Trip B/BC V>2 Trip C/CA Any Start I>1 Start I>1 Start A I>1 Start B I>1 Start C I>2 Start I>2 Start A I>2 Start B I>2 Start C I>3 Start I>3 Start A I>3 Start B I>3 Start C I>4 Start I>4 Start A I>4 Start B I>4 Start C VCO Start AB VCO Start BC VCO Start CA Unused IN1>1 Start IN1>2 Start IN1>3 Start IN1>4 Start IN2>1 Start IN2>2 Start IN2>3 Start IN2>4 Start ISEF>1 Start ISEF>2 Start ISEF>3 Start ISEF>4 Start VN>1 Start VN>2 Start Thermal Alarm V2> Start V<1 Start V<1 Start A/AB V<1 Start B/BC V<1 Start C/CA V<2 Start V<2 Start A/AB V<2 Start B/BC V<2 Start C/CA V>1 Start V>1 Start A/AB V>1 Start B/BC V>1 Start C/CA V>2 Start V>2 Start A/AB V>2 Start B/BC V>2 Start C/CA CLP Operation I> BlockStart IN/SEF>Blk StartVTS Fast Block VTS Slow Block CTS Block Bfail1 Trip 3ph Bfail2 Trip 3ph Control Trip Control Close Close in Prog Block Main Prot Block SEF Prot AR In Progress AR In Service Seq Counter = 0 Seq Counter = 
1 Seq Counter = 2 Seq Counter = 3 Seq Counter = 4 Successful CloseDead T in Prog Protection LocktReset Lckout AlmAuto Close AR Trip Test IA< Start IB< Start IC< Start IN< Start ISEF< Start CB Open 3 ph CB Closed 3 ph All Poles Dead Any Pole Dead Pole Dead A Pole Dead B Pole Dead C VTS Acc Ind VTS Volt Dep VTS IA> VTS IB> VTS IC> VTS VA> VTS VB> VTS VC> VTS I2> VTS V2> VTS IA delta> VTS IB delta> VTS IC delta> CBF SEF Trip CBF Non I Trip CBF SEF Trip-1 CBF Non I Trip-1Unused AR Sys Checks OKLockout Alarm Pre-Lockout Freq High Freq Low Stop Freq Track Start N Field volts failFreq Not Found F<1 Timer Block F<2 Timer Block F<3 Timer Block F<4 Timer Block F>1 Timer Block F>2 Timer Block F<1 Start F<2 Start F<3 Start F<4 Start F>1 Start F>2 Start F<1 Trip F<2 Trip F<3 Trip F<4 Trip F>1 Trip F>2 Trip YN> Timer Block GN> Timer Block BN> Timer Block YN> Start GN> Start BN> Start YN> Trip GN> Trip BN> Trip Ext AR Prot TripExt AR Prot StrtTest Mode Inhibit SEF Live Line Dead Line Unused Unused Unused Unused Unused Unused Unused Unused DAR Complete CB in Service AR Restart AR In Progress 1DeadTime EnabledDT OK To Start DT Complete Reclose Checks Circuits OK Unused AR SysChecks OK AR Init TripTest103 MonitorBlock103 CommandBlockISEF>1 Start 2 ISEF>2 Start 2 ISEF>3 Start 2 ISEF>4 Start 2 Unused Unused Unused Unused Time Synch Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused External Trip A External Trip B External Trip C External Trip EFExternal TripSEFI2> Inhibit I2>1 Tmr Blk I2>2 Tmr Blk I2>3 Tmr Blk I2>4 Tmr Blk I2>1 Start I2>2 Start I2>3 Start I2>4 Start I2>1 Trip I2>2 Trip I2>3 Trip I2>4 Trip V2> Accelerate Trip LED TriggerUnused Unused Unused Unused Unused Unused Blk Rmt. CB Ops SG Select x1 SG Select 1x IN1> Inhibit IN2> Inhibit AR Skip Shot 1 Logic 0 Ref. 
Inh Reclaim TimeReclaim In Prog Reclaim CompleteBrokenLine StartTrip Command In Trip Command OutIA2H Start IB2H Start IC2H Start I2H Any Start RP1 Read Only RP2 Read Only NIC Read Only Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Power>1 3PhStartPower>1 A Start Power>1 B Start Power>1 C Start Power>2 3PhStartPower>2 A Start Power>2 B Start Power>2 C Start Power<1 3PhStartPower<1 A Start Power<1 B Start Power<1 C Start Power<2 3PhStartPower<2 A Start Power<2 B Start Power<2 C Start Power>1 3Ph TripPower>1 A Trip Power>1 B Trip Power>1 C Trip Power>2 3Ph TripPower>2 A Trip Power>2 B Trip Power>2 C Trip Power<1 3Ph TripPower<1 A Trip Power<1 B Trip Power<1 C Trip Power<2 3Ph TripPower<2 A Trip Power<2 B Trip Power<2 C Trip Power>1 Block Power>2 Block Power<1 Block Power<2 Block SensP1 Start A 
SensP2 Start A SensP1 Trip A SensP2 Trip A Unused Unused Unused Unused Unused Unused Unused Battery Fail Rear Comm 2 FailGOOSE IED AbsentNIC Not Fitted NIC No Response NIC Fatal Error NIC Soft. ReloadBad TCP/IP Cfg. Bad OSI Config. NIC Link Fail NIC SW Mis-MatchIP Addr ConflictIM Loopback IM Msg Fail IM DCD Fail IM Chan Fail Backup Setting Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Control Input 1 Control Input 2 Control Input 3 Control Input 4 Control Input 5 Control Input 6 Control Input 7 Control Input 8 Control Input 9 Control Input 10Control Input 11Control Input 12Control Input 13Control Input 14Control Input 15Control Input 16Control Input 17Control Input 18Control Input 19Control Input 20Control Input 21Control Input 22Control Input 23Control Input 24Control Input 25Control Input 26Control Input 27Control Input 28Control Input 29Control Input 30Control Input 31Control Input 32Virtual Input 1 Virtual Input 2 Virtual Input 3 Virtual Input 4 Virtual Input 5 Virtual Input 6 Virtual Input 7 Virtual Input 8 Virtual Input 9 Virtual Input 10Virtual Input 11Virtual Input 12Virtual Input 13Virtual Input 14Virtual Input 15Virtual Input 16Virtual Input 17Virtual Input 18Virtual Input 19Virtual Input 20Virtual Input 21Virtual Input 22Virtual Input 23Virtual Input 24Virtual Input 25Virtual Input 26Virtual Input 27Virtual Input 28Virtual Input 29Virtual Input 30Virtual Input 31Virtual Input 32Virtual Input 33Virtual Input 34Virtual Input 35Virtual Input 36Virtual Input 37Virtual Input 38Virtual Input 39Virtual Input 40Virtual Input 41Virtual Input 42Virtual Input 43Virtual Input 44Virtual Input 45Virtual Input 46Virtual Input 47Virtual Input 48Virtual Input 49Virtual Input 50Virtual Input 51Virtual Input 52Virtual Input 53Virtual Input 54Virtual Input 55Virtual Input 56Virtual Input 57Virtual Input 58Virtual Input 59Virtual Input 60Virtual Input 61Virtual Input 62Virtual Input 63Virtual Input 64InterMiCOM in 1 
InterMiCOM in 2 InterMiCOM in 3 InterMiCOM in 4 InterMiCOM in 5 InterMiCOM in 6 InterMiCOM in 7 InterMiCOM in 8 InterMiCOM out 1InterMiCOM out 2InterMiCOM out 3InterMiCOM out 4InterMiCOM out 5InterMiCOM out 6InterMiCOM out 7InterMiCOM out 8Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused PSL Internal 001PSL Internal 002PSL Internal 003PSL Internal 004PSL Internal 005PSL Internal 006PSL Internal 007PSL Internal 008PSL Internal 009PSL Internal 010PSL Internal 011PSL Internal 012PSL Internal 013PSL Internal 014PSL Internal 015PSL Internal 016PSL Internal 017PSL Internal 018PSL Internal 019PSL Internal 020PSL Internal 021PSL Internal 022PSL Internal 023PSL Internal 024PSL Internal 025PSL Internal 026PSL Internal 027PSL Internal 028PSL Internal 029PSL Internal 030PSL Internal 031PSL Internal 032PSL Internal 033PSL Internal 034PSL Internal 035PSL Internal 036PSL Internal 037PSL Internal 038PSL Internal 039PSL Internal 040PSL Internal 041PSL Internal 042PSL Internal 043PSL Internal 044PSL Internal 045PSL Internal 046PSL Internal 047PSL Internal 048PSL Internal 049PSL Internal 050PSL Internal 051PSL Internal 052PSL Internal 053PSL Internal 054PSL Internal 055PSL Internal 056PSL Internal 057PSL Internal 058PSL Internal 059PSL Internal 060PSL Internal 061PSL Internal 062PSL Internal 063PSL Internal 064PSL Internal 065PSL Internal 066PSL Internal 067PSL Internal 068PSL Internal 069PSL Internal 070PSL Internal 071PSL Internal 072PSL Internal 073PSL Internal 074PSL Internal 075PSL Internal 076PSL Internal 077PSL Internal 078PSL Internal 079PSL Internal 080PSL Internal 081PSL Internal 082PSL Internal 083PSL Internal 084PSL Internal 085PSL Internal 086PSL Internal 087PSL Internal 088PSL Internal 089PSL Internal 090PSL Internal 091PSL Internal 092PSL Internal 093PSL Internal 094PSL Internal 095PSL Internal 096PSL Internal 097PSL Internal 098PSL Internal 099PSL Internal 100Unused Virtual Output 1Virtual Output 2Virtual Output 3Virtual Output 4Virtual 
Output 5Virtual Output 6Virtual Output 7Virtual Output 8Virtual Output 9Virtual Output10Virtual Output11Virtual Output12Virtual Output13Virtual Output14Virtual Output15Virtual Output16Virtual Output17Virtual Output18Virtual Output19Virtual Output20Virtual Output21Virtual Output22Virtual Output23Virtual Output24Virtual Output25Virtual Output26Virtual Output27Virtual Output28Virtual Output29Virtual Output30Virtual Output31Virtual Output32Quality VIP 1 Quality VIP 2 Quality VIP 3 Quality VIP 4 Quality VIP 5 Quality VIP 6 Quality VIP 7 Quality VIP 8 Quality VIP 9 Quality VIP 10 Quality VIP 11 Quality VIP 12 Quality VIP 13 Quality VIP 14 Quality VIP 15 Quality VIP 16 Quality VIP 17 Quality VIP 18 Quality VIP 19 Quality VIP 20 Quality VIP 21 Quality VIP 22 Quality VIP 23 Quality VIP 24 Quality VIP 25 Quality VIP 26 Quality VIP 27 Quality VIP 28 Quality VIP 29 Quality VIP 30 Quality VIP 31 Quality VIP 32 Quality VIP 33 Quality VIP 34 Quality VIP 35 Quality VIP 36 Quality VIP 37 Quality VIP 38 Quality VIP 39 Quality VIP 40 Quality VIP 41 Quality VIP 42 Quality VIP 43 Quality VIP 44 Quality VIP 45 Quality VIP 46 Quality VIP 47 Quality VIP 48 Quality VIP 49 Quality VIP 50 Quality VIP 51 Quality VIP 52 Quality VIP 53 Quality VIP 54 Quality VIP 55 Quality VIP 56 Quality VIP 57 Quality VIP 58 Quality VIP 59 Quality VIP 60 Quality VIP 61 Quality VIP 62 Quality VIP 63 Quality VIP 64 PubPres VIP 1 PubPres VIP 2 PubPres VIP 3 PubPres VIP 4 PubPres VIP 5 PubPres VIP 6 PubPres VIP 7 PubPres VIP 8 PubPres VIP 9 PubPres VIP 10 PubPres VIP 11 PubPres VIP 12 PubPres VIP 13 PubPres VIP 14 PubPres VIP 15 PubPres VIP 16 PubPres VIP 17 PubPres VIP 18 PubPres VIP 19 PubPres VIP 20 PubPres VIP 21 PubPres VIP 22 PubPres VIP 23 PubPres VIP 24 PubPres VIP 25 PubPres VIP 26 PubPres VIP 27 PubPres VIP 28 PubPres VIP 29 PubPres VIP 30 PubPres VIP 31 PubPres VIP 32 PubPres VIP 33 PubPres VIP 34 PubPres VIP 35 PubPres VIP 36 PubPres VIP 37 PubPres VIP 38 PubPres VIP 39 PubPres VIP 40 PubPres VIP 41 
PubPres VIP 42 PubPres VIP 43 PubPres VIP 44 PubPres VIP 45 PubPres VIP 46 PubPres VIP 47 PubPres VIP 48 PubPres VIP 49 PubPres VIP 50 PubPres VIP 51 PubPres VIP 52 PubPres VIP 53 PubPres VIP 54 PubPres VIP 55 PubPres VIP 56 PubPres VIP 57 PubPres VIP 58 PubPres VIP 59 PubPres VIP 60 PubPres VIP 61 PubPres VIP 62 PubPres VIP 63 PubPres VIP 64 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Adv Freq Inh Stg1 f+t Sta Stg1 f+t Trp Stg1 f+df/dt TrpStg1 df/dt+t StaStg1 df/dt+t TrpStg1 f+Df/Dt StaStg1 f+Df/Dt TrpStg1 Block Unused Unused Stg1 Restore ClsStg1 Restore StaUnused Unused Stg2 f+t Sta Stg2 f+t Trp Stg2 f+df/dt TrpStg2 df/dt+t StaStg2 df/dt+t TrpStg2 f+Df/Dt StaStg2 f+Df/Dt TrpStg2 Block Unused Unused Stg2 Restore ClsStg2 Restore StaUnused Unused Stg3 f+t Sta Stg3 f+t Trp Stg3 f+df/dt TrpStg3 df/dt+t StaStg3 df/dt+t TrpStg3 f+Df/Dt StaStg3 f+Df/Dt TrpStg3 Block Unused Unused Stg3 Restore ClsStg3 Restore StaUnused Unused Stg4 f+t Sta Stg4 f+t Trp Stg4 f+df/dt TrpStg4 df/dt+t StaStg4 df/dt+t TrpStg4 f+Df/Dt StaStg4 f+Df/Dt TrpStg4 Block Unused Unused Stg4 Restore ClsStg4 Restore StaUnused Unused Stg5 f+t Sta Stg5 f+t Trp Stg5 f+df/dt TrpStg5 df/dt+t StaStg5 df/dt+t TrpStg5 f+Df/Dt StaStg5 f+Df/Dt TrpStg5 Block Unused Unused Stg5 Restore ClsStg5 Restore StaUnused Unused Stg6 f+t Sta Stg6 f+t Trp Stg6 f+df/dt TrpStg6 df/dt+t StaStg6 df/dt+t TrpStg6 
f+Df/Dt StaStg6 f+Df/Dt TrpStg6 Block Unused Unused Stg6 Restore ClsStg6 Restore StaUnused Unused Stg7 f+t Sta Stg7 f+t Trp Stg7 f+df/dt TrpStg7 df/dt+t StaStg7 df/dt+t TrpStg7 f+Df/Dt StaStg7 f+Df/Dt TrpStg7 Block Unused Unused Stg7 Restore ClsStg7 Restore StaUnused Unused Stg8 f+t Sta Stg8 f+t Trp Stg8 f+df/dt TrpStg8 df/dt+t StaStg8 df/dt+t TrpStg8 f+Df/Dt StaStg8 f+Df/Dt TrpStg8 Block Unused Unused Stg8 Restore ClsStg8 Restore StaUnused Unused Stg9 f+t Sta Stg9 f+t Trp Stg9 f+df/dt TrpStg9 df/dt+t StaStg9 df/dt+t TrpStg9 f+Df/Dt StaStg9 f+Df/Dt TrpStg9 Block Unused Unused Stg9 Restore ClsStg9 Restore StaRestore Reset Reset Stats Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused ÿÿ CODStringProperty T ÿþÿ € ^ ÿþÿ ÿÿ CODBoolProperty | ÿÿ CODEditProperties 2 € ( ÿÿ CODDWordProperty r ÿÿÿ € h € r ÿÿÿ ÿÿ CODSymbolComponentÿÿÿÿ ÿÿ
CODCirclePort ˜A‰™ ! Ÿ ' ÿÿ CODTransform €? €? C @Á € T ÿþÿP o r t € ^ ÿþÿ
O u t p u t P o r t € | ÿÿ CODFillProperties ÿÿÿ ÿÿÿ € ( € ÑƒÌ ! ' € €? €? *à @Á € T ÿþÿP o r t € ^ ÿþÿP o r t € ÿÿÿ ÿÿÿ € I R ! X ' € T ÿþÿP o r t € ^ ÿþÿP o r t € | € P € ( ÿÿ
CODConnection€ œ $ ™ ! Ÿ ' € P € ÿÿÿ ÿÿÿ € ^ ÿþÿP o r t € T ÿþÿP o r t € ( 8 Ÿ 0 € €? €? @A ÀA ÿÿ CPSLSignalProperties õ + ; ÿþÿI n p u t L 1 2 € T ÿþÿO p t o S i g n a l € ^ ÿþÿS i g n a l 1 € | € ( ÿÿ CODRectComponent ¾ { - € T ÿþÿ R e c t a n g l e € ^ ÿþÿS i g n a l C o l o u r R e c t 1 € ÿÿ ÿÿÿ ÿÿ CODLineProperties
+ ÿÿ CODPolygonComponent $ v $ ‚ - v 6 6 ‹6L. ˜ - € €? €? A  € T ÿþÿP o l y g o n € ^ ÿþÿS i g n a l B o d y P o l y g o n 1 € ÿÿÿ ÿÿÿ 9 + ÿÿ CODLineComponent »s„˜ $ œ $ € €? €? C @Á € T ÿþÿL i n e € ^ ÿþÿP o r t L i n e 1 9 + ÿÿ CODTextComponent ÿþÿD D B # 0 4 3 îÿÿÿfÿÿÿ* fÿÿÿ* îÿÿÿ Iü. ’ 0 €ÐÕ? VI=}~BÀe^A € T ÿþÿT e x t € ^ ÿþÿ
D D B N u m b e r T e x t 1 8€
€ ÿÿÿ ÿÿÿ ÿÿ CODFontProperties ÿþÿA r i a l
€ æ € ð ÿÿ CODIntProperty Ò P€ Ü + E€ ÿþÿ I n p u t L 1 2 òÿÿÿþÿÿ- þÿÿ- ¢ òÿÿÿ¢ àTË . “ ( €"ëÚ? Þ<(lfBqå@ H € ^ ÿþÿS i g n a l N a m e T e x t 1 J K M N O Q R + ÿÿ CODImageComponent ÿþÿ+U : \ 5 0 3 2 0 \ D E V \ s c h e m e o v \ r e s \ s i g n a l - o p t o . b m p BMv v ( € € €€ € € € €€ ÀÀÀ €€€ ÿ ÿ ÿÿ ÿ ÿ ÿ ÿÿ ÿÿÿ ÿÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿ ð ÿÿÿÿÿ ðÿÿÿÿÿ ÿÿÿÿ ÿÿÿÿÿð ÿÿÿÿð ÿ ÿÿððÿ ðÿð ÿÿÿ ÿ ÿð ÿÿ ÿðÿ ÿÿÿÿÿð ÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿÿÿð ú7 - , € €? €? ˆA €@ € T ÿþÿI m a g e € ^ ÿþÿI m a g e 1 + @€ " ¨Z .
C € ^ ÿþÿL i n e 1 9 P€ M 2 P€ L 2 P€ K 2 P€ J 2 € + @€ ! Æì - . - € €? €? €?
C \ 1 9 ] ^ _ ` a +
€ÿÿÿÿ € ºj+‘ ! — ' € €? €? èÁ PÁ € T ÿþÿP o r t € ^ ÿþÿ
O u t p u t P o r t € ÿÿÿ ÿÿÿ € ( € ‹â !
' € €? €? Á PÁ € T ÿþÿP o r t € ^ ÿþÿP o r t € ÿÿÿ ÿÿÿ j € I J ! P ' € T ÿþÿP o r t € ^ ÿþÿP o r t € | € P o € ( $€€ $ !
' ' ( ) * + k ÒQ$ — 0 € €? €? D ÀA -€ õ 6 ÿþÿT r i p C o m m a n d I n € T ÿþÿ
O u t p u t S i g n a l € ^ ÿþÿO u t p u t S i g n a l 2 1 2 3€ ƒ ƒ D¿Õ‡ ” - 5 6 1 9 € ÿ ÿÿÿ + :€ m
m N- u - € €? €? €@ €¿ = > 1 9 ? + E€ ÿþÿD D B # 5 3 6 îÿÿÿ€ÿÿÿ* €ÿÿÿ* ! îÿÿÿ! Á×½ u 0 €ÐÕ? VI=}*BÀeNA H I 1 J K M N O Q R + @€ © ÃŽ $ $ € €? €? &à PÁ C € ^ ÿþÿO u t p u t P o r t L i n e 1 9 + E€ ÿþÿT r i p C o m m a n d I n òÿÿÿÜþÿÿ- Üþÿÿ- Ñ òÿÿÿÑ úÚ u ( €"ëÚ? Þ<(lBqÅ@ H U 1 J K M N O Q R + V€ ÿþÿ-U : \ 5 0 3 2 0 \ D E V \ s c h e m e o v \ r e s \ s i g n a l - o u t p u t . b m p BMv v ( € € €€ € € € €€ ÀÀÀ €€€ ÿ ÿ ÿÿ ÿ ÿ ÿ ÿÿ ÿÿÿ ÿÿÿÿÿÿÿÿÿÿÿÿð ÿÿÿÿ ÿÿÿð ÿÿ ÿÿÿ ÿÿ ÿÿð ÿÿÿ ÿÿ ÿÿÿ ÿÿÿÿ ÿÿÿÿ ÿÿ ÿÿÿ ÿÿð ÿÿÿ ÿÿÿ ÿÿ ÿÿÿð ÿÿ ÿÿÿÿ ÿÿÿÿð ÿÿÿÿÿÿÿÿ ¨V16v † , € €? €? äB @@ Y Z 1 + @€ ƒ q Ãùu ‡
C \ 1 9 ] ^ _ ` a + @€ ƒ q íu - ‡ - € €? €? €?
C \ 1 9 ] ^ _ ` a + ÿÿ CPSLLinkComponent ÿÿÿÿ & w € Q $ N ! T ' ' 1 € ^ ÿþÿ € T ÿþÿ € ( % v dáW0œ $ $ € d € F P€ ¤ 1 € ^ ÿþÿL i n k € T ÿþÿL i n k ‘ ÿÿ CODLineLinkShape œ $ Q $ Q $ $ ƽ~!œ $ $
9 ] ^ _ ` a 1 \ C + ”
) w i n s p o o l \ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 I P _ 1 0 . 3 2 . 1 2 9 . 1 2 9 Ü ¸\ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 Ü ¸Sÿ€ š4d X X A 4 PRIVâ0 ''' ' (ü ¼ P4 (ˆ þr”
ÿ ÿ ( SMTJ X e r o x W o r k C e n t r e 7 6 6 5 r e v 2 P S InputSlot *UseFormTrayTable PageSize A4 PageRegion LeadingEdge Resolution 600x600dpi Duplex DuplexNoTumble Collate True StapleLocation None XrxInputSlot True Rotation True ¼ 9XRX MOCX ^ l „ 4 š x l ³ e wœ ‘ i j o p q r ’ “ ” ȶ ÉØ ñ ò ß à û f — ¡ ˜c ¢c ™ £ – Ño Òê
Û áo âê
ã ê´ ëö Ö8 ×z º¼ »þ Î Ð Í Ï ƒ † ‡
‰ Œ Š ‹ Ê z@ | } Ë X YZ / Z [ \ ] % ( & ' ! 3 0 0 1 3 1 2 ú MSCF à , 90 R T P9e //Uncompressed-Data// yòói TCKãbb``ìsdHaÈeÈdÈâb††"†D ™¤GÁH ŒŒŒm6ö¹9
e©EÅ™ùy¶J†zJ
©yÉù)™yé¶J¡!nºJöv¼\6Å™i
@ÅV9‰ ©Ô<ÝÐ`%Œ>XÊ ÿÿÿÿ TCOM5 > > > > > > b R t ÿÿÿ
"""
| 853.795455
| 26,608
| 0.431789
| 8,574
| 75,134
| 3.826335
| 0.099837
| 0.379675
| 0.545371
| 0.708142
| 0.967324
| 0.965373
| 0.96391
| 0.961106
| 0.959704
| 0.959551
| 0
| 0.07894
| 0.49992
| 75,134
| 87
| 26,609
| 863.609195
| 0.784047
| 0
| 0
| 0.62069
| 0
| 0.603448
| 0.999241
| 0.043765
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
c586f1f2ce17a641832af20c2bd615bf9ef8a94b
| 10,841
|
py
|
Python
|
test/requirements/test_rdf_source.py
|
denz/ldp
|
e49cff6f39a4b6d68998d90b8c75158e5b9b450a
|
[
"BSD-3-Clause"
] | null | null | null |
test/requirements/test_rdf_source.py
|
denz/ldp
|
e49cff6f39a4b6d68998d90b8c75158e5b9b450a
|
[
"BSD-3-Clause"
] | null | null | null |
test/requirements/test_rdf_source.py
|
denz/ldp
|
e49cff6f39a4b6d68998d90b8c75158e5b9b450a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
### 4.3 RDF Source
The following section contains normative clauses for Linked Data Platform RDF
Source.
#### 4.3.1 General
##### 4.3.1.1 Each LDP RDF Source _MUST_ also be a conforming LDP Resource as
defined in section 4.2 Resource, along with the restrictions in this section.
LDP clients _MAY_ infer the following triple: one whose subject is the LDP-RS,
whose predicate is `rdf:type`, and whose object is `ldp:Resource`, but there
is no requirement to materialize this triple in the LDP-RS representation.
##### 4.3.1.2 LDP-RSs representations _SHOULD_ have at least one `rdf:type`
set explicitly. This makes the representations much more useful to client
applications that don't support inferencing.
##### 4.3.1.3 The representation of a LDP-RS _MAY_ have an `rdf:type` of
`ldp:RDFSource` for Linked Data Platform RDF Source.
##### 4.3.1.4 LDP servers _MUST_ provide an RDF representation for LDP-RSs.
The HTTP `Request-URI` of the LDP-RS is typically the subject of most triples
in the response.
##### 4.3.1.5 LDP-RSs _SHOULD_ reuse existing vocabularies instead of creating
their own duplicate vocabulary terms. In addition to this general rule, some
specific cases are covered by other conformance rules.
##### 4.3.1.6 LDP-RSs predicates _SHOULD_ use standard vocabularies such as
Dublin Core [DC-TERMS], RDF [rdf11-concepts] and RDF Schema [rdf-schema],
whenever possible.
##### 4.3.1.7 In the absence of special knowledge of the application or
domain, LDP clients _MUST_ assume that any LDP-RS can have multiple `rdf:type`
triples with different objects.
##### 4.3.1.8 In the absence of special knowledge of the application or
domain, LDP clients _MUST_ assume that the `rdf:type` values of a given LDP-RS
can change over time.
##### 4.3.1.9 LDP clients _SHOULD_ always assume that the set of predicates
for a LDP-RS of a particular type at an arbitrary server is open, in the sense
that different resources of the same type may not all have the same set of
predicates in their triples, and the set of predicates that are used in the
state of any one LDP-RS is not limited to any pre-defined set.
##### 4.3.1.10 LDP servers _MUST NOT_ require LDP clients to implement
inferencing in order to recognize the subset of content defined by LDP. Other
specifications built on top of LDP may require clients to implement
inferencing [rdf11-concepts]. The practical implication is that all content
defined by LDP must be explicitly represented, unless noted otherwise within
this document.
##### 4.3.1.11 A LDP client _MUST_ preserve all triples retrieved from a LDP-
RS using HTTP `GET` that it doesn't change whether it understands the
predicates or not, when its intent is to perform an update using HTTP `PUT`.
The use of HTTP `PATCH` instead of HTTP `PUT` for update avoids this burden
for clients [RFC5789].
##### 4.3.1.12 LDP clients _MAY_ provide LDP-defined hints that allow servers
to optimize the content of responses. section 7.2 Preferences on the Prefer
Request Header defines hints that apply to LDP-RSs.
##### 4.3.1.13 LDP clients _MUST_ be capable of processing responses formed
by a LDP server that ignores hints, including LDP-defined hints.
Feature At Risk
The LDP Working Group proposes incorporation of the following clause to make
LDP clients paging aware:
##### 4.3.1.14 LDP clients _SHOULD_ be capable of processing successful HTTP
`GET` responses formed by a LDP server that independently initiated paging,
returning a page of representation instead of full resource representation
[LDP-PAGING].
#### 4.3.2 HTTP GET
##### 4.3.2.1 LDP servers _MUST_ respond with a Turtle representation of the
requested LDP-RS when the request includes an `Accept` header specifying
`text/turtle`, unless HTTP content negotiation _requires_ a different outcome
[turtle].
> _Non-normative note:_ In other words, Turtle must be returned by LDP
servers in the usual case clients would expect (client requests it) as well as
cases where the client requests Turtle or other media type(s), content
negotiation results in a tie, and Turtle is one of the tying media types. For
example, if the `Accept` header lists `text/turtle` as one of several media
types with the highest relative quality factor (`q=` value), LDP servers must
respond with Turtle. HTTP servers in general are not required to resolve ties
in this way, or to support Turtle at all, but LDP servers are. On the other
hand, if Turtle is one of several requested media types, but another media
type the server supports has a higher relative quality factor, standard HTTP
content negotiation rules apply and the server (LDP or not) would not respond
with Turtle.
##### 4.3.2.2 LDP servers _SHOULD_ respond with a `text/turtle` representation
of the requested LDP-RS whenever the `Accept` request header is absent
[turtle].
Feature At Risk
The LDP Working Group proposes incorporation of the following clause requiring
JSON-LD support.
##### 4.3.2.3 LDP servers _MUST_ respond with a `application/ld+json`
representation of the requested LDP-RS when the request includes an `Accept`
header, unless content negotiation or Turtle support _requires_ a different
outcome [JSON-LD].
*[LDPRs]: Linked Data Platform Resources
*[LDP-RS]: Linked Data Platform RDF Source
*[RDF]: Resource Description Framework
*[LDPR]: Linked Data Platform Resource
*[LDPC]: Linked Data Platform Container
"""
from test.requirements.base import LdpTestCase
from test.requirements.base import CONTINENTS
class LdprsGeneral(LdpTestCase):
    """Placeholder tests for LDP spec section 4.3.1 (LDP-RS: General)."""

    def test_4_3_1_1(self):
        """4.3.1.1 Each LDP RDF Source MUST also be a conforming LDP Resource
        (section 4.2). Clients MAY infer the triple (LDP-RS, rdf:type,
        ldp:Resource); there is no requirement to materialize it in the
        LDP-RS representation.
        """
        pass

    def test_4_3_1_2(self):
        """4.3.1.2 LDP-RS representations SHOULD have at least one rdf:type
        set explicitly, which makes them far more useful to clients that do
        not support inferencing.
        """
        pass

    def test_4_3_1_4(self):
        """4.3.1.4 LDP servers MUST provide an RDF representation for
        LDP-RSs; the HTTP Request-URI is typically the subject of most
        triples in the response.
        """
        pass

    def test_4_3_1_5(self):
        """4.3.1.5 LDP-RSs SHOULD reuse existing vocabularies instead of
        minting duplicate terms; some specific cases are covered by other
        conformance rules.
        """
        pass

    def test_4_3_1_6(self):
        """4.3.1.6 LDP-RS predicates SHOULD use standard vocabularies such as
        Dublin Core [DC-TERMS], RDF [rdf11-concepts] and RDF Schema
        [rdf-schema], whenever possible.
        """
        pass

    def test_4_3_1_7(self):
        """4.3.1.7 Absent special knowledge of the application or domain,
        LDP clients MUST assume any LDP-RS can have multiple rdf:type
        triples with different objects.
        """
        pass

    def test_4_3_1_8(self):
        """4.3.1.8 Absent special knowledge of the application or domain,
        LDP clients MUST assume the rdf:type values of a given LDP-RS can
        change over time.
        """
        pass

    def test_4_3_1_9(self):
        """4.3.1.9 LDP clients SHOULD treat the predicate set of an LDP-RS of
        any particular type as open: resources of the same type need not
        share predicates, and the state of any one LDP-RS is not limited to
        a pre-defined predicate set.
        """
        pass

    def test_4_3_1_11(self):
        """4.3.1.11 A client updating via HTTP PUT MUST preserve all triples
        it retrieved via GET and did not change, whether or not it
        understands the predicates; HTTP PATCH avoids this burden [RFC5789].
        """
        pass

    def test_4_3_1_13(self):
        """4.3.1.13 LDP clients MUST be capable of processing responses
        formed by a server that ignores hints, including LDP-defined hints.
        """
        pass

    def test_4_3_1_14(self):
        """4.3.1.14 LDP clients SHOULD be capable of processing successful
        HTTP GET responses where the server independently initiated paging
        and returned a page instead of the full representation [LDP-PAGING].
        """
        pass
class LdprsHttpGet(LdpTestCase):
    """Placeholder tests for LDP spec section 4.3.2 (LDP-RS: HTTP GET)."""

    # Fixture graph(s) consumed by the base class.
    GRAPHS = {'continents': {'source': 'test/continents.rdf',
                             'publicID': CONTINENTS},
              }

    def test_4_3_2_1(self):
        """4.3.2.1 LDP servers MUST respond with a Turtle representation of
        the requested LDP-RS when the request includes an Accept header
        specifying text/turtle, unless HTTP content negotiation requires a
        different outcome [turtle].
        """
        # NOTE(review): this request asks for JSON-LD rather than the
        # text/turtle the clause covers, and nothing is asserted about
        # `response` — confirm intent against the application under test.
        response = self.app.get('/rdfsource/AF',
                                headers={'Accept': 'application/ld+json'})

    def test_4_3_2_2(self):
        """4.3.2.2 LDP servers SHOULD respond with a text/turtle
        representation of the requested LDP-RS whenever the Accept request
        header is absent [turtle].
        """
        """this is violated since html application lies on top"""
        pass

    def test_4_3_2_3(self):
        """4.3.2.3 LDP servers MUST respond with an application/ld+json
        representation of the requested LDP-RS when the request includes an
        Accept header, unless content negotiation or Turtle support requires
        a different outcome [JSON-LD].
        """
        pass
| 39.565693
| 111
| 0.730006
| 1,756
| 10,841
| 4.451025
| 0.171412
| 0.012283
| 0.014202
| 0.016121
| 0.859775
| 0.843014
| 0.825102
| 0.825102
| 0.825102
| 0.817042
| 0
| 0.025441
| 0.205977
| 10,841
| 274
| 112
| 39.565693
| 0.882551
| 0.8799
| 0
| 0.361111
| 0
| 0
| 0.070681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.388889
| false
| 0.361111
| 0.055556
| 0
| 0.527778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
c5adb2ef21ddc659b14d543ba94b65d62e1f2650
| 247
|
py
|
Python
|
src/prefect/backend/__init__.py
|
gabrielvieira37/prefect
|
6e9170134b59ed5b66afa752bb275e7e44330ed4
|
[
"Apache-2.0"
] | null | null | null |
src/prefect/backend/__init__.py
|
gabrielvieira37/prefect
|
6e9170134b59ed5b66afa752bb275e7e44330ed4
|
[
"Apache-2.0"
] | null | null | null |
src/prefect/backend/__init__.py
|
gabrielvieira37/prefect
|
6e9170134b59ed5b66afa752bb275e7e44330ed4
|
[
"Apache-2.0"
] | null | null | null |
from prefect.backend.task_run import TaskRunView
from prefect.backend.flow_run import FlowRunView, execute_flow_run
from prefect.backend.flow import FlowView
from prefect.backend.kv_store import set_key_value, get_key_value, delete_key, list_keys
| 49.4
| 88
| 0.870445
| 39
| 247
| 5.230769
| 0.512821
| 0.215686
| 0.352941
| 0.215686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080972
| 247
| 4
| 89
| 61.75
| 0.898678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c5bdb96ec00d373011251a4dd31e0d344e095cd0
| 150
|
py
|
Python
|
src/spaceone/notification/manager/__init__.py
|
jihyungSong/plugin-email-notification-protocol
|
2e9ebc08b1b541f69b46e8ff8507fe2604058724
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/notification/manager/__init__.py
|
jihyungSong/plugin-email-notification-protocol
|
2e9ebc08b1b541f69b46e8ff8507fe2604058724
|
[
"Apache-2.0"
] | 1
|
2022-03-21T07:19:50.000Z
|
2022-03-21T07:19:50.000Z
|
src/spaceone/notification/manager/__init__.py
|
jihyungSong/plugin-email-notification-protocol
|
2e9ebc08b1b541f69b46e8ff8507fe2604058724
|
[
"Apache-2.0"
] | 1
|
2021-07-20T02:13:21.000Z
|
2021-07-20T02:13:21.000Z
|
from spaceone.notification.manager.notification_manager import NotificationManager
from spaceone.notification.manager.smtp_manager import SMTPManager
| 50
| 82
| 0.906667
| 16
| 150
| 8.375
| 0.5
| 0.425373
| 0.358209
| 0.462687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 150
| 2
| 83
| 75
| 0.943662
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a83e1d73ef72a5fc774e5c9d52ecebd72160d193
| 14,630
|
py
|
Python
|
generators.py
|
xianqiu/math-homework
|
362ba744695650ecd9f5bdcd68793e53f8d9afcf
|
[
"MIT"
] | 1
|
2021-09-01T00:44:51.000Z
|
2021-09-01T00:44:51.000Z
|
generators.py
|
xianqiu/math-homework
|
362ba744695650ecd9f5bdcd68793e53f8d9afcf
|
[
"MIT"
] | 1
|
2022-02-16T11:59:33.000Z
|
2022-02-16T12:21:15.000Z
|
generators.py
|
xianqiu/math-homework
|
362ba744695650ecd9f5bdcd68793e53f8d9afcf
|
[
"MIT"
] | 1
|
2022-02-16T11:49:09.000Z
|
2022-02-16T11:49:09.000Z
|
__all__ = ['MathL1', 'MathL2', 'MathL3', 'MathL4',
'MathL5', 'MathL6', 'MathL7', 'MathL8',
'MathL9', 'MathL10', 'MathL11', 'MathL12',
'MathL13', 'MathL14', 'MathL15', 'MathL16',
'MathL17', 'MathL18', 'MathL19', 'MathL20',
'MathL21', 'MathL22', 'MathL23', 'MathL24',
'MathL25', 'MathL26', 'MathL27', 'MathL28'
]
import numpy as np
def _to_result(arr, ops, wrap=True, skip=None):
"""
把公式格式化成字符串
:param arr: 二维数组,每一行代表公式的数字,例如 [a, b]
:param ops: 二维数组,每一行代表公式的操作,例如 [+, =]
:param wrap: 自动加括号,例如 a + (-b)
:return: str list,例如 ['a1 + b1 = ', 'a2 + b2 = ']
"""
if skip is None:
skip = {}
res = []
for row, op in zip(arr, ops):
comb = []
for i in range(len(row)):
if wrap and row[i] < 0 and i not in skip:
comb.append('(' + str(int(row[i])) + ')')
else:
comb.append(str(int(row[i])))
if i < len(op):
comb.append(op[i])
res.append(' '.join(comb))
return res
def _gen_add_arr(lb, ub, k, num):
""" 生成二维数组。
1、每行 k 个数,用来相加,绝对值不大于 ub。
2、数组一共 num 行。
3、每个数不超过 ub,不低于lb。
"""
arr = np.random.randint(lb, ub, (num, k))
for i in range(len(arr)):
if abs(sum(arr[i])) > ub:
arr[i] = np.floor(arr[i] / k)
return arr
class MathL1(object):
    """Addition: a + b."""

    def __init__(self, ub=20):
        # Upper (exclusive) bound for generated operands.
        self._ub = ub

    def generate(self, num):
        """Return `num` exercises of the form 'a + b ='."""
        operands = _gen_add_arr(0, self._ub, 2, num)
        return _to_result(operands, [['+', '=']] * num)
class MathL2(object):
    """Subtraction with a non-negative result: a - b."""

    def __init__(self, ub=20):
        self._ub = ub

    def generate(self, num):
        """Return `num` exercises 'a - b =' with a >= b."""
        pairs = np.random.randint(0, self._ub, (num, 2))
        for idx in range(len(pairs)):
            # Swap so the minuend is the larger operand.
            if pairs[idx][0] < pairs[idx][1]:
                pairs[idx] = [pairs[idx][1], pairs[idx][0]]
        return _to_result(pairs, [['-', '=']] * num)
class MathL3(object):
    """Mixed drills: a + b or a - b (non-negative result)."""

    def __init__(self, ub=20):
        self._m1 = MathL1(ub)
        self._m2 = MathL2(ub)

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of both kinds."""
        pool = self._m1.generate(num) + self._m2.generate(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
class MathL4(object):
    """Chained addition: a + b + c."""

    def __init__(self, ub=30):
        self._ub = ub

    def generate(self, num):
        """Return `num` exercises of the form 'a + b + c ='."""
        operands = _gen_add_arr(0, self._ub, 3, num)
        return _to_result(operands, [['+', '+', '=']] * num)
class MathL5(object):
    """Chained subtraction with a non-negative result: a - b - c.

    Fixes vs. original: `ub` now has a default value (every sibling
    generator has one, and MathL6 passes 30), and the third operand is
    drawn with an exclusive bound of max(a - b, 1) so that
    np.random.randint(0, 0) can no longer raise ValueError when a == b.
    """

    def __init__(self, ub=30):
        # Upper (exclusive) bound for generated operands.
        self._ub = ub

    def generate(self, num):
        """Return `num` exercises 'a - b - c =' with a >= b and c < a - b."""
        arr = np.random.randint(0, self._ub, (num, 2))
        b = [0] * num
        for i in range(len(arr)):
            # Ensure the minuend is the larger operand.
            if arr[i][0] < arr[i][1]:
                arr[i] = [arr[i][1], arr[i][0]]
            # randint's high bound is exclusive and must be positive;
            # when a == b force c = 0 instead of crashing.
            b[i] = np.random.randint(0, max(arr[i][0] - arr[i][1], 1))
        arr = np.insert(arr, 2, values=b, axis=1)
        ops = [['-', '-', '=']] * num
        return _to_result(arr, ops)
class MathL6(object):
    """Mixed drills: a + b + c or a - b - c (non-negative result)."""

    def __init__(self, ub=30):
        self._m4 = MathL4(ub)
        self._m5 = MathL5(ub)

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of both kinds."""
        pool = self._m4.generate(num) + self._m5.generate(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
class MathL7(object):
    """Mixed three-term drills: a-b+c, a+b-c or a+b+c (non-negative)."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)

        def pick(row):
            # Choose the operator pattern that keeps partial results >= 0.
            if row[0] >= row[1]:
                return ['-', '+', '=']
            if row[1] >= row[2]:
                return ['+', '-', '=']
            return ['+', '+', '=']

        return _to_result(rows, [pick(row) for row in rows])
class MathL8(object):
    """Negative minus positive: -a - b."""

    def __init__(self, ub=30):
        self._ub = ub

    def generate(self, num):
        negs = _gen_add_arr(-self._ub // 2, 0, 1, num)
        poss = _gen_add_arr(0, self._ub // 2, 1, num)
        rows = [[a[0], b[0]] for a, b in zip(negs, poss)]
        # skip={0}: the leading negative is printed without parentheses.
        return _to_result(rows, [['-', '=']] * num, skip={0})
class MathL9(object):
    """Subtraction drills: a - b and -a + b."""

    def __init__(self, ub=30):
        self._ub = ub

    def _generate1(self, num):
        # a - b
        rows = _gen_add_arr(0, self._ub, 2, num)
        return _to_result(rows, [['-', '=']] * num)

    def _generate2(self, num):
        # -a + b (leading negative unparenthesized via skip={0})
        negs = _gen_add_arr(-self._ub, 0, 1, num)
        poss = _gen_add_arr(0, self._ub, 1, num)
        rows = [[a[0], b[0]] for a, b in zip(negs, poss)]
        return _to_result(rows, [['+', '=']] * num, skip={0})

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of both forms."""
        pool = self._generate1(num) + self._generate2(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
class MathL10(object):
    """Addition/subtraction drills: -a-b, -a+b and a-b."""

    def __init__(self, ub=30):
        self._ub = ub

    def _generate1(self, num):
        # -a - b
        negs = _gen_add_arr(-self._ub // 2, 0, 1, num)
        poss = _gen_add_arr(0, self._ub // 2, 1, num)
        rows = [[a[0], b[0]] for a, b in zip(negs, poss)]
        return _to_result(rows, [['-', '=']] * num, skip={0})

    def _generate2(self, num):
        # -a + b
        negs = _gen_add_arr(-self._ub, 0, 1, num)
        poss = _gen_add_arr(0, self._ub, 1, num)
        rows = [[a[0], b[0]] for a, b in zip(negs, poss)]
        return _to_result(rows, [['+', '=']] * num, skip={0})

    def _generate3(self, num):
        # a - b
        rows = _gen_add_arr(0, self._ub, 2, num)
        return _to_result(rows, [['-', '=']] * num)

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of all three forms."""
        pool = self._generate1(num) + self._generate2(num) + self._generate3(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
class MathL11(object):
    """Chained subtraction drills: a-b-c and -a-b-c."""

    def __init__(self, ub=30):
        self._ub = ub

    def _generate1(self, num):
        # a - b - c
        rows = _gen_add_arr(0, self._ub, 3, num)
        return _to_result(rows, [['-', '-', '=']] * num)

    def _generate2(self, num):
        # -a - b - c: negate the first column, print it bare via skip={0}.
        rows = _gen_add_arr(0, self._ub, 3, num)
        rows[:, 0] *= -1
        return _to_result(rows, [['-', '-', '=']] * num, skip={0})

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of both forms."""
        pool = self._generate1(num) + self._generate2(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
class MathL12(object):
    """Mixed chained drills: a+b-c or a-b+c."""

    def __init__(self, ub=30):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        ops = []
        for _ in range(len(rows)):
            draw = np.random.rand()
            # ~30% '+,-', otherwise '-,+' — same split as before.
            ops.append(['+', '-', '='] if draw < 0.3 else ['-', '+', '='])
        return _to_result(rows, ops)
class MathL13(object):
    """Mixed chained drills with a leading negative: -a+b-c or -a-b+c."""

    def __init__(self, ub=30):
        self._ub = ub

    def generate(self, num):
        first = _gen_add_arr(-self._ub // 2, 0, 1, num)
        rest = _gen_add_arr(0, self._ub, 2, num)
        rows = np.c_[first, rest]
        ops = []
        for _ in range(len(rows)):
            draw = np.random.rand()
            ops.append(['+', '-', '='] if draw < 0.5 else ['-', '+', '='])
        return _to_result(rows, ops, skip={0})
class MathL14(object):
    """Double negatives: a + (-b) or a - (-b)."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        poss = _gen_add_arr(0, self._ub // 2, 1, num)
        negs = _gen_add_arr(-self._ub // 2, -1, 1, num)
        rows = np.c_[poss, negs]
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+', '='] if draw > 0.5 else ['-', '='])
        return _to_result(rows, ops)
class MathL15(object):
    """Double negatives: -a + (-b) or -a - (-b)."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(-self._ub, 0, 2, num)
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+', '='] if draw > 0.5 else ['-', '='])
        # skip={0}: only the second negative gets parentheses.
        return _to_result(rows, ops, skip={0})
class MathL16(object):
    """a + b + c where each operand may be negative."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(-self._ub, self._ub, 3, num)
        return _to_result(rows, [['+', '+', '=']] * num, skip={0})
class MathL17(object):
    """Addition fill-in-the-blank: a + ? = b."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 2, num)
        return _to_result(rows, [['+ __ =']] * num)
class MathL18(object):
    """Subtraction fill-in-the-blank: a - ? = b."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 2, num)
        return _to_result(rows, [['- __ =']] * num)
class MathL19(object):
    """Fill-in-the-blank: a + ? = b or a - ? = b."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 2, num)
        ops = []
        for _ in range(len(rows)):
            draw = np.random.rand()
            ops.append(['- __ ='] if draw > 0.5 else ['+ __ ='])
        return _to_result(rows, ops)
class MathL20(object):
    """Trailing blank after addition: a + b + ? = c or a + b - ? = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+', '+ __ ='] if draw < 0.4 else ['+', '- __ ='])
        return _to_result(rows, ops)
class MathL21(object):
    """Fill-in-the-blank with a leading negative: -a + ? = b or -a - ? = b."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        negs = _gen_add_arr(-self._ub, 0, 1, num)
        poss = _gen_add_arr(0, self._ub, 1, num)
        rows = np.c_[negs, poss]
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+ __ ='] if draw < 0.4 else ['- __ ='])
        return _to_result(rows, ops, skip={0})
class MathL22(object):
    """Trailing blank after subtraction: a - b + ? = c or a - b - ? = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['-', '+ __ ='] if draw < 0.5 else ['-', '- __ ='])
        return _to_result(rows, ops)
class MathL23(object):
    """Trailing blank, leading negative: -a - b + ? = c or -a - b - ? = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        rows[:, 0] *= -1
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['-', '+ __ ='] if draw < 0.5 else ['-', '- __ ='])
        return _to_result(rows, ops, skip={0})
class MathL24(object):
    """Trailing blank, leading negative: -a + b + ? = c or -a + b - ? = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        rows[:, 0] *= -1
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+', '+ __ ='] if draw < 0.5 else ['+', '- __ ='])
        return _to_result(rows, ops, skip={0})
class MathL25(object):
    """Trailing blank, negative ends: -a + b + ? = -c or -a - b + ? = -c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        rows[:, 0] *= -1
        rows[:, 2] *= -1
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+', '+ __ ='] if draw < 0.5 else ['-', '+ __ ='])
        return _to_result(rows, ops, skip={0, 2})
class MathL26(object):
    """Middle blank: a + ? + b = c or a - ? - b = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        ops = []
        for _ in range(num):
            draw = np.random.rand()
            ops.append(['+ __ +', '='] if draw < 0.5 else ['- __ -', '='])
        return _to_result(rows, ops)
class MathL27(object):
    """Middle blank with negative ends: -a + ? + b = -c or -a - ? - b = c."""

    def __init__(self, ub=40):
        self._ub = ub

    def generate(self, num):
        rows = _gen_add_arr(0, self._ub, 3, num)
        rows[:, 0] *= -1
        ops = [[]] * num
        for i in range(num):
            draw = np.random.rand()
            ops[i] = ['+ __ +', '='] if draw < 0.5 else ['- __ -', '=']
            # Half the time also flip the sign of the right-hand side.
            if np.random.rand() < 0.5:
                rows[i][2] = rows[i][2] * -1
        return _to_result(rows, ops, skip={0, 2})
class MathL28(object):
    """Blank in any position: a+b+?=c, ?+a+b=c or a+?+b=c (signed operands)."""

    def __init__(self, ub=40):
        self._ub = ub

    def _generate1(self, num):
        # a + b + ? = c
        rows = _gen_add_arr(-self._ub, self._ub, 3, num)
        return _to_result(rows, [['+', '+ __ =']] * num, skip={0, 2})

    def _generate2(self, num):
        # ? + a + b = c — render 'a + b = c' and prefix the blank.
        rows = _gen_add_arr(-self._ub, self._ub, 3, num)
        rendered = _to_result(rows, [['+', '=']] * num, skip={2})
        return ['__ + ' + line for line in rendered]

    def _generate3(self, num):
        # a + ? + b = c
        rows = _gen_add_arr(-self._ub, self._ub, 3, num)
        return _to_result(rows, [['+ __ +', '=']] * num, skip={0, 2})

    def generate(self, num):
        """Sample `num` exercises from a pooled batch of all three forms."""
        pool = self._generate1(num) + self._generate2(num) + self._generate3(num)
        picks = np.random.randint(0, len(pool), num)
        return [pool[p] for p in picks]
| 25.267703
| 68
| 0.466507
| 2,070
| 14,630
| 3.083575
| 0.07343
| 0.09118
| 0.05358
| 0.07238
| 0.826414
| 0.813097
| 0.793827
| 0.774714
| 0.757794
| 0.746201
| 0
| 0.044744
| 0.340055
| 14,630
| 578
| 69
| 25.311419
| 0.616365
| 0.059877
| 0
| 0.740947
| 0
| 0
| 0.03052
| 0
| 0.011142
| 0
| 0
| 0
| 0
| 1
| 0.189415
| false
| 0
| 0.002786
| 0
| 0.381616
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a848cbe3d0571fa2759fc4c25126e577ee5ea719
| 37,395
|
py
|
Python
|
ApplicationPerformance/webautomation/webAutomation.py
|
hsy5332/Blog
|
3c17e097b31dcddfc41896149cc14b69fea1ae14
|
[
"Apache-2.0"
] | null | null | null |
ApplicationPerformance/webautomation/webAutomation.py
|
hsy5332/Blog
|
3c17e097b31dcddfc41896149cc14b69fea1ae14
|
[
"Apache-2.0"
] | null | null | null |
ApplicationPerformance/webautomation/webAutomation.py
|
hsy5332/Blog
|
3c17e097b31dcddfc41896149cc14b69fea1ae14
|
[
"Apache-2.0"
] | null | null | null |
# TODO: send the report by email, and record each test case's execution result
import time
import ApplicationPerformance.sendReport as sendReport
import ApplicationPerformance.applicationperformance.launchTime as launchTime # MAC
# import ApplicationPerformance.applicationperformance.launchTime as launchTime # Windows
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class WebAutomation(object):
# Launch a browser
def startBrowser(self, browsername, testurl, *browserconfigure):
if "谷歌" in browsername:
driver = webdriver.Chrome()
return driver
elif "火狐" in browsername:
if browserconfigure[0] != "": # 判断是否有配置路径
driver = webdriver.Firefox(webdriver.FirefoxProfile(browserconfigure[0])) # 带着配置启动火狐浏览器(比如增加Xpth插件等。)
return driver
else:
driver = webdriver.Firefox()
return driver
else:
print("您的测试用例中,存在无法识别的浏览器名称,请检查用例。")
# Double-click operations
def operateDoubleClick(self, operatetype, element, driver, caseid):
if operatetype == "双击_id":
try:
driver.find_element_by_id(element).double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_xpath":
try:
driver.find_element_by_xpath(element).double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_classname":
try:
driver.find_elements_by_class_name(element)[0].double_click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_linkname":
try:
driver.find_elements_by_link_text(element)[0].double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# Right-click operations
def operateRightClick(self, operatetype, element, driver, caseid):
if operatetype == "右击_id":
try:
driver.find_element_by_id(element).context_click().perform()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_xpath":
try:
driver.find_element_by_xpath(element).context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_classname":
try:
driver.find_elements_by_class_name(element)[0].context_click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_linkname":
try:
driver.find_elements_by_link_text(element)[0].context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# Left-click operations
def operateClick(self, operatetype, element, driver, caseid):
if operatetype == "点击_id":
try:
driver.find_element_by_id(element).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_xpath":
try:
driver.find_element_by_xpath(element).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_classname":
try:
driver.find_elements_by_class_name(element)[0].click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_linkname":
try:
driver.find_elements_by_link_text(element)[0].click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
# 扩展性 查找元素方法
elif operatetype == "点击_cssid":
try:
driver.find_element_by_css_selector("#%s" % (element)).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_cssname":
try:
driver.find_element_by_css_selector("a[name=\"%s\"]" % (element)).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# Check whether an element exists
def operateCheckElement(self, operatetype, element, driver, caseid):
if operatetype == "查找_id":
try:
driver.find_element_by_id(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_xpath":
try:
driver.find_element_by_xpath(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_textname": # 查找textname
try:
driver.find_elements_by_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_classname":
try:
driver.find_elements_by_class_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_linkname":
try:
driver.find_elements_by_link_text(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_id":
try:
driver.find_element_by_id(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_xpath":
try:
driver.find_element_by_xpath(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_textname":
try:
driver.find_elements_by_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_classname":
try:
driver.find_elements_by_class_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_linkname":
try:
driver.find_elements_by_link_text(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# Clear an input box
def clearInput(self, operatetype, element, driver, caseid):
    """Clear the text of the input element addressed by *operatetype*.

    :param operatetype: "清空输入框_id" / "清空输入框_xpath" / "清空输入框_textname".
    :param element: locator value for the chosen strategy.
    :param driver: WebDriver-like object (Selenium 3 ``find_element_by_*`` API).
    :param caseid: case number interpolated into the returned report string.
    :return: Chinese-language pass/fail report string.
    """
    locators = {
        "清空输入框_id": lambda: driver.find_element_by_id(element),
        "清空输入框_xpath": lambda: driver.find_element_by_xpath(element),
        "清空输入框_textname": lambda: driver.find_elements_by_name(element)[0],
    }
    locate = locators.get(operatetype)
    if locate is None:
        return "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
    try:
        locate().clear()
        return "用例编号:%s,执行通过。" % (caseid)
    except Exception:
        # Was a bare `except:`; narrowed to Exception.
        return "用例编号:%s,执行不通过。" % (caseid)
# Input (send_keys) operation
def operateInput(self, operatetype, element, driver, caseid, *parameter):
    """Type ``parameter[0]`` into the element addressed by *operatetype*.

    :param operatetype: one of the "输入_*" strategy names
        (id/xpath/textname/cssid/cssname).
    :param element: locator value for the chosen strategy.
    :param driver: WebDriver-like object (Selenium 3 ``find_element_by_*`` API).
    :param caseid: case number interpolated into the returned report string.
    :param parameter: first positional extra is the text to type; if missing,
        the IndexError is reported as a failed step (same as the original).
    :return: Chinese-language pass/fail report string.
    """
    locators = {
        "输入_id": lambda: driver.find_element_by_id(element),
        "输入_xpath": lambda: driver.find_element_by_xpath(element),
        "输入_textname": lambda: driver.find_elements_by_name(element)[0],
        # Extensible lookup strategies via CSS selectors
        "输入_cssid": lambda: driver.find_element_by_css_selector("#%s" % (element)),
        "输入_cssname": lambda: driver.find_element_by_css_selector("a[name=\"%s\"]" % (element)),
    }
    locate = locators.get(operatetype)
    if locate is None:
        return "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
    try:
        locate().send_keys(parameter[0])
        return "用例编号:%s,执行通过。" % (caseid)
    except Exception:
        # Was a bare `except:`; narrowed to Exception.
        return "用例编号:%s,执行不通过。" % (caseid)
# Physical-key / browser-window operations
def operatePhysicsKye(self, operatetype, element, driver, caseid):
    """Send a keyboard key (ENTER / PAGE_DOWN / PAGE_UP) to an element, or
    adjust the browser window (maximize / resize).

    :param operatetype: "按enter_*", "pagedown_*", "pageup_*" with strategy
        id/xpath/textname, or the window operations "浏览器全屏" /
        "设置分辨率" (the latter parses *element* as "width,height").
    :param element: locator value, or "w,h" for 设置分辨率 (unused for 浏览器全屏).
    :param driver: WebDriver-like object (Selenium 3 ``find_element_by_*`` API).
    :param caseid: case number interpolated into the returned report string.
    :return: Chinese-language pass/fail report string.
    """
    # Thunks keep Keys (selenium) and all lookups lazy: nothing is touched
    # until the chosen action actually runs.
    actions = {
        "按enter_id": lambda: driver.find_element_by_id(element).send_keys(Keys.ENTER),
        "按enter_xpath": lambda: driver.find_element_by_xpath(element).send_keys(Keys.ENTER),
        "按enter_textname": lambda: driver.find_elements_by_name(element)[0].send_keys(Keys.ENTER),
        "pagedown_id": lambda: driver.find_element_by_id(element).send_keys(Keys.PAGE_DOWN),
        "pagedown_xpath": lambda: driver.find_element_by_xpath(element).send_keys(Keys.PAGE_DOWN),
        "pagedown_textname": lambda: driver.find_elements_by_name(element)[0].send_keys(Keys.PAGE_DOWN),
        "pageup_id": lambda: driver.find_element_by_id(element).send_keys(Keys.PAGE_UP),
        "pageup_xpath": lambda: driver.find_element_by_xpath(element).send_keys(Keys.PAGE_UP),
        "pageup_textname": lambda: driver.find_elements_by_name(element)[0].send_keys(Keys.PAGE_UP),
        "浏览器全屏": lambda: driver.maximize_window(),
        # element is "width,height"; extra fields are ignored as before.
        "设置分辨率": lambda: driver.set_window_size(
            int(element.split(',')[0]), int(element.split(',')[1])),
    }
    action = actions.get(operatetype)
    if action is None:
        return "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
    try:
        action()
        return "用例编号:%s,执行通过。" % (caseid)
    except Exception:
        # Was a bare `except:`; narrowed to Exception.
        return "用例编号:%s,执行不通过。" % (caseid)
# Execute the test cases
def runCase(self):
    """Drive the Excel-defined web test suite end to end.

    Reads browser rows from the 'browserinfo' sheet; for every browser marked
    "Y" it starts that browser, replays each row of the 'browseefuncase'
    sheet via the operate* worker methods, writes one result row per step
    into MySQL, and finally emails a run report.

    Bug fixes vs. the original:
      * the inner if-chain tested string literals (``elif "if包含_xpath":`` —
        always true), making the "unknown if-type" branch unreachable;
      * step reports were printed but never assigned to ``casereport``, so the
        DB row stored a stale value (NameError on the very first step);
      * ``casereport``/``casecount`` were unbound when browser rows were
        disabled; now initialized up front;
      * the driver was closed/quit twice on the exception path; cleanup now
        runs exactly once in a ``finally``.
    """
    # Dispatch groups: each set of operation types maps onto one worker method.
    click_ops = {"点击_id", "点击_xpath", "点击_textname", "点击_linkname",
                 "点击_classname", "点击_cssid", "点击_cssname"}
    input_ops = {"输入_id", "输入_xpath", "输入_textname", "输入_cssid", "输入_cssname"}
    clear_ops = {"清空输入框_id", "清空输入框_xpath", "清空输入框_textname"}
    check_ops = {"查找_id", "查找_xpath", "查找_textname", "查找_linkname", "查找_classname"}
    right_ops = {"右击_id", "右击_xpath", "右击_textname", "右击_linkname", "右击_classname"}
    double_ops = {"双击_id", "双击_xpath", "双击_textname", "双击_linkname", "双击_classname"}
    key_ops = {"按enter_id", "按enter_xpath", "按enter_textname",
               "pagedown_id", "pagedown_xpath", "pagedown_textname",
               "pageup_id", "pageup_xpath", "pageup_textname",
               "浏览器全屏", "设置分辨率"}
    if_ops = {"if包含_id", "if包含_xpath", "if包含_classname",
              "if包含_textname", "if包含_linkname"}

    deviceinfo = launchTime.ReadExcel().readeExcelData('browserinfo')
    startautomationtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())  # suite start time
    casereport = ""  # BUG FIX: was unbound when the first browser row is disabled
    casecount = 0    # BUG FIX: was unbound when no browser row is enabled
    for i in range(1, deviceinfo.get('caserows')):
        devicesinfocase = deviceinfo.get('excledata_sheel').row_values(i)
        browsername = devicesinfocase[0]
        browserconfigure = devicesinfocase[1]
        testurl = devicesinfocase[2]
        browserstatus = devicesinfocase[3]
        print(devicesinfocase)
        eventid = time.strftime('%Y%m%d%H%M%S', time.localtime())
        if "Y" in browserstatus:
            driver = WebAutomation().startBrowser(browsername, browserconfigure)
            time.sleep(5)  # give the browser time to come up
            driver.get(testurl)
            casedata = launchTime.ReadExcel().readeExcelData('browseefuncase')  # read the case sheet
            # Pre-scan: record the row numbers of "if..." and "end" rows so a
            # failed "if包含_*" check can jump forward to its matching "end".
            endcasenumber = []
            casenumber = []
            for j in range(1, casedata.get('caserows')):
                operatetype = casedata.get('excledata_sheel').row_values(j)[1]
                if "if" in operatetype:
                    casenumber.append(j)
                if "end" in operatetype:
                    endcasenumber.append(j)
            x = 1
            ifnumber = 0
            try:
                casecount = casedata.get('caserows') - 1  # total number of case rows
                while x <= casecount:
                    excelcasedata = casedata.get('excledata_sheel').row_values(x)
                    x = x + 1
                    try:
                        caseid = int(excelcasedata[0])  # numeric case id...
                    except (ValueError, TypeError):
                        caseid = excelcasedata[0]       # ...or keep the raw cell value
                    operatetype = excelcasedata[1]  # operation type
                    element = excelcasedata[2]      # element locator
                    # send_keys requires str, so always coerce the parameter cell
                    parameter = str(excelcasedata[3])
                    rundescribe = excelcasedata[6]  # step description
                    caseexecute = excelcasedata[7]  # case enabled flag
                    driver.implicitly_wait(60)
                    startonecasetime = time.time()
                    if excelcasedata[5] == "":  # post-step wait, default 2s
                        waittime = 2
                    else:
                        waittime = int(excelcasedata[5])
                    if "Y" in caseexecute:
                        if operatetype == "等待时间":
                            time.sleep(waittime)
                            casereport = "用例编号:%s,执行通过。" % (caseid)
                            print(casereport)
                        elif operatetype == "end":
                            casereport = "用例编号:%s,执行通过。" % (caseid)
                            print(casereport)
                        elif operatetype in click_ops:
                            casereport = WebAutomation().operateClick(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in input_ops:
                            casereport = WebAutomation().operateInput(operatetype, element, driver, caseid, parameter)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in clear_ops:
                            casereport = WebAutomation().clearInput(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in check_ops:
                            casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in right_ops:
                            casereport = WebAutomation().operateRightClick(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in double_ops:
                            casereport = WebAutomation().operateDoubleClick(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif operatetype in key_ops:
                            casereport = WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid)
                            print(casereport)
                            time.sleep(waittime)
                        elif "if" in operatetype:
                            if operatetype in if_ops:
                                casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                print(casereport)
                                if "执行通过" not in casereport:
                                    # Condition failed: skip ahead to the matching "end" row.
                                    if len(endcasenumber) == len(casenumber):
                                        x = endcasenumber[ifnumber]
                                    else:
                                        print("当前用例中的if和and不等,请检查用例")
                                        x = endcasenumber[-1]
                            else:
                                casereport = "用例编号:%s操作类型错误,该用例不执行。" % (caseid)
                                print(casereport)
                            ifnumber = ifnumber + 1
                        else:
                            casereport = "用例编号:%s操作类型错误,该用例不执行。" % (caseid)
                            print(casereport)
                    else:
                        casereport = "用例编号:%s,执行状态为No,故不执行。" % (caseid)
                        print(casereport)
                    endonecasetime = time.time()
                    runonecasetime = round(endonecasetime - startonecasetime, 2)
                    # NOTE(security): SQL is still built by string interpolation;
                    # switch saveDatatoMysql to parameterized queries if its API allows.
                    savedata = "insert into automationquery_automation_function_web (`browsername`,`browserconfigure`,`browserstatus`,`operatetype`,`element`,`parameter`,`waittime`,`rundescribe`,`caseexecute`,`runcasetime`,`caseid`,`eventid`,`casereport`,`createdtime`,`updatetime`)VALUES('%s','%s','%s','%s',\'''%s\''','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                        browsername, browserconfigure, browserstatus, operatetype, element, parameter, waittime,
                        rundescribe,
                        caseexecute,
                        runonecasetime, caseid, eventid, casereport,
                        time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                        time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
                    try:
                        launchTime.MysqlConnect().saveDatatoMysql("%s" % (savedata))
                        time.sleep(1)
                    except Exception:
                        print("数据库连接失败,保存数据失败。")
            except Exception:
                # Abort this browser's run on any unexpected step error;
                # cleanup happens exactly once in the finally below.
                pass
            finally:
                driver.close()
                driver.quit()
        else:
            print("浏览%s,状态为不执行,故该浏览器上不运行用例。" % (devicesinfocase[0]))
            savedata = "insert into automationquery_automation_function_web (`browsername`,`browserconfigure`,`browserstatus`,`operatetype`,`element`,`parameter`,`waittime`,`rundescribe`,`caseexecute`,`runcasetime`,`caseid`,`eventid`,`casereport`,`createdtime`,`updatetime`)VALUES('%s','%s','%s','%s',\'''%s\''','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                browsername, browserconfigure, browserstatus, "", "", "", "",
                "",
                "",
                "", "", eventid, casereport,
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
            try:
                launchTime.MysqlConnect().saveDatatoMysql("%s" % (savedata))
                time.sleep(1)
            except Exception:
                print("数据库连接失败,保存数据失败。")
    tomail = "allenyao224@qq.com,1653838404@qq.com"
    ccemail = "268455431@qq.com"
    print(sendReport.SendReport().senderEmail(tomail, ccemail, startautomationtime, casecount))
# Script entry point: run the full Excel-driven web automation suite.
if __name__ == "__main__":
    WebAutomation().runCase()
| 50.194631
| 380
| 0.468886
| 2,772
| 37,395
| 6.22114
| 0.08189
| 0.08443
| 0.088721
| 0.092201
| 0.84639
| 0.840533
| 0.831777
| 0.809974
| 0.784575
| 0.778313
| 0
| 0.004029
| 0.435834
| 37,395
| 744
| 381
| 50.262097
| 0.813386
| 0.01182
| 0
| 0.795455
| 0
| 0.002841
| 0.100225
| 0.028876
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012784
| false
| 0
| 0.007102
| 0
| 0.166193
| 0.09517
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.