**Schema** (column and dtype as listed in the original dump):

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
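Most of the `qsc_code_*` signals are simple text-level measurements, so it helps to see roughly how they fall out of a row's `content` string. Below is a minimal sketch of a few of them; every definition is inferred from the column name alone, and the original pipeline's tokenization, rounding, and newline handling are not documented here, so treat each one as an assumption.

```python
# Hedged sketch: recompute a few text-level quality signals from a row's
# `content` string. Definitions are inferred from the column names; the real
# pipeline's exact rules may differ.

def basic_signals(content: str) -> dict:
    lines = content.splitlines()
    words = content.split()
    line_lengths = [len(line) for line in lines]
    n_chars = len(content)
    return {
        "qsc_code_num_lines_quality_signal": len(lines),
        "qsc_code_num_chars_quality_signal": n_chars,
        "qsc_code_num_words_quality_signal": len(words),
        "qsc_code_mean_word_length_quality_signal": (
            sum(map(len, words)) / len(words) if words else 0.0
        ),
        "qsc_code_frac_words_unique_quality_signal": (
            len(set(words)) / len(words) if words else 0.0
        ),
        "qsc_code_frac_chars_whitespace_quality_signal": (
            sum(c.isspace() for c in content) / n_chars if n_chars else 0.0
        ),
        "qsc_code_frac_chars_digital_quality_signal": (
            sum(c.isdigit() for c in content) / n_chars if n_chars else 0.0
        ),
        "qsc_code_num_chars_line_max_quality_signal": max(line_lengths, default=0),
        "qsc_code_frac_lines_dupe_lines_quality_signal": (
            1 - len(set(lines)) / len(lines) if lines else 0.0
        ),
    }
```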
**Row 1: tests/fixtures/responses.py (myslak71/dmt)**

- hexsha: e3c8c4ab21bfc32860481768ba00b47689728664; size: 1029; ext: py; lang: Python
- max_stars: repo_path tests/fixtures/responses.py, repo_name myslak71/dmt, head_hexsha fcf892391656c02118733db6680d9e4e33b59495, licenses ["MIT"], count 1, stars_event_min/max both 2019-01-30T14:05:25.000Z
- max_issues: same path, repo, head and licenses; count null, issues_event_min/max null
- max_forks: same path, repo, head and licenses; count null, forks_event_min/max null
- content:

```python
fixed_time_entries = [{'stop': '2018-03-05T18:08:21+00:00', 'at': '2018-03-05T18:08:21+00:00', 'duration': 70,
'guid': '68c5136314e9680a54d0a28508139836', 'start': '2018-03-05T18:08:15+00:00', 'id': 1,
'description': 'task-1', 'duronly': False, 'uid': 2488778, 'billable': False, 'wid': 1688309},
{'stop': '2018-03-05T18:08:21+00:00', 'at': '2018-03-05T18:08:21+00:00', 'duration': 6,
'guid': '68c5136314e9680a54d0a28508139836', 'start': '2018-03-05T18:08:15+00:00', 'id': 2,
'description': 'task-2', 'duronly': False, 'uid': 2488778, 'billable': False, 'wid': 1688309},
{'stop': '2018-03-05T18:08:21+00:00', 'at': '2018-03-05T18:08:21+00:00', 'duration': 500,
'guid': '68c5136314e9680a54d0a28508139836', 'start': '2018-03-05T18:08:15+00:00', 'id': 3,
'description': 'task-3', 'duronly': False, 'uid': 2488778, 'billable': False, 'wid': 1688309}]
```

- avg_line_length 102.9; max_line_length 117; alphanum_fraction 0.536443
- Quality signals (names below are shortened by dropping the qsc_code_ / qsc_codepython_ prefix and the _quality_signal suffix): num_words 126, num_chars 1029, mean_word_length 4.365079, frac_words_unique 0.253968, frac_chars_top_2grams 0.098182, frac_chars_top_3grams 0.18, frac_chars_top_4grams 0.212727, frac_chars_dupe_5grams 0.867273, dupe_6grams 0.867273, dupe_7grams 0.867273, dupe_8grams 0.867273, dupe_9grams 0.785455, dupe_10grams 0.785455, frac_chars_replacement_symbols 0, frac_chars_digital 0.381258, frac_chars_whitespace 0.242954, size_file_byte 1029, num_lines 9, num_chars_line_max 118, num_chars_line_mean 114.333333, frac_chars_alphabet 0.324775, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.495627, frac_chars_long_word_length 0.311953, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0; Python-specific: cate_ast 1, frac_lines_func_ratio 0, cate_var_zero false, frac_lines_pass 0, frac_lines_import 0, frac_lines_simplefunc 0, score_lines_no_logic 0, frac_lines_print 0
- Filter flags set to 1: frac_chars_top_4grams, frac_chars_dupe_5grams through frac_chars_dupe_10grams, frac_chars_digital, num_lines, num_chars_line_mean, frac_chars_alphabet (frac_words_unique and frac_lines_string_concat are null; all other flags 0)
- effective 0; hits 11
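One arithmetic relation worth noting in this row: avg_line_length and num_chars_line_mean are both size divided by a line count, differing only in whether one extra (presumably trailing) line is counted; rows 4 and 8 instead have the two equal, which suggests the extra line only appears when the file ends with a newline. A small self-contained check:

```python
# Cross-check two of row 1's stored signals against its size and line count.
size, num_lines = 1029, 9
assert round(size / (num_lines + 1), 1) == 102.9    # stored avg_line_length
assert round(size / num_lines, 6) == 114.333333     # stored num_chars_line_mean
```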
**Row 2: tests/test_elastic.py**

- hexsha: e3e39704e6e4d3c55660e7f0481b47f4fb734d77; size: 11728; ext: py; lang: Python
- max_stars: repo_path tests/test_elastic.py, repo_name tfcollins/test-results-manager, head_hexsha 38c9a9c28afaea67b6d73d4eb567a460dcfd276a, licenses ["BSD-3-Clause"], count null, stars_event_min/max null
- max_issues: repo_path tests/test_elastic.py, repo_name tfcollins/test-results-manager, head_hexsha 38c9a9c28afaea67b6d73d4eb567a460dcfd276a, licenses ["BSD-3-Clause"], count 5, issues_event_min 2022-01-26T13:28:03.000Z, issues_event_max 2022-03-02T19:08:23.000Z
- max_forks: repo_path tests/test_elastic.py, repo_name sdgtt/telemetry, head_hexsha 38c9a9c28afaea67b6d73d4eb567a460dcfd276a, licenses ["BSD-3-Clause"], count null, forks_event_min/max null
- content:

```python
import pytest
import os
import telemetry
import time
import datetime

server = os.environ.get("SERVER") if "SERVER" in os.environ else "alpine"


@pytest.fixture(autouse=True)
def run_around_tests():
    # Before test
    res = telemetry.elastic(server=server)
    res.index_name = "dummy"
    res.delete_index()
    yield
    # After test
    res = telemetry.elastic(server=server)
    res.index_name = "dummy"
    res.delete_index()


def test_db_connect():
    telemetry.elastic(server=server)
    # Should complete without error
    assert True


def test_db_create_delete():
    res = telemetry.elastic(server=server)
    loc = os.path.dirname(__file__)
    loc = os.path.split(loc)[:-1]
    loc = os.path.join(loc[0], "telemetry", "resources", "evm_tests_el.json")
    s = res.import_schema(loc)
    res.create_db_from_schema(s)
    res.delete_index()


def test_add_entry():
    res = telemetry.elastic(server=server)
    loc = os.path.dirname(__file__)
    loc = os.path.split(loc)[:-1]
    loc = os.path.join(loc[0], "telemetry", "resources", "evm_tests_el.json")
    s = res.import_schema(loc)
    res.create_db_from_schema(s)
    # Add entry
    import datetime
    entry = {
        "test_name": "EVM_1",
        "date": datetime.datetime.now(),
        "tx_device": "PlutoA",
        "rx_device": "PlutoA",
        "CARRIER_FREQUENCY": 1000000000,
        "tx_sample_rate": 1000000,
        "rx_sample_rate": 1000000,
        "standard": "LTE10_ETM3p1",
        "evm_db": 3.2,
        "iteration": 1,
    }
    res.add_entry(entry)
    time.sleep(2)
    results = res.search_all()
    res.delete_index()
    print(results)
    assert results["hits"]["total"]["value"] == 1


def test_ingest_tx_quad_cal():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_ad9361_tx_quad_cal_test("test1", "pluto", 4, 100, 0)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_search_tx_quad_cal():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_ad9361_tx_quad_cal_test("test1", "pluto", 4, 100, 0)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    x, y, t = tel.ad9361_tx_quad_cal_test()
    tel.db.delete_index()
    assert y == [4]
    assert t == [100]
    assert x


def test_search_tx_quad_cal_chan1():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_ad9361_tx_quad_cal_test("test1", "pluto", 4, 100, 0)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    x, y, t = tel.ad9361_tx_quad_cal_test(channel=1)
    tel.db.delete_index()
    assert y == []
    assert t == []
    assert x == []


def test_search_tx_quad_cal_chan0():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_ad9361_tx_quad_cal_test("test1", "pluto", 4, 100, 0)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    x, y, t = tel.ad9361_tx_quad_cal_test(channel=0)
    tel.db.delete_index()
    assert y == [4]
    assert t == [100]
    assert x


def test_ingest_lte():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_lte_evm_test(
        "AD9361",
        -10,
        "slow_attack",
        1e9,
        "TM 3.1",
        "5 MHz",
        0.1,
        0.2,
        0.3,
        0.4,
        0.5,
        0.6,
        0.7,
        0.8,
    )
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_ingest_github_stats():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_ad9361_tx_quad_cal_test("TransceiverToolbox", 1, 2, 3, 4)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_search_github_stats():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_github_stats("TransceiverToolbox", 1, 2, 3, 4)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    stats = tel.github_stats()
    tel.db.delete_index()
    for k in stats:
        assert k == "TransceiverToolbox"
    assert stats["TransceiverToolbox"]["views"] == 1
    assert stats["TransceiverToolbox"]["clones"] == 2
    assert stats["TransceiverToolbox"]["view_unique"] == 3
    assert stats["TransceiverToolbox"]["clones_unique"] == 4


def test_log_github_release_stats():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    rd = datetime.datetime.now()
    tel.log_github_release_stats("TransceiverToolbox", "v19.2.1", 1024, rd)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_search_github_release_stats():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    rd = datetime.datetime.now()
    tel.log_github_release_stats("TransceiverToolbox", "v19.2.1", 1024, rd)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    stats = tel.github_release_stats()
    tel.db.delete_index()
    s = datetime.datetime.strptime(
        stats["TransceiverToolbox"]["release_date"], "%Y-%m-%dT%H:%M:%S.%f"
    )
    for k in stats:
        assert k == "TransceiverToolbox"
    assert stats["TransceiverToolbox"]["tag"] == "v19.2.1"
    assert stats["TransceiverToolbox"]["downloads"] == 1024
    assert s == rd


def test_ingest_boot_tests_stats():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    inputs = {
        "boot_folder_name": "zynq-adrv9361-z7035-bob",
        "hdl_hash": "ecd880d44cdd000691283f2edbd31aa52d6ccc3e",
        "linux_hash": "b0cb7c3bfd1fec02b1671b061112cd2551a9b340",
        "boot_partition_hash": "decb7c3bfd1fec02b1671b061112cd2551a9b3ac",
        "hdl_branch": "hdl_2019_r2",
        "linux_branch": "2019_R2",
        "boot_partition_branch": "NA",
        "is_hdl_release": True,
        "is_linux_release": True,
        "is_boot_partition_release": True,
        "uboot_reached": True,
        "linux_prompt_reached": True,
        "drivers_enumerated": 10,
        "drivers_missing": 5,
        "pytest_errors": 5,
        "pytest_failures": 5,
        "pytest_skipped": 5,
        "pytest_tests": 5,
        "matlab_errors": 5,
        "matlab_failures": 6,
        "matlab_skipped": 7,
        "matlab_tests": 8,
        "dmesg_warnings_found": 0,
        "dmesg_errors_found": 0,
        "jenkins_job_date": datetime.datetime.now(),  # "Dec 31, 2020 @ 13:47:04.129",
        "jenkins_build_number": 34,
        "jenkins_project_name": "pyadi-iio-hw",
        "jenkins_agent": "master",
        "jenkins_trigger": "manual",
        "last_failing_stage": "NA",
        "last_failing_stage_failure": "NA"
    }
    tel.log_boot_tests(**inputs)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_ingest_hdl_resources():
    loc = os.path.dirname(__file__)
    loc = os.path.split(loc)[:-1]
    loc = os.path.join(loc[0], "tests", "resource_utilization.csv")
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    tel.log_hdl_resources_from_csv(loc)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_search_boot_tests():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    inputs = {
        "boot_folder_name": "zynq-adrv9361-z7035-bob",
        "hdl_hash": "ecd880d44cdd000691283f2edbd31aa52d6ccc3e",
        "linux_hash": "b0cb7c3bfd1fec02b1671b061112cd2551a9b340",
        "boot_partition_hash": "decb7c3bfd1fec02b1671b061112cd2551a9b3ac",
        "hdl_branch": "hdl_2019_r2",
        "linux_branch": "2019_R2",
        "boot_partition_branch": "NA",
        "is_hdl_release": True,
        "is_linux_release": True,
        "is_boot_partition_release": True,
        "uboot_reached": True,
        "linux_prompt_reached": True,
        "drivers_enumerated": 10,
        "drivers_missing": 5,
        "pytest_errors": 5,
        "pytest_failures": 5,
        "pytest_skipped": 5,
        "pytest_tests": 5,
        "matlab_errors": 5,
        "matlab_failures": 6,
        "matlab_skipped": 7,
        "matlab_tests": 8,
        "dmesg_warnings_found": 0,
        "dmesg_errors_found": 0,
        "jenkins_job_date": datetime.datetime.now(),  # "Dec 31, 2020 @ 13:47:04.129",
        "jenkins_build_number": 34,
        "jenkins_project_name": "pyadi-iio-hw",
        "jenkins_agent": "master",
        "jenkins_trigger": "manual",
        "last_failing_stage": "NA",
        "last_failing_stage_failure": "NA"
    }
    tel.log_boot_tests(**inputs)
    time.sleep(2)
    inputs["boot_folder_name"] = "zynq-adrv9361-z7035-fmc"
    tel.log_boot_tests(**inputs)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    res = tel.boot_tests()
    tel.db.delete_index()
    assert len(res) == 2
    assert "zynq-adrv9361-z7035-fmc" in res.keys()
    assert "zynq-adrv9361-z7035-bob" in res.keys()


def test_ingest_log_artifacts():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    inputs = {
        "url": "http://SERVER/jenkins/job/HW_tests/job/HW_test_multiconfig/690/artifact/dmesg_zynq-adrv9361-z7035-fmc_err.log",
        "server": "http://SERVER/jenkins",
        "job": "HW_tests/HW_test_multiconfig",
        "job_no": 690,
        "job_date": None,
        "job_build_parameters": "NA",
        "file_name": "dmesg_zynq-adrv9361-z7035-fmc_err.log",
        "target_board": "zynq-adrv9361-z7035-fmc",
        "artifact_info_type": "dmesg_error",
        "payload_raw": "[ 3.820072] systemd[1]: Failed to look up module alias 'autofs4': Function not implemented",
        "payload_ts": "3.820072",
        "payload": "systemd[1]: Failed to look up module alias 'autofs4': Function not implemented",
        "payload_param": "False-0-adi.adrv9002"
    }
    tel.log_artifacts(**inputs)
    time.sleep(2)
    results = tel.db.search_all()
    tel.db.delete_index()
    assert results["hits"]["total"]["value"] == 1


def test_search_artifacts():
    tel = telemetry.ingest(server=server)
    tel.use_test_index = True
    inputs = {
        "url": "http://SERVER/jenkins/job/HW_tests/job/HW_test_multiconfig/690/artifact/dmesg_zynq-adrv9361-z7035-fmc_err.log",
        "server": "http://SERVER/jenkins",
        "job": "HW_tests/HW_test_multiconfig",
        "job_no": 690,
        "job_date": None,
        "job_build_parameters": "NA",
        "file_name": "dmesg_zynq-adrv9361-z7035-fmc_err.log",
        "target_board": "zynq-adrv9361-z7035-fmc",
        "artifact_info_type": "dmesg_error",
        "payload_raw": "[ 3.820072] systemd[1]: Failed to look up module alias 'autofs4': Function not implemented",
        "payload_ts": "3.820072",
        "payload": "systemd[1]: Failed to look up module alias 'autofs4': Function not implemented",
        "payload_param": "False-0-adi.adrv9002"
    }
    tel.log_artifacts(**inputs)
    time.sleep(2)
    inputs["file_name"] = "zynq-adrv9361-z7035-fmc_enumerated_devs.log"
    tel.log_artifacts(**inputs)
    time.sleep(2)
    tel = telemetry.searches(server=server)
    tel.use_test_index = True
    res = tel.artifacts()
    tel.db.delete_index()
    assert len(res) == 2
    assert "zynq-adrv9361-z7035-fmc_enumerated_devs.log" in res[0]['file_name']
    assert "dmesg_zynq-adrv9361-z7035-fmc_err.log" in res[1]['file_name']
```

- avg_line_length 31.442359; max_line_length 127; alphanum_fraction 0.639836
- Quality signals: num_words 1534, num_chars 11728, mean_word_length 4.638201, frac_words_unique 0.147327, frac_chars_top_2grams 0.043851, frac_chars_top_3grams 0.044273, frac_chars_top_4grams 0.053127, frac_chars_dupe_5grams 0.845257, dupe_6grams 0.833591, dupe_7grams 0.830358, dupe_8grams 0.811103, dupe_9grams 0.796627, dupe_10grams 0.790864, frac_chars_replacement_symbols 0, frac_chars_digital 0.063222, frac_chars_whitespace 0.217769, size_file_byte 11728, num_lines 372, num_chars_line_max 128, num_chars_line_mean 31.526882, frac_chars_alphabet 0.712339, frac_chars_comments 0.010573, cate_xml_start 0, frac_lines_dupe_lines 0.714286, cate_autogen 0, frac_lines_long_string 0.012422, frac_chars_string_length 0.300078, frac_chars_long_word_length 0.07088, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0.102484; Python-specific: cate_ast 1, frac_lines_func_ratio 0.055901, cate_var_zero false, frac_lines_pass 0, frac_lines_import 0.024845, frac_lines_simplefunc 0, score_lines_no_logic 0.080745, frac_lines_print 0.003106
- Filter flags set to 1: frac_chars_dupe_5grams through frac_chars_dupe_10grams, frac_lines_dupe_lines (frac_words_unique and frac_lines_string_concat null; all others 0)
- effective 0; hits 7
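The frac_chars_dupe_*grams signals dominate the triggered flags in almost every row of this dump. The pipeline's exact rule is not given here, so the following is only one plausible reading of the name: the fraction of word characters covered by word n-grams that occur more than once.

```python
from collections import Counter

def frac_chars_dupe_ngrams(source: str, n: int) -> float:
    # Assumed definition: share of word characters inside duplicated n-grams.
    words = source.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    covered = set()
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            covered.update(range(i, i + n))
    total = sum(len(w) for w in words)
    return sum(len(words[i]) for i in covered) / total if total else 0.0
```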
**Row 3: tests/llvm/static/test.py**

- hexsha: 5819067099605c281f435a3dad4f6c284961eb24; size: 3694; ext: py; lang: Python
- max_stars: repo_path tests/llvm/static/test.py, repo_name ganeshutah/FPChecker, head_hexsha 53a471429762ace13f69733cb2f8b7227fc15b9f, licenses ["Apache-2.0"], count 19, stars_event_min 2019-09-28T16:15:45.000Z, stars_event_max 2022-02-15T15:11:28.000Z
- max_issues: repo_path tests/llvm/static/test.py, repo_name tanmaytirpankar/FPChecker, head_hexsha d3fe4bd9489c5705df58a67dbbc388ac1ebf56bf, licenses ["Apache-2.0"], count 16, issues_event_min 2020-02-01T18:43:00.000Z, issues_event_max 2021-12-22T14:47:39.000Z
- max_forks: repo_path tests/llvm/static/test.py, repo_name tanmaytirpankar/FPChecker, head_hexsha d3fe4bd9489c5705df58a67dbbc388ac1ebf56bf, licenses ["Apache-2.0"], count 5, forks_event_min 2020-07-27T18:15:36.000Z, forks_event_max 2021-11-01T18:43:34.000Z
- content:

```python
#!/usr/bin/env python

import test_config
import subprocess
import os
import sys


def main():
    print "* Static Tests *"

    ###########################################################################
    t = "Test: find instrumentation functions"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_find_inst_functions/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: num. fp operations"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_number_fp_operations/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: a device function is found"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_device_func_found/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: a global function is found"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_global_func_found/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: main() is found"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_main_is_found/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: global array instrumentation"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_global_array_instrumentation/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

    ###########################################################################
    t = "Test: correct func are found and instrumented"
    testTarget = test_config.textWidth.format(t)
    sys.stdout.write(testTarget)
    os.chdir(test_config.path + "/test_correct_inst_functions_found/")
    cmd = ["./test.py"]
    cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    sys.stdout.write(cmdOutput)
    os.chdir("../")
    ###########################################################################

main()
```

- avg_line_length 41.044444; max_line_length 82; alphanum_fraction 0.479426
- Quality signals: num_words 328, num_chars 3694, mean_word_length 5.265244, frac_words_unique 0.155488, frac_chars_top_2grams 0.086856, frac_chars_top_3grams 0.113492, frac_chars_top_4grams 0.117545, frac_chars_dupe_5grams through dupe_10grams all 0.82513, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0.135084, size_file_byte 3694, num_lines 89, num_chars_line_max 83, num_chars_line_mean 41.505618, frac_chars_alphabet 0.540532, frac_chars_comments 0.005414, cate_xml_start 0, frac_lines_dupe_lines 0.666667, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.19634, frac_chars_long_word_length 0.065193, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0; Python-specific: cate_ast 0, frac_lines_func_ratio null, cate_var_zero null, frac_lines_pass 0, frac_lines_import 0.063492, frac_lines_simplefunc null, score_lines_no_logic null, frac_lines_print 0.015873
- Filter flags set to 1: frac_chars_dupe_5grams through frac_chars_dupe_10grams, codepython_cate_ast (frac_words_unique and frac_lines_string_concat null; all others 0)
- effective 0; hits 7
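Row 3 is the only row with codepython_cate_ast 0, nulls in several AST-derived signals, and the cate_ast filter flag set: its content is Python 2 (`print "* Static Tests *"`), which does not parse under a Python 3 ast. A sketch of that kind of check, assumed rather than taken from the pipeline's actual implementation:

```python
import ast

def parses_as_python3(source: str) -> bool:
    # Python 2-only syntax such as `print "..."` raises SyntaxError here.
    try:
        ast.parse(source)
        return True
    except SyntaxError:
        return False
```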
**Row 4: startup/users/30-user-McNeil.py (mrakitin/profile_collection-smi)**

- hexsha: 588c7b60ead93b3b3f3300d29b0e672ec555a4a2; size: 18545; ext: py; lang: Python
- max_stars, max_issues, max_forks (identical): repo_path startup/users/30-user-McNeil.py, repo_name mrakitin/profile_collection-smi, head_hexsha 1eea45a3b886b2c0daeec715ce94f27da24d0ba3, licenses ["BSD-3-Clause"], count null, event_min/max null
- content:

```python
def waxs_S_edge_guil(t=1):
    dets = [pil300KW]
    names = ['sample02', 'sample03', 'sample04', 'sample05', 'sample06', 'sample07', 'sample08', 'sample09', 'sample10', 'sample11', 'sample12']
    x = [26500, 21500, 16000, 10500, 5000, 0, -5500, -10500, 16000, -21000, -26500]#, -34000, -41000]
    y = [600, 600, 800, 700, 700, 600, 600, 600, 600, 900, 900]#, 700, 800]
    energies = np.linspace(2450, 2500, 26)
    waxs_arc = [0, 6.5, 13]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1300, 26)
        if int(waxs.arc.position) == 0:
            waxs_arc = [0, 6.5, 13]
        elif int(waxs.arc.position) == 13:
            waxs_arc = [13, 6.5, 0]
        if name == 'sample02':
            waxs_arc = [6.5, 0]
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}'
            for e, ysss in zip(energies, yss):
                yield from bps.sleep(1)
                yield from bps.mv(energy, e)
                yield from bps.mv(piezo.y, ysss)
                sample_name = name_fmt.format(sample=name, energy=e, wax = wa)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)


def giwaxs_S_edge_chris(t=1):
    dets = [pil300KW]
    # names = ['e1_01', 'e1_02', 'e1_03', 'e1_04', 'd1_01', 'd1_02', 'd1_03', 'd1_04', 'd1_05', 'd1_06']
    # x = [56000, 45500, 34000, 22000, 11000, 0, -11000, -22500, -34000, -46000]
    names = ['d1_07', 'd1_08', 'd1_10', 'd1_11']
    x = [55000, 42500, 31000, 19000]
    energies = np.arange(2450, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = np.linspace(0, 39, 7)
    ai0 = 0
    for name, xs in zip(names, x):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.th, ai0)
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from bps.sleep(1)
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from bps.sleep(1)
        yield from alignement_gisaxs(angle = 0.4)
        # yield from bps.mv(att2_9, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        yield from bps.sleep(1)
        # yield from bps.mv(att2_9, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        yield from bps.sleep(1)
        ai0 = piezo.th.position
        yield from bps.mv(piezo.th, ai0 + 0.7)
        xss = np.linspace(xs, xs - 8000, 57)
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss in zip(energies, xss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(2)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)


def giwaxs_S_edge_chris_redo(t=1):
    dets = [pil300KW]
    names = ['a1_02_redo']#, 'sample03', 'sample04', 'sample05', 'sample06', 'sample07', 'sample08', 'sample09', 'sample10', 'sample11', 'sample12']
    x = [38000,-6000,-16000, -26000, -38000]#, 21500, 16000, 10500, 5000, 0, -5500, -10500, 16000, -21000, -26500]#, -34000, -41000]
    #y = [600]#, 600, 800, 700, 700, 600, 600, 600, 600, 900, 900]#, 700, 800]
    energiess = [[2495, 2500], [2495], [2455, 2470, 2495, 2500], [2488, 2490, 2495, 2500], [2495, 2500]]
    waxs_arc = np.linspace(0, 39, 7)
    ai0 = 0
    for name, xs, energies in zip(names, x, energiess):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.th, ai0)
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from bps.sleep(1)
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from bps.sleep(1)
        yield from alignement_gisaxs(angle = 0.4)
        yield from bps.mv(att2_9, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        yield from bps.sleep(1)
        yield from bps.mv(att2_9, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        yield from bps.sleep(1)
        ai0 = piezo.th.position
        yield from bps.mv(piezo.th, ai0 + 0.7)
        '''
        if int(waxs.arc.position) == 0:
            waxs_arc = [0, 6.5, 13]
        elif int(waxs.arc.position) == 13:
            waxs_arc = [13, 6.5, 0]
        '''
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            yield from bps.mvr(piezo.x, -500)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)


def waxs_S_edge_chris_n(t=1):
    dets = [pil300KW]
    names = ['b2_08', 'b2_09', 'b2_10', 'c2_01', 'c2_02', 'c2_03', 'c2_04', 'c2_05', 'c2_06', 'c2_07', 'c2_08']
    x = [41500, 36300, 30900, 25600, 20200, 15200, 9700, 4700, -500, -5900, -11500]
    y = [1000, 800, 800, 900, 800, 800, 600, 400, 500, 600, 500]
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = np.linspace(0, 39, 7)
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1000, 29)
        xss = np.array([xs, xs + 500])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)
    dets = [pil300KW]
    names = ['EH_static']
    x = [-16700]
    y = [1000]
    energies = [2450, 2470, 2475, 2500]
    waxs_arc = np.linspace(0, 39, 7)
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)
    yield from bps.mv(energy, 2450)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2500)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2550)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2580)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2610)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2640)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2660)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2680)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2700)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2720)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2740)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2760)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2780)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2800)
    yield from bps.sleep(5)
    dets = [pil300KW]
    names = ['c2_04', 'c2_06', 'c2_08']
    x = [10600, 400, -10400]
    y = [600, 500, 500]
    energies = np.arange(2810, 2820, 5).tolist() + np.arange(2820, 2840, 0.5).tolist() + np.arange(2840, 2850, 1).tolist()
    waxs_arc = np.linspace(0, 36, 5)
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1000, 52)
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, ysss in zip(energies, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2830)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2810)
    yield from bps.sleep(5)


def waxs_S_edge_chris_night(t=1):
    dets = [pil300KW]
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = np.linspace(0, 39, 7)
    yield from bps.mv(stage.th, 0)
    yield from bps.mv(stage.y, 0)
    names = ['e2_01', 'e2_02', 'e2_03', 'e2_04', 'b2_04', 'b2_08', 'd2_01', 'd2_02', 'd2_03',
             'd2_04', 'd2_05', 'd2_06', 'd2_07', 'd2_08']
    x = [41600, 35800, 29400, 23500, 6500, 1200, -4500, -9800,-15200,-21000,-26700,-32000,-37200,-42800,]
    y = [-4300, -4300, -4100, -4000, -4200,-4200, -4300, -4200, -4200, -4300, -4300, -4200, -4100, -4300, ]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1000, 29)
        xss = np.array([xs, xs + 500])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)
    yield from bps.mv(stage.th, 1)
    yield from bps.mv(stage.y, -8)
    names = ['d2_10', 'd2_11']
    x = [-15700, -10200]
    y = [-8800, -8800]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1000, 29)
        xss = np.array([xs, xs + 500])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)
    yield from bps.mv(energy, 2450)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2500)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2550)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2580)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2610)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2640)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2660)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2680)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2700)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2720)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2740)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2760)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2780)
    yield from bps.sleep(5)
    yield from bps.mv(energy, 2800)
    yield from bps.sleep(5)
    dets = [pil300KW]
    yield from bps.mv(stage.th, 0)
    yield from bps.mv(stage.y, 0)
    names = ['b2_01', 'b2_02', 'b2_04', 'b2_08']
    x = [17800, 12200, 6750, 1450]
    y = [-4100, -4200,-4200,-4200]
    energies = np.arange(2810, 2820, 5).tolist() + np.arange(2820, 2825, 1).tolist() + np.arange(2825, 2835, 0.25).tolist() + np.arange(2835, 2840, 0.5).tolist() + np.arange(2840, 2850, 1).tolist()
    waxs_arc = np.linspace(0, 39, 7)
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 1000, 29)
        xss = np.array([xs, xs + 500])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2830)
    yield from bps.sleep(2)
    yield from bps.mv(energy, 2810)
    yield from bps.sleep(2)


def nexafs_90deg_McNeil(t=1):
    dets = [pil300KW]
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = [52.5]
    ai = [0.7, 20, 55]
    names = ['D1_06']
    for name in names:
        det_exposure_time(t,t)
        name_fmt = 'nexafs_vert_{sample}_{energy}eV_angle{ai}_bpm{xbpm}'
        ai0 = prs.position
        for ais in ai:
            yield from bps.mv(prs, ai0-ais)
            yield from bps.mvr(piezo.y, 100)
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai ='%2.2d'%ais, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)


def giwaxs_vert_S_edge_McNeil(t=1):
    dets = [pil300KW]
    names = ['D1_06']
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = [4, 10.5, 17]
    dets = [pil300KW]
    for name in names:
        for i, wa in enumerate(waxs_arc):
            if i==0:
                print('wa=4deg')
            else:
                yield from bps.mv(waxs, wa)
            name_fmt = 'GIWAXS_90deg_{sample}_{energy}eV_ai0.7_wa{wax}_bpm{xbpm}'
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 2470)
    yield from bps.mv(energy, 2450)
```

- avg_line_length 33.234767; max_line_length 197; alphanum_fraction 0.54144
- Quality signals: num_words 2774, num_chars 18545, mean_word_length 3.525595, frac_words_unique 0.088681, frac_chars_top_2grams 0.172086, frac_chars_top_3grams 0.213497, frac_chars_top_4grams 0.173211, frac_chars_dupe_5grams 0.87546, dupe_6grams 0.855215, dupe_7grams 0.849182, dupe_8grams 0.845501, dupe_9grams 0.840695, dupe_10grams 0.836503, frac_chars_replacement_symbols 0, frac_chars_digital 0.13037, frac_chars_whitespace 0.3134, size_file_byte 18545, num_lines 558, num_chars_line_max 198, num_chars_line_mean 33.234767, frac_chars_alphabet 0.637713, frac_chars_comments 0.029765, cate_xml_start 0, frac_lines_dupe_lines 0.818653, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.071669, frac_chars_long_word_length 0.024152, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0; Python-specific: cate_ast 1, frac_lines_func_ratio 0.018135, cate_var_zero false, frac_lines_pass 0, frac_lines_import 0, frac_lines_simplefunc 0, score_lines_no_logic 0.018135, frac_lines_print 0.031088
- Filter flags set to 1: frac_chars_top_3grams, frac_chars_top_4grams, frac_chars_dupe_5grams through frac_chars_dupe_10grams, frac_lines_dupe_lines (frac_words_unique and frac_lines_string_concat null; all others 0)
- effective 0; hits 9
**Row 5: tests/api/v3_0_0/test_sponsor_group.py (CiscoISE/ciscoisesdk)**

- hexsha: 5455ebcc430ceab97bee5eae3b35ccb133680914; size: 12990; ext: py; lang: Python
- max_stars: repo_path tests/api/v3_0_0/test_sponsor_group.py, repo_name CiscoISE/ciscoisesdk, head_hexsha 860b0fc7cc15d0c2a39c64608195a7ab3d5f4885, licenses ["MIT"], count 36, stars_event_min 2021-05-18T16:24:19.000Z, stars_event_max 2022-03-05T13:44:41.000Z
- max_issues: same path, repo, head and licenses; count 15, issues_event_min 2021-06-08T19:03:37.000Z, issues_event_max 2022-02-25T14:47:33.000Z
- max_forks: same path, repo, head and licenses; count 6, forks_event_min 2021-06-10T09:32:01.000Z, forks_event_max 2022-01-12T08:34:39.000Z
- content:

```python
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI sponsor_group API fixtures and tests.

Copyright (c) 2021 Cisco and/or its affiliates.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION

pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.0.0', reason='version does not match')


def is_valid_get_sponsor_group_by_id(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_eaa0d7c339d152b688876c2e10f51fe7_v3_0_0').validate(obj.response)
    return True


def get_sponsor_group_by_id(api):
    endpoint_result = api.sponsor_group.get_sponsor_group_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_sponsor_group_by_id(api, validator):
    try:
        assert is_valid_get_sponsor_group_by_id(
            validator,
            get_sponsor_group_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_sponsor_group_by_id_default(api):
    endpoint_result = api.sponsor_group.get_sponsor_group_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_sponsor_group_by_id_default(api, validator):
    try:
        assert is_valid_get_sponsor_group_by_id(
            validator,
            get_sponsor_group_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e


def is_valid_update_sponsor_group_by_id(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_dfc44f7f24d153d789efa48e904b3832_v3_0_0').validate(obj.response)
    return True


def update_sponsor_group_by_id(api):
    endpoint_result = api.sponsor_group.update_sponsor_group_by_id(
        active_validation=False,
        auto_notification=True,
        create_permissions={'canImportMultipleAccounts': True, 'importBatchSizeLimit': 0, 'canCreateRandomAccounts': True, 'randomBatchSizeLimit': 0, 'defaultUsernamePrefix': 'string', 'canSpecifyUsernamePrefix': True, 'canSetFutureStartDate': True, 'startDateFutureLimitDays': 0},
        description='string',
        guest_types=['string'],
        id='string',
        is_default_group=True,
        is_enabled=True,
        locations=['string'],
        manage_permission='string',
        member_groups=['string'],
        name='string',
        other_permissions={'canUpdateGuestContactInfo': True, 'canViewGuestPasswords': True, 'canSendSmsNotifications': True, 'canResetGuestPasswords': True, 'canExtendGuestAccounts': True, 'canDeleteGuestAccounts': True, 'canSuspendGuestAccounts': True, 'requireSuspensionReason': True, 'canReinstateSuspendedAccounts': True, 'canApproveSelfregGuests': True, 'limitApprovalToSponsorsGuests': True, 'canAccessViaRest': True},
        payload=None
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_update_sponsor_group_by_id(api, validator):
    try:
        assert is_valid_update_sponsor_group_by_id(
            validator,
            update_sponsor_group_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def update_sponsor_group_by_id_default(api):
    endpoint_result = api.sponsor_group.update_sponsor_group_by_id(
        active_validation=False,
        id='string',
        auto_notification=None,
        create_permissions=None,
        description=None,
        guest_types=None,
        is_default_group=None,
        is_enabled=None,
        locations=None,
        manage_permission=None,
        member_groups=None,
        name=None,
        other_permissions=None,
        payload=None
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_update_sponsor_group_by_id_default(api, validator):
    try:
        assert is_valid_update_sponsor_group_by_id(
            validator,
            update_sponsor_group_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e


def is_valid_delete_sponsor_group_by_id(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_61c28a45acf05fec98879d8d2ac51129_v3_0_0').validate(obj.response)
    return True


def delete_sponsor_group_by_id(api):
    endpoint_result = api.sponsor_group.delete_sponsor_group_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_delete_sponsor_group_by_id(api, validator):
    try:
        assert is_valid_delete_sponsor_group_by_id(
            validator,
            delete_sponsor_group_by_id(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def delete_sponsor_group_by_id_default(api):
    endpoint_result = api.sponsor_group.delete_sponsor_group_by_id(
        id='string'
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_delete_sponsor_group_by_id_default(api, validator):
    try:
        assert is_valid_delete_sponsor_group_by_id(
            validator,
            delete_sponsor_group_by_id_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e


def is_valid_get_sponsor_group(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_f1196f1f6fde5978b0522f096926d443_v3_0_0').validate(obj.response)
    return True


def get_sponsor_group(api):
    endpoint_result = api.sponsor_group.get_sponsor_group(
        filter='value1,value2',
        filter_type='string',
        page=0,
        size=0,
        sortasc='string',
        sortdsc='string'
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_sponsor_group(api, validator):
    try:
        assert is_valid_get_sponsor_group(
            validator,
            get_sponsor_group(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_sponsor_group_default(api):
    endpoint_result = api.sponsor_group.get_sponsor_group(
        filter=None,
        filter_type=None,
        page=None,
        size=None,
        sortasc=None,
        sortdsc=None
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_sponsor_group_default(api, validator):
    try:
        assert is_valid_get_sponsor_group(
            validator,
            get_sponsor_group_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e


def is_valid_create_sponsor_group(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_56311acd30d35ee2ae16ff23757de7d8_v3_0_0').validate(obj.response)
    return True


def create_sponsor_group(api):
    endpoint_result = api.sponsor_group.create_sponsor_group(
        active_validation=False,
        auto_notification=True,
        create_permissions={'canImportMultipleAccounts': True, 'importBatchSizeLimit': 0, 'canCreateRandomAccounts': True, 'randomBatchSizeLimit': 0, 'defaultUsernamePrefix': 'string', 'canSpecifyUsernamePrefix': True, 'canSetFutureStartDate': True, 'startDateFutureLimitDays': 0},
        description='string',
        guest_types=['string'],
        is_default_group=True,
        is_enabled=True,
        locations=['string'],
        manage_permission='string',
        member_groups=['string'],
        name='string',
        other_permissions={'canUpdateGuestContactInfo': True, 'canViewGuestPasswords': True, 'canSendSmsNotifications': True, 'canResetGuestPasswords': True, 'canExtendGuestAccounts': True, 'canDeleteGuestAccounts': True, 'canSuspendGuestAccounts': True, 'requireSuspensionReason': True, 'canReinstateSuspendedAccounts': True, 'canApproveSelfregGuests': True, 'limitApprovalToSponsorsGuests': True, 'canAccessViaRest': True},
        payload=None
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_create_sponsor_group(api, validator):
    try:
        assert is_valid_create_sponsor_group(
            validator,
            create_sponsor_group(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def create_sponsor_group_default(api):
    endpoint_result = api.sponsor_group.create_sponsor_group(
        active_validation=False,
        auto_notification=None,
        create_permissions=None,
        description=None,
        guest_types=None,
        is_default_group=None,
        is_enabled=None,
        locations=None,
        manage_permission=None,
        member_groups=None,
        name=None,
        other_permissions=None,
        payload=None
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_create_sponsor_group_default(api, validator):
    try:
        assert is_valid_create_sponsor_group(
            validator,
            create_sponsor_group_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e


def is_valid_get_version(json_schema_validate, obj):
    if not obj:
        return False
    assert hasattr(obj, 'headers')
    assert hasattr(obj, 'content')
    assert hasattr(obj, 'text')
    assert hasattr(obj, 'response')
    json_schema_validate('jsd_e8d4001b740751e08cfc19e1fdc5fddf_v3_0_0').validate(obj.response)
    return True


def get_version(api):
    endpoint_result = api.sponsor_group.get_version(
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_version(api, validator):
    try:
        assert is_valid_get_version(
            validator,
            get_version(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e


def get_version_default(api):
    endpoint_result = api.sponsor_group.get_version(
    )
    return endpoint_result


@pytest.mark.sponsor_group
def test_get_version_default(api, validator):
    try:
        assert is_valid_get_version(
            validator,
            get_version_default(api)
        )
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
```

- avg_line_length 33.222506; max_line_length 425; alphanum_fraction 0.711008
- Quality signals: num_words 1490, num_chars 12990, mean_word_length 5.911409, frac_words_unique 0.145638, frac_chars_top_2grams 0.108992, frac_chars_top_3grams 0.052452, frac_chars_top_4grams 0.059946, frac_chars_dupe_5grams 0.821866, dupe_6grams 0.820504, dupe_7grams 0.818801, dupe_8grams 0.814941, dupe_9grams 0.811989, dupe_10grams 0.796776, frac_chars_replacement_symbols 0, frac_chars_digital 0.015076, frac_chars_whitespace 0.208545, size_file_byte 12990, num_lines 390, num_chars_line_max 426, num_chars_line_mean 33.307692, frac_chars_alphabet 0.84165, frac_chars_comments 0.089222, cate_xml_start 0, frac_lines_dupe_lines 0.703947, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.134303, frac_chars_long_word_length 0.089479, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0.118421; Python-specific: cate_ast 1, frac_lines_func_ratio 0.098684, cate_var_zero false, frac_lines_pass 0.006579, frac_lines_import 0.023026, frac_lines_simplefunc 0, score_lines_no_logic 0.200658, frac_lines_print 0.019737
- Filter flags set to 1: frac_chars_dupe_5grams through frac_chars_dupe_10grams, frac_lines_dupe_lines (frac_words_unique and frac_lines_string_concat null; all others 0)
- effective 0; hits 7
**Row 6: pdf_ocr.py (im47cn/pdfOCR)**

- hexsha: 54bd4be82b2a42329aede2359d2d5d9f389c156d; size: 7026; ext: py; lang: Python
- max_stars, max_issues, max_forks (identical): repo_path pdf_ocr.py, repo_name im47cn/pdfOCR, head_hexsha b8ce65c62752824046607b95d21c741e97f634f0, licenses ["Apache-2.0"], count null, event_min/max null
- content:

```python
# -*- coding: utf-8 -*-
import os
import fitz
import time
from paddleocr import PaddleOCR, draw_ocr


def pdf2png_tbo(pdfPath, imgPath, zoom_x=2, zoom_y=2, rotation_angle=0):
    '''
    # Convert the PDF into images
    pdfPath         path of the PDF file
    imgPath         folder the images are saved to
    zoom_x          zoom factor in the x direction
    zoom_y          zoom factor in the y direction
    rotation_angle  rotation angle
    '''
    time_start = time.time()
    # Open the PDF file
    pdf = fitz.open(pdfPath)
    files = []
    # Read the PDF page by page
    for pg in range(0, pdf.pageCount):
        page = pdf[pg]
        # Delete embedded images
        img_list = page.get_images()
        for img in img_list:
            pdf._deleteObject(img[0])
        # Set the zoom and rotation factors
        trans = fitz.Matrix(zoom_x, zoom_y)
        # pm = page.get_pixmap(matrix=trans, alpha=False)
        # if pm.width > 2000 or pm.height > 2000:
        #     pm = page.get_pixmap(matrix=fitz.Matrix(1, 1), alpha=False)
        # pm.save(imgPath + str(pg) + ".jpeg")
        # tbo No.
        clip1 = fitz.Rect(515, 86, 558, 100)
        pm1 = page.get_pixmap(matrix=trans, clip=clip1, alpha=False)
        file1 = imgPath + str(pg) + "-1.jpeg"
        files.append(file1)
        pm1.save(file1)
        # tbo Issues by
        clip2 = fitz.Rect(380, 105, 558, 119)
        # clip2 = fitz.Rect(37, 256, 280, 269)
        pm2 = page.get_pixmap(matrix=trans, clip=clip2, alpha=False)
        file2 = imgPath + str(pg) + "-2.jpeg"
        files.append(file2)
        pm2.save(file2)
        # tbo payment
        clip3 = fitz.Rect(450, 255, 558, 285)
        pm3 = page.get_pixmap(matrix=trans, clip=clip3, alpha=False)
        file3 = imgPath + str(pg) + "-3.jpeg"
        files.append(file3)
        pm3.save(file3)
    pdf.close()
    time_end = time.time()
    time_cost = time_end - time_start
    print('totally cost: {}, page: {}, each page cost: {}'.format(
        time_cost, pg + 1, time_cost / (pg + 1)))
    return files


def pdf2png_exp(pdfPath, imgPath, zoom_x=2, zoom_y=2, rotation_angle=0):
    '''
    # Convert the PDF into images
    pdfPath         path of the PDF file
    imgPath         folder the images are saved to
    zoom_x          zoom factor in the x direction
    zoom_y          zoom factor in the y direction
    rotation_angle  rotation angle
    '''
    time_start = time.time()
    # Open the PDF file
    pdf = fitz.open(pdfPath)
    files = []
    # Read the PDF page by page
    for pg in range(0, pdf.pageCount):
        page = pdf[pg]
        # Delete embedded images
        img_list = page.get_images()
        for img in img_list:
            pdf._deleteObject(img[0])
        # Set the zoom and rotation factors
        trans = fitz.Matrix(zoom_x, zoom_y)
        # pm = page.get_pixmap(matrix=trans, alpha=False)
        # if pm.width > 2000 or pm.height > 2000:
        #     pm = page.get_pixmap(matrix=fitz.Matrix(1, 1), alpha=False)
        # pm.save(imgPath + str(pg) + ".jpeg")
        # exp No.
        clip1 = fitz.Rect(160, 180, 440, 205)
        pm1 = page.get_pixmap(matrix=trans, clip=clip1, alpha=False)
        file1 = imgPath + str(pg) + "-1.jpeg"
        files.append(file1)
        pm1.save(file1)
        # exp payment
        clip2 = fitz.Rect(460, 440, 535, 465)
        pm2 = page.get_pixmap(matrix=trans, clip=clip2, alpha=False)
        file2 = imgPath + str(pg) + "-2.jpeg"
        files.append(file2)
        pm2.save(file2)
    pdf.close()
    time_end = time.time()
    time_cost = time_end - time_start
    print('totally cost: {}, page: {}, each page cost: {}'.format(
        time_cost, pg + 1, time_cost / (pg + 1)))
    return files


def pdf2png_agoda(pdfPath, imgPath, zoom_x=2, zoom_y=2, rotation_angle=0):
    '''
    # Convert the PDF into images
    pdfPath         path of the PDF file
    imgPath         folder the images are saved to
    zoom_x          zoom factor in the x direction
    zoom_y          zoom factor in the y direction
    rotation_angle  rotation angle
    '''
    time_start = time.time()
    # Open the PDF file
    pdf = fitz.open(pdfPath)
    files = []
    # Read the PDF page by page
    for pg in range(0, pdf.pageCount):
        page = pdf[pg]
        # Delete embedded images
        img_list = page.get_images()
        for img in img_list:
            pdf._deleteObject(img[0])
        # Set the zoom and rotation factors
        trans = fitz.Matrix(zoom_x, zoom_y)
        pm = page.get_pixmap(matrix=trans, alpha=False)
        if pm.width > 2000 or pm.height > 2000:
            pm = page.get_pixmap(matrix=fitz.Matrix(1, 1), alpha=False)
        pm.save(imgPath + str(pg) + ".jpeg")
        # agoda No.
        clip1 = fitz.Rect(160, 180, 440, 205)
        pm1 = page.get_pixmap(matrix=trans, clip=clip1, alpha=False)
        file1 = imgPath + str(pg) + "-1.jpeg"
        files.append(file1)
        pm1.save(file1)
        # agoda payment
        clip2 = fitz.Rect(460, 440, 535, 465)
        pm2 = page.get_pixmap(matrix=trans, clip=clip2, alpha=False)
        file2 = imgPath + str(pg) + "-2.jpeg"
        files.append(file2)
        pm2.save(file2)
    pdf.close()
    time_end = time.time()
    time_cost = time_end - time_start
    print('totally cost: {}, page: {}, each page cost: {}'.format(
        time_cost, pg + 1, time_cost / (pg + 1)))
    return files


def pdf2png_agoda2(pdfPath, imgPath, zoom_x=2, zoom_y=2, rotation_angle=0):
    '''
    # Convert the PDF into images
    pdfPath         path of the PDF file
    imgPath         folder the images are saved to
    zoom_x          zoom factor in the x direction
    zoom_y          zoom factor in the y direction
    rotation_angle  rotation angle
    '''
    time_start = time.time()
    # Open the PDF file
    pdf = fitz.open(pdfPath)
    files = []
    # Read the PDF page by page
    for pg in range(0, pdf.pageCount):
        page = pdf[pg]
        # Delete embedded images
        img_list = page.get_images()
        for img in img_list:
            pdf._deleteObject(img[0])
        # Set the zoom and rotation factors
        trans = fitz.Matrix(zoom_x, zoom_y)
        # pm = page.get_pixmap(matrix=trans, alpha=False)
        # if pm.width > 2000 or pm.height > 2000:
        #     pm = page.get_pixmap(matrix=fitz.Matrix(1, 1), alpha=False)
        # pm.save(imgPath + str(pg) + ".jpeg")
        # agoda No.
        clip1 = fitz.Rect(140, 95, 200, 110)
        pm1 = page.get_pixmap(matrix=trans, clip=clip1, alpha=False)
        file1 = imgPath + str(pg) + "-1.jpeg"
        files.append(file1)
        pm1.save(file1)
        # agoda payment
        clip2 = fitz.Rect(205, 436, 290, 446)
        pm2 = page.get_pixmap(matrix=trans, clip=clip2, alpha=False)
        file2 = imgPath + str(pg) + "-2.jpeg"
        files.append(file2)
        pm2.save(file2)
    pdf.close()
    time_end = time.time()
    time_cost = time_end - time_start
    print('totally cost: {}, page: {}, each page cost: {}'.format(
        time_cost, pg + 1, time_cost / (pg + 1)))
    return files


if __name__ == '__main__':
    ocr = PaddleOCR(
        # det_model_dir='./PaddleOCR/output/ch_db_mv3_inference/inference',
        use_angle_cls=False,
        use_gpu=False,
        language_type='en')
    pdfFolder = '../agoda2'
    for p in os.listdir(pdfFolder):
        if p[-4:] == '.pdf':
            pdfPath = pdfFolder + '/' + p
            imgPath = p[:-4]
            # imgPath = pdfFolder+'/'+os.path.basename(p)[:-4]+'/'
            # os.mkdir(imgPath)
            files = pdf2png_agoda2(pdfPath, imgPath)
            for file in files:
                result = ocr.ocr(file, cls=False)
                print(result[0][1])
```

- avg_line_length 27.880952; max_line_length 82; alphanum_fraction 0.561771
- Quality signals: num_words 914, num_chars 7026, mean_word_length 4.191466, frac_words_unique 0.157549, frac_chars_top_2grams 0.038371, frac_chars_top_3grams 0.057687, frac_chars_top_4grams 0.084312, frac_chars_dupe_5grams 0.833986, dupe_6grams 0.833986, dupe_7grams 0.826677, dupe_8grams 0.826677, dupe_9grams 0.826677, dupe_10grams 0.826677, frac_chars_replacement_symbols 0, frac_chars_digital 0.056254, frac_chars_whitespace 0.30928, size_file_byte 7026, num_lines 251, num_chars_line_max 83, num_chars_line_mean 27.992032, frac_chars_alphabet 0.733155, frac_chars_comments 0.200968, cate_xml_start 0, frac_lines_dupe_lines 0.732824, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.050819, frac_chars_long_word_length 0, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0; Python-specific: cate_ast 1, frac_lines_func_ratio 0.030534, cate_var_zero false, frac_lines_pass 0, frac_lines_import 0.030534, frac_lines_simplefunc 0, score_lines_no_logic 0.091603, frac_lines_print 0.038168
- Filter flags set to 1: frac_chars_dupe_5grams through frac_chars_dupe_10grams, frac_lines_dupe_lines (frac_words_unique and frac_lines_string_concat null; all others 0)
- effective 0; hits 7
54c0f339696f8692840ba44f55fc9be7f91836e4
| 299
|
py
|
Python
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/core/views.py
|
pythdasch/cookiecutter-django
|
c998afe16cc7632af329e623d29e7fb7e6b3795a
|
[
"BSD-3-Clause"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/core/views.py
|
pythdasch/cookiecutter-django
|
c998afe16cc7632af329e623d29e7fb7e6b3795a
|
[
"BSD-3-Clause"
] | null | null | null |
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/core/views.py
|
pythdasch/cookiecutter-django
|
c998afe16cc7632af329e623d29e7fb7e6b3795a
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse
from django.views.generic import DetailView, RedirectView, UpdateView
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
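# Hypothetical sketch (not part of this 6-line file): how these imports are
# typically combined in a cookiecutter-django user view. The class and field
# names below are assumptions for illustration only.
User = get_user_model()


class UserUpdateView(LoginRequiredMixin, UpdateView):
    model = User
    fields = ["name"]  # assumed model field

    def get_success_url(self):
        messages.add_message(self.request, messages.INFO, _("Info successfully updated"))
        return reverse("users:detail", kwargs={"username": self.request.user.username})

    def get_object(self):
        return User.objects.get(username=self.request.user.username)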
| avg_line_length 42.714286 | max_line_length 69 | alphanum_fraction 0.862876 | (remaining quality-signal columns omitted) |
| 54c51b24b61f37011b617fdccb5bb0d0fd562013 | 1,752 | py | Python | depend.py | Google1234/NLP | 393ed1131c07ca55aaadf144b74251a466b3d418 | ["Apache-2.0"] | stars: 1 (2016-07-05T13:11:18.000Z .. 2016-07-05T13:11:18.000Z) | issues: null | forks: null |
#-*- coding: UTF-8 -*-
# NOTE: despite the name, this class implements a top-down merge sort; the
# string 'zz' serves as a sentinel ("infinity") in the merge step, so every
# real key must sort before 'zz' (e.g. lowercase ASCII words).
class quick_sort():
    a=[]
def __init__(self,data):
self.a=data[:]
def sort(self,left,right):
if left!=right:
mid=left+int((right-left)/2)
self.sort(left,mid)
self.sort(mid+1,right)
b=[]
i=0
for i in range(left,mid+1):
b.append(self.a[i])
b.append('zz')
i=0
for i in range(mid+1,right+1):
b.append(self.a[i])
b.append('zz')
i=j=0
k=mid-left+2
for i in range(left,right+1):
if b[j]>b[k]:
value=b[k]
k+=1
else:
value=b[j]
j+=1
self.a[i]=value
return self.a
def show(self):
print(self.a)
# Merge sort for rows shaped like [key, value]; rows are ordered by their
# first element, again with a sentinel row in the merge step.
class quick_sort_Multidimensional_Data():
    a=[]
def __init__(self,data):
self.a=data[:]
def sort(self,left,right):
if left!=right:
mid=left+int((right-left)/2)
self.sort(left,mid)
self.sort(mid+1,right)
b=[]
i=0
for i in range(left,mid+1):
b.append(self.a[i])
            b.append(['zz',10])  # sentinel must sort after every real key; the original ['',10] sorted first and corrupted the merge
i=0
for i in range(mid+1,right+1):
b.append(self.a[i])
            b.append(['zz',10])  # sentinel must sort after every real key; the original ['',10] sorted first and corrupted the merge
i=j=0
k=mid-left+2
for i in range(left,right+1):
if b[j][0]>b[k][0]:
value=b[k]
k+=1
else:
value=b[j]
j+=1
self.a[i]=value
return self.a
def show(self):
print(self.a)
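# Usage sketch (illustrative, not in the original file). Assumes all keys are
# lowercase ASCII strings that sort before the 'zz' sentinel.
if __name__ == '__main__':
    qs = quick_sort(['pear', 'apple', 'fig', 'date'])
    qs.sort(0, len(qs.a) - 1)
    qs.show()   # ['apple', 'date', 'fig', 'pear']
    qs2 = quick_sort_Multidimensional_Data([['b', 2], ['a', 1], ['c', 3]])
    qs2.sort(0, len(qs2.a) - 1)
    qs2.show()  # [['a', 1], ['b', 2], ['c', 3]]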
| avg_line_length 26.953846 | max_line_length 42 | alphanum_fraction 0.384703 | (remaining quality-signal columns omitted) |
| 3f7730e63139c8af678a4948c000165bd6e5b7b3 | 7,501 | py | Python | tests/test_crystal.py | davidtangGT/mol2vector | 081f06eca5d66279853c24bf2de698edee689261 | ["MIT"] | stars: 57 (2017-03-17T02:41:20.000Z .. 2022-02-21T04:29:26.000Z) | issues: 7 (2019-05-23T18:32:40.000Z .. 2020-10-13T20:43:46.000Z) | forks: 20 (2017-03-17T02:41:53.000Z .. 2020-10-07T07:06:20.000Z) |
import unittest
import numpy
from molml.molecule import Connectivity
from molml.crystal import GenerallizedCrystal
from molml.crystal import EwaldSumMatrix, SineMatrix
H_ELES = ['H']
H_NUMS = [1]
H_COORDS = numpy.array([[0.0, 0.0, 0.0]])
H_UNIT = numpy.array([
[2., .5, 0.],
[.25, 1., 0.],
[0., .3, 1.],
])
H_INPUT = ("elements", "coords", "unit_cell")
H = (H_ELES, H_COORDS, H_UNIT)
H2_ELES = ['H', 'H']
H2_NUMS = [1, 1]
H2_COORDS = numpy.array([
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
])
H2_CONNS = {
0: {1: '1'},
1: {0: '1'},
}
H2_UNIT = numpy.array([
[2., .5, 0.],
[.25, 1., 0.],
[0., .3, 1.],
])
H2 = (H2_ELES, H2_COORDS, H2_UNIT)
class GenerallizedCrystalTest(unittest.TestCase):
def test_fit(self):
t = Connectivity(input_type=H_INPUT)
a = GenerallizedCrystal(transformer=t, radius=2.5)
a.fit([H])
self.assertEqual(a.transformer.get_labels(), ('H', ))
def test_transform(self):
t = Connectivity(input_type=H_INPUT)
a = GenerallizedCrystal(transformer=t, radius=2.5)
a.fit([H])
res = a.transform([H])
self.assertEqual(res, numpy.array([[37]]))
def test_transform_before_fit(self):
t = Connectivity(input_type=H_INPUT)
a = GenerallizedCrystal(transformer=t, radius=2.5)
with self.assertRaises(ValueError):
a.transform([H])
def test_fit_transform(self):
t = Connectivity(input_type=H_INPUT)
a = GenerallizedCrystal(transformer=t, radius=2.5)
res = a.fit_transform([H])
self.assertEqual(res, numpy.array([[37]]))
def test_radius_and_units(self):
t = Connectivity(input_type=H_INPUT)
with self.assertRaises(ValueError):
GenerallizedCrystal(transformer=t, radius=2.5, units=2)
class EwaldSumMatrixCrystalTest(unittest.TestCase):
def test_fit(self):
a = EwaldSumMatrix()
a.fit([(H2_ELES, H2_COORDS)])
self.assertEqual(a._max_size, 2)
def test_transform(self):
a = EwaldSumMatrix(input_type=H_INPUT)
a.fit([H2])
res = a.transform([H2])
expected = numpy.array([[-1.68059225, 0.94480435,
0.94480435, -1.68059225]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_G_max(self):
a = EwaldSumMatrix(input_type=H_INPUT, G_max=2)
a.fit([H2])
res = a.transform([H2])
expected = numpy.array([[-1.68059225, 0.945167,
0.945167, -1.68059225]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_L_max(self):
a = EwaldSumMatrix(input_type=H_INPUT, L_max=2)
a.fit([H2])
res = a.transform([H2])
expected = numpy.array([[-1.68059225, 0.43748,
0.43748, -1.68059225]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_small_to_large_transform(self):
a = EwaldSumMatrix(input_type=H_INPUT)
a.fit([H])
with self.assertRaises(ValueError):
a.transform([H2])
def test_large_to_small_transform(self):
a = EwaldSumMatrix(input_type=H_INPUT)
a.fit([(H2_ELES, H2_COORDS, H2_UNIT)])
res = a.transform([H])
expected = numpy.array([[-1.944276, 0., 0., 0.]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_transform_before_fit(self):
a = EwaldSumMatrix()
with self.assertRaises(ValueError):
a.transform([H])
def test_fit_transform(self):
a = EwaldSumMatrix(input_type=H_INPUT)
res = a.fit_transform([H2])
expected = numpy.array([[-1.68059225, 0.94480435,
0.94480435, -1.68059225]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_sort(self):
a = EwaldSumMatrix(input_type=H_INPUT, sort=True)
res = a.fit_transform([H2])
expected = numpy.array([[-1.68059225, 0.94480435,
0.94480435, -1.68059225]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_eigen(self):
a = EwaldSumMatrix(input_type=H_INPUT, eigen=True)
res = a.fit_transform([H2])
expected = numpy.array([[-0.735788, -2.625397]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
class SineMatrixTest(unittest.TestCase):
def test_fit(self):
a = SineMatrix()
a.fit([(H2_ELES, H2_COORDS)])
self.assertEqual(a._max_size, 2)
def test_transform(self):
a = SineMatrix(input_type=H_INPUT)
a.fit([H2])
res = a.transform([H2])
expected = numpy.array([[0.5, 0.475557, 0.475557, 0.5]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_small_to_large_transform(self):
a = SineMatrix(input_type=H_INPUT)
a.fit([H])
with self.assertRaises(ValueError):
a.transform([H2])
def test_large_to_small_transform(self):
a = SineMatrix(input_type=H_INPUT)
a.fit([H2])
res = a.transform([H])
expected = numpy.array([[0.5, 0., 0., 0.]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_transform_before_fit(self):
a = SineMatrix()
with self.assertRaises(ValueError):
a.transform([H])
def test_fit_transform(self):
a = SineMatrix(input_type=H_INPUT)
res = a.fit_transform([H2])
expected = numpy.array([[0.5, 0.475557, 0.475557, 0.5]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_sort(self):
a = SineMatrix(input_type=H_INPUT, sort=True)
res = a.fit_transform([H2])
expected = numpy.array([[0.5, 0.475557, 0.475557, 0.5]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
def test_eigen(self):
a = SineMatrix(input_type=H_INPUT, eigen=True)
res = a.fit_transform([H2])
expected = numpy.array([[0.975557, 0.024443]])
try:
numpy.testing.assert_array_almost_equal(
res,
expected)
except AssertionError as e:
self.fail(e)
if __name__ == '__main__':
unittest.main()
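# For reference, a minimal sketch of the fit/transform flow these tests
# exercise (assumes molml is installed; fixtures as defined above):
#   t = Connectivity(input_type=H_INPUT)
#   crystal = GenerallizedCrystal(transformer=t, radius=2.5)
#   crystal.fit_transform([H])   # -> array([[37]]), per test_fit_transform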
| avg_line_length 29.648221 | max_line_length 67 | alphanum_fraction 0.556859 | (remaining quality-signal columns omitted) |
| 3fd7cbdeddb359ba677ec43426fa7ed1dcddbb98 | 6,088 | py | Python | libs/fully_connected_opt_weight_generation.py | achenbachsven/learningSkript | 7af067cbf0c8d7eed010806923f8af2e38977be2 | ["BSD-3-Clause"] | stars: null | issues: 3 (2020-03-24T15:59:06.000Z .. 2022-02-10T01:53:37.000Z) | forks: null |
#!/usr/bin/env python
import numpy as np
def convert_to_x4_q7_weights(weights):
[r, h, w, c] = weights.shape
weights = np.reshape(weights, (r, h*w*c))
num_of_rows = r
num_of_cols = h*w*c
new_weights = np.copy(weights)
new_weights = np.reshape(new_weights, (r*h*w*c))
counter = 0
for i in range(int(num_of_rows/4)):
# we only need to do the re-ordering for every 4 rows
row_base = 4*i
for j in range(int(num_of_cols/4)):
# for each 4 entries
column_base = 4*j
new_weights[counter] = weights[row_base ][column_base ]
new_weights[counter+1] = weights[row_base+1][column_base ]
new_weights[counter+2] = weights[row_base ][column_base+2]
new_weights[counter+3] = weights[row_base+1][column_base+2]
new_weights[counter+4] = weights[row_base+2][column_base ]
new_weights[counter+5] = weights[row_base+3][column_base ]
new_weights[counter+6] = weights[row_base+2][column_base+2]
new_weights[counter+7] = weights[row_base+3][column_base+2]
new_weights[counter+8] = weights[row_base ][column_base+1]
new_weights[counter+9] = weights[row_base+1][column_base+1]
new_weights[counter+10] = weights[row_base ][column_base+3]
new_weights[counter+11] = weights[row_base+1][column_base+3]
new_weights[counter+12] = weights[row_base+2][column_base+1]
new_weights[counter+13] = weights[row_base+3][column_base+1]
new_weights[counter+14] = weights[row_base+2][column_base+3]
new_weights[counter+15] = weights[row_base+3][column_base+3]
counter = counter + 16
# the remaining ones are in order
for j in range((int)(num_of_cols-num_of_cols%4), int(num_of_cols)):
new_weights[counter] = weights[row_base][j]
new_weights[counter+1] = weights[row_base+1][j]
new_weights[counter+2] = weights[row_base+2][j]
new_weights[counter+3] = weights[row_base+3][j]
counter = counter + 4
return new_weights.reshape(r, h)
def convert_to_x4_q15_weights(weights):
[r, h, w, c] = weights.shape
weights = np.reshape(weights, (r, h*w*c))
num_of_rows = r
num_of_cols = h*w*c
new_weights = np.copy(weights)
new_weights = np.reshape(new_weights, (r*h*w*c))
counter = 0
for i in range(int(num_of_rows/4)):
# we only need to do the re-ordering for every 4 rows
row_base = 4*i
        for j in range(int(num_of_cols/2)):  # fixed: /4 covered only half the columns, desyncing counter with the tail loop
# for each 2 entries
column_base = 2*j
new_weights[counter] = weights[row_base ][column_base ]
new_weights[counter+1] = weights[row_base ][column_base+1]
new_weights[counter+2] = weights[row_base+1][column_base ]
new_weights[counter+3] = weights[row_base+1][column_base+1]
new_weights[counter+4] = weights[row_base+2][column_base ]
new_weights[counter+5] = weights[row_base+2][column_base+1]
new_weights[counter+6] = weights[row_base+3][column_base ]
new_weights[counter+7] = weights[row_base+3][column_base+1]
counter = counter + 8
# the remaining ones are in order
for j in range((int)(num_of_cols-num_of_cols%2), int(num_of_cols)):
new_weights[counter] = weights[row_base][j]
new_weights[counter+1] = weights[row_base+1][j]
new_weights[counter+2] = weights[row_base+2][j]
new_weights[counter+3] = weights[row_base+3][j]
counter = counter + 4
return new_weights.reshape(r, h)
def convert_q7_q15_weights(weights):
[r, h, w, c] = weights.shape
weights = np.reshape(weights, (r, h*w*c))
num_of_rows = r
num_of_cols = h*w*c
new_weights = np.copy(weights)
new_weights = np.reshape(new_weights, (r*h*w*c))
counter = 0
for i in range(int(num_of_rows/4)):
# we only need to do the re-ordering for every 4 rows
row_base = 4*i
        for j in range(int(num_of_cols/2)):  # fixed: /4 covered only half the columns, desyncing counter with the tail loop
# for each 2 entries
column_base = 2*j
new_weights[counter] = weights[row_base ][column_base ]
new_weights[counter+1] = weights[row_base+1][column_base ]
new_weights[counter+2] = weights[row_base ][column_base+1]
new_weights[counter+3] = weights[row_base+1][column_base+1]
new_weights[counter+4] = weights[row_base+2][column_base ]
new_weights[counter+5] = weights[row_base+3][column_base ]
new_weights[counter+6] = weights[row_base+2][column_base+1]
new_weights[counter+7] = weights[row_base+3][column_base+1]
counter = counter + 8
# the remaining ones are in order
for j in range((int)(num_of_cols-num_of_cols%2), int(num_of_cols)):
new_weights[counter] = weights[row_base][j]
new_weights[counter+1] = weights[row_base+1][j]
new_weights[counter+2] = weights[row_base+2][j]
new_weights[counter+3] = weights[row_base+3][j]
counter = counter + 4
return new_weights.reshape(r, h)
if __name__ == "__main__":
# input dimensions
vec_dim = 127
row_dim = 127
weight = np.zeros((row_dim,vec_dim), dtype=int)
# generate random inputs
for i in range(row_dim):
for j in range(vec_dim):
weight[i][j] = np.random.randint(256)-128
weight = np.reshape(weight, (row_dim, vec_dim, 1, 1))
outfile = open("../Ref_Implementations/fully_connected_testing_weights.h", "w")
outfile.write("#define IP2_WEIGHT {")
weight.tofile(outfile,sep=",",format="%d")
outfile.write("}\n\n")
new_weight = convert_to_x4_q7_weights(weight)
outfile.write("#define IP4_WEIGHT {")
new_weight.tofile(outfile,sep=",",format="%d")
outfile.write("}\n\n")
new_weight = convert_q7_q15_weights(weight)
outfile.write("#define IP4_q7_q15_WEIGHT {")
new_weight.tofile(outfile,sep=",",format="%d")
outfile.write("}\n\n")
new_weight = convert_to_x4_q15_weights(weight)
outfile.write("#define IP4_WEIGHT_Q15 {")
new_weight.tofile(outfile,sep=",",format="%d")
outfile.write("}\n\n")
outfile.close()
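    # Small illustrative check (not in the original file): interleave a 4x4
    # matrix laid out as (r, h, w, c) = (4, 4, 1, 1), the shape the converters
    # expect; columns come out in the 0/2-then-1/3 pair order described above.
    demo = np.arange(16).reshape(4, 4, 1, 1)
    print(convert_to_x4_q7_weights(demo))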
| avg_line_length 40.586667 | max_line_length 83 | alphanum_fraction 0.64931 | (remaining quality-signal columns omitted) |
| 3fe8590f0de37e670e206f9cfe46d5785e6ac0a9 | 631 | py | Python | electricity_api/models.py | gbakthavatchalam/electricity-api | 5f8e8c9a813ffc059db8fb39d4022fb9a00f216b | ["Apache-2.0"] | stars: null | issues: null | forks: null |
from django.db import models
class Days(models.Model):
class Meta:
db_table = "days"
day_id = models.IntegerField(primary_key=True)
user_id = models.IntegerField()
timestamp_raw = models.TextField(db_column="timestamp")
consumption = models.IntegerField()
temperature = models.IntegerField()
class Months(models.Model):
class Meta:
db_table = "months"
month_id = models.IntegerField(primary_key=True)
user_id = models.IntegerField()
timestamp_raw = models.TextField(db_column="timestamp")
consumption = models.IntegerField()
temperature = models.IntegerField()
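# Hypothetical helper (not part of the original file): a standard Django ORM
# query against the models above.
def weekly_consumption(user_id):
    """Sum consumption over a user's seven most recent day rows."""
    recent = Days.objects.filter(user_id=user_id).order_by("-day_id")[:7]
    return sum(day.consumption for day in recent)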
| avg_line_length 26.291667 | max_line_length 59 | alphanum_fraction 0.709984 | (remaining quality-signal columns omitted) |
| 3fef40fcd20ff41a32204e723549c85b6402b10b | 9,762 | py | Python | robotframework-onap/tests/ONAPLibrary/JSONKeywordsTest.py | onap/testsuite-python-testing-utils | 84d2f0e4e68862b8cbfd5b1aec7fea747507d501 | ["Apache-2.0"] | stars: 2 (2019-04-30T23:50:46.000Z .. 2019-05-22T13:45:42.000Z) | issues: null | forks: 1 (2021-10-15T15:32:51.000Z .. 2021-10-15T15:32:51.000Z) |
# Copyright 2019 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from unittest import TestCase
from ONAPLibrary.JSONKeywords import JSONKeywords
class JSONKeywordsTest(TestCase):
content_empty_string = '{}'
content_empty_dict = {}
content1_string = '{"foo": "bar"}'
content1_dict = {"foo": u"bar"}
content1b_string = '{"foo": "quuz"}'
content1b_dict = {"foo": u"quuz"}
content2_string = '{"baz": "quuz"}'
content2_dict = {"baz": u"quuz"}
content_big_string = '{"foo": "bar", "baz": "quuz"}'
content_big_dict = {"foo": u"bar", "baz": u"quuz"}
def setUp(self):
self.jk = JSONKeywords()
# equality - corner cases
def test_json_empty_strings_equality(self):
left_json_string = JSONKeywordsTest.content_empty_string
right_json_string = JSONKeywordsTest.content_empty_string
self.assertTrue(self.jk.json_equals(left_json_string, right_json_string))
def test_json_empty_objects_equality(self):
left_json_object = JSONKeywordsTest.content_empty_dict
right_json_object = JSONKeywordsTest.content_empty_dict
self.assertTrue(self.jk.json_equals(left_json_object, right_json_object))
# equality - type conversions
def test_json_strings_equality(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_equals(left_json_string, right_json_string))
def test_json_objects_equality(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_equals(left_json_object, right_json_object))
def test_json_string_object_equality(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_equals(left_json_string, right_json_object))
def test_json_object_string_equality(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_equals(left_json_object, right_json_string))
# equality - difference detection
def test_json_strings_inequality(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content2_string
self.assertFalse(self.jk.json_equals(left_json_string, right_json_string))
def test_json_objects_inequality(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content2_dict
self.assertFalse(self.jk.json_equals(left_json_object, right_json_object))
def test_json_string_object_inequality(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_object = JSONKeywordsTest.content2_dict
self.assertFalse(self.jk.json_equals(left_json_string, right_json_object))
def test_json_object_string_inequality(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_string = JSONKeywordsTest.content2_string
self.assertFalse(self.jk.json_equals(left_json_object, right_json_string))
# subsets - corner cases
def test_json_empty_strings_subset(self):
left_json_string = JSONKeywordsTest.content_empty_string
right_json_string = JSONKeywordsTest.content_empty_string
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_string, right_json_string))
def test_json_empty_objects_subset(self):
left_json_object = JSONKeywordsTest.content_empty_dict
right_json_object = JSONKeywordsTest.content_empty_dict
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_object, right_json_object))
# subsets - type conversions
def test_json_strings_subset(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_string, right_json_string))
def test_json_objects_subset(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_object, right_json_object))
def test_json_string_object_subset(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_string, right_json_object))
def test_json_object_string_subset(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_object, right_json_string))
# subsets - inclusion
def test_json_strings_proper_subset(self):
left_json_string = JSONKeywordsTest.content_big_string
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_string, right_json_string))
def test_json_objects_proper_subset(self):
left_json_object = JSONKeywordsTest.content_big_dict
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_object, right_json_object))
def test_json_string_object_proper_subset(self):
left_json_string = JSONKeywordsTest.content_big_string
right_json_object = JSONKeywordsTest.content1_dict
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_string, right_json_object))
def test_json_object_string_proper_subset(self):
left_json_object = JSONKeywordsTest.content_big_dict
right_json_string = JSONKeywordsTest.content1_string
self.assertTrue(self.jk.json_should_contain_sub_json(left_json_object, right_json_string))
# subsets - intersection
def test_json_strings_intersection(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content_big_string
self.assertFalse(self.jk.json_should_contain_sub_json(left_json_string, right_json_string))
def test_json_objects_intersection(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content_big_dict
self.assertFalse(self.jk.json_should_contain_sub_json(left_json_object, right_json_object))
def test_json_string_object_intersection(self):
        left_json_string = JSONKeywordsTest.content1_string
        right_json_object = JSONKeywordsTest.content_big_dict
self.assertFalse(self.jk.json_should_contain_sub_json(left_json_string, right_json_object))
def test_json_object_string_intersection(self):
        left_json_object = JSONKeywordsTest.content1_dict
        right_json_string = JSONKeywordsTest.content_big_string
self.assertFalse(self.jk.json_should_contain_sub_json(left_json_object, right_json_string))
# subsets - exclusion
def test_json_strings_exclusion(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content2_string
self.assertFalse(self.jk.json_equals(left_json_string, right_json_string))
def test_json_objects_exclusion(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content2_dict
self.assertFalse(self.jk.json_equals(left_json_object, right_json_object))
def test_json_string_object_exclusion(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_object = JSONKeywordsTest.content2_dict
self.assertFalse(self.jk.json_equals(left_json_string, right_json_object))
def test_json_object_string_exclusion(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_string = JSONKeywordsTest.content2_string
self.assertFalse(self.jk.json_equals(left_json_object, right_json_string))
# subsets - value change detection
def test_json_strings_changed_value(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_string = JSONKeywordsTest.content1b_string
self.assertFalse(self.jk.json_equals(left_json_string, right_json_string))
def test_json_objects_changed_value(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_object = JSONKeywordsTest.content1b_dict
self.assertFalse(self.jk.json_equals(left_json_object, right_json_object))
def test_json_string_object_changed_value(self):
left_json_string = JSONKeywordsTest.content1_string
right_json_object = JSONKeywordsTest.content1b_dict
self.assertFalse(self.jk.json_equals(left_json_string, right_json_object))
def test_json_object_string_changed_value(self):
left_json_object = JSONKeywordsTest.content1_dict
right_json_string = JSONKeywordsTest.content1b_string
self.assertFalse(self.jk.json_equals(left_json_object, right_json_string))
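# Illustrative usage outside the test harness (API exactly as exercised above):
if __name__ == '__main__':
    jk = JSONKeywords()
    print(jk.json_equals('{"foo": "bar"}', {"foo": u"bar"}))    # True
    print(jk.json_should_contain_sub_json(
        '{"foo": "bar", "baz": "quuz"}', {"foo": u"bar"}))      # True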
| avg_line_length 47.619512 | max_line_length 99 | alphanum_fraction 0.775866 | (remaining quality-signal columns omitted) |
| b2092a0dd4d5965d46c3866ca3cf0f46f54a9264 | 416,844 | py | Python | 1andi.py | Fahsaiiii2608/tk | 44b4a14de11a328a9fa2ca6d0c23b1f52850adff | ["MIT"] | stars: null | issues: null | forks: 2 (2018-11-04T11:17:46.000Z .. 2019-01-20T14:10:48.000Z) |
# -*- coding: utf-8 -*-
# NOTE: this bot script targets Python 2 (print statements, urllib2, reload(sys)).
import LINETCR
from LINETCR.lib.curve.ttypes import *
from datetime import datetime
from bs4 import BeautifulSoup
from threading import Thread
from googletrans import Translator
import requests
from io import StringIO
from urllib import urlopen
from gtts import gTTS
import time,random,sys,json,codecs,threading,glob,urllib,urllib2,urllib3,re,ast,os,subprocess,wikipedia,tempfile  # requests is already imported above
cl = LINETCR.LINE()
cl.login(token="")
cl.loginResult()
print u"login success"
reload(sys)
sys.setdefaultencoding('utf-8')
myhelpMessage ="""──────┅═ইई═┅──────
╔════════════════
╠==[ DAENG TEAM BOT ]==
╚════════════════
──────┅═ইई═┅──────
🄲🄾🄼🄼🄰🄽🄳 🄷🄴🄻🄿
──────┅═ইई═┅──────
❂͜͡☆➣ ╠Id
❂͜͡☆➣ ╠Cek @
❂͜͡☆➣ ╠Mid @
❂͜͡☆➣ ╠Gn:
❂͜͡☆➣ ╠Gname:
❂͜͡☆➣ ╠Cancel
❂͜͡☆➣ ╠tagmem
❂͜͡☆➣ ╠Halo
❂͜͡☆➣ ╠Hola
❂͜͡☆➣ ╠Tag all
❂͜͡☆➣ ╠Lurk on/off
❂͜͡☆➣ ╠Lurkers
❂͜͡☆➣ ╠Intip on/off
❂͜͡☆➣ ╠Intip
❂͜͡☆➣ ╠Apakah
❂͜͡☆➣ ╠Kapan
❂͜͡☆➣ ╠Berapa
❂͜͡☆➣ ╠Ginfo
❂͜͡☆➣ ╠Glist
❂͜͡☆➣ ╠Memlist
❂͜͡☆➣ ╠Friendlist
❂͜͡☆➣ ╠Friendinfo:
❂͜͡☆➣ ╠Frienpict:
❂͜͡☆➣ ╠Speed
❂͜͡☆➣ ╠.Speed
❂͜͡☆➣ ╠About
❂͜͡☆➣ ╠About1
❂͜͡☆➣ ╠Runtime
❂͜͡☆➣ ╠Runtime1
❂͜͡☆➣ ╠Detail @
❂͜͡☆➣ ╠Detail grup
❂͜͡☆➣ ╠Pp @
❂͜͡☆➣ ╠Cover @
❂͜͡☆➣ ╠Picturl @
❂͜͡☆➣ ╠Backup:on / off
❂͜͡☆➣ ╠Mybackup
❂͜͡☆➣ ╠Allname:
❂͜͡☆➣ ╠Allbio
❂͜͡☆➣ ╠Invite
❂͜͡☆➣ ╠Blacklist
❂͜͡☆➣ ╠Blacklist:
❂͜͡☆➣ ╠Blacklist @
❂͜͡☆➣ ╠Unban:
❂͜͡☆➣ ╠Unban:on
❂͜͡☆➣ ╠Unban @
❂͜͡☆➣ ╠Whitelist
❂͜͡☆➣ ╠Whitelist @
──────┅═ইई═┅──────
🄲🄾🄼🄼🄰🄽🄳 🄰🄳🄼🄸🄽
──────┅═ইई═┅──────
❂͜͡☆➣ ╠•cw
❂͜͡☆➣ ╠•lc
❂͜͡☆➣ ╠•pt
❂͜͡☆➣ ╠•ps
❂͜͡☆➣ ╠•fb
❂͜͡☆➣ ╠Frofileig
❂͜͡☆➣ ╠Music
❂͜͡☆➣ ╠•ms
❂͜͡☆➣ ╠•Music
❂͜͡☆➣ ╠/musik
❂͜͡☆➣ ╠/musrik
❂͜͡☆➣ ╠•lr
❂͜͡☆➣ ╠Lirik
❂͜͡☆➣ ╠•yt
❂͜͡☆➣ ╠•Youtube
❂͜͡☆➣ ╠ytsearch
❂͜͡☆➣ ╠Youtube
❂͜͡☆➣ ╠Youtubemp4
❂͜͡☆➣ ╠Gift @
❂͜͡☆➣ ╠List favorite
❂͜͡☆➣ ╠Reboot
❂͜͡☆➣ ╠Hay @
❂͜͡☆➣ ╠Woy! @
❂͜͡☆➣ ╠Spamtag @
❂͜͡☆➣ ╠Update sambutan
❂͜͡☆➣ ╠Papay
──────┅═ইई═┅──────
🄲🄾🄼🄼🄰🄽🄳 🅂🄴🅃🅃🄸🄽🄶🅂
──────┅═ইई═┅──────
❂͜͡☆➣ ╠Auto join on/off
❂͜͡☆➣ ╠Auto add on/off
❂͜͡☆➣ ╠Leave on/off
❂͜͡☆➣ ╠Backup:on/off
❂͜͡☆➣ ╠Contact on/off
❂͜͡☆➣ ╠Com on/off
❂͜͡☆➣ ╠Share on/off
❂͜͡☆➣ ╠Protect on/off
❂͜͡☆➣ ╠Qrprotect on/off
❂͜͡☆➣ ╠InviteProtect on/off
❂͜͡☆➣ ╠Cancelprotect on/off
❂͜͡☆➣ ╠Set all on/off
❂͜͡☆➣ ╠Panick:on/off
❂͜͡☆➣ ╠Respon on/off
❂͜͡☆➣ ╠Tag on/off
❂͜͡☆➣ ╠Sambutan on/off
❂͜͡☆➣ ╠Simisimi on/off
❂͜͡☆➣ ╠Like:on/off
──────┅═ইई═┅──────
🅃🅁🄰🄽🅂🄻🄰🅃🄴
──────┅═ইई═┅──────
❂͜͡☆➣ ╠En@id
❂͜͡☆➣ ╠Jp@id
❂͜͡☆➣ ╠Ko@id
❂͜͡☆➣ ╠Ar@id
❂͜͡☆➣ ╠Th@id
❂͜͡☆➣ ╠Say
❂͜͡☆➣ ╠Say-ar
❂͜͡☆➣ ╠Say-jp
❂͜͡☆➣ ╠Say-ko
❂͜͡☆➣ ╠Say-en
──────┅═ইई═┅──────
╔════════════════
╠==[ DAENG TEAM BOT ]==
╚════════════════
──────┅═ইई═┅──────
"""
helo=""
KAC=[cl]
mid = cl.getProfile().mid
admin = []
targets = []
Bots = [""]
creator = ""
admsa = ""
admin = ""
whitelist=[""]
wait = {
'contact':False,
'autoJoin':True,
'autoCancel':{"on":False,"members":50},
'AutoJoinCancel':True,
'Admin':True,
'Members':1,
'leaveRoom':False,
'timeline':False,
'autoAdd':False,
'message':"Hayo ketauan nge add",
"lang":"JP",
"comment1":"Auto Like By ҉̶̶̘̟̼̉̈́͐͋͌̊D҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊Δ̶҉̶̶̘̟̼̉̈́͐͋͌̊҉̘̉̈́͐͋͌̊҉̶̘̟̼̉̈́͐͋͌̊҉̘̟̉̈́͐͋͌̊E҉̶̶̘̟̼̉̈́͐͋͌̊҉̘̉̈́͐͋͌̊҉̶̘̟̼̉̈́͐͋͌̊҉̘̟̉̈́͐͋͌̊N҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊҉̘̟̉̈́͐͋͌̊G҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊ྈT҉̶̶̘̟̼̉̈́͐͋͌̊҉̘̉̈́͐͋͌̊҉̶̘̟̼̉̈́͐͋͌̊҉̘̟̉̈́͐͋͌̊E҉̶̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊A҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊҉̶̶̶̘̟̼̉̈́͐͋͌̊҉ͩ͂҉̘̟̼̉̈́͐͋͌̊҉̶̶̘̟̼̉̈́͐͋͌̊M҉̶̘̟̼̉̈́͐͋͌̊҉ͩ͂ྈB҉̶̶̘̟̼̉̈́͐͋͌̊Ω҉̶̘̟̼̉̈́͐͋͌̊T҉̶̘̟̼̉̈́͐͋͌̊S҉̶̘̟̼̉̈́͐͋͌̊ ",
"commentOn":True,
"likeOn":True,
"commentBlack":{},
"wblack":False,
"dblack":False,
"clock":False,
"me":"me",
"cNames":"",
"blacklist":{},
"wblacklist":False,
"dblacklist":False,
"protect":False,
"cancelprotect":False,
"inviteprotect":False,
"linkprotect":False,
"detectMention":True,
"kickMention":False,
"Backup":False,
"atjointicket":True,
"gift":{},
"Sider":{},
"tag":False,
"sticker":{},
"Sambutan":False,
"userAgent": [
"Mozilla/5.0 (X11; U; Linux i586; de; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (X11; U; Linux amd64; rv:5.0) Gecko/20100101 Firefox/5.0 (Debian)",
"Mozilla/5.0 (X11; U; Linux amd64; en-US; rv:5.0) Gecko/20110619 Firefox/5.0",
"Mozilla/5.0 (X11; Linux) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20100101 Firefox/5.0 FirePHP/0.5",
"Mozilla/5.0 (X11; Linux x86_64; rv:5.0) Gecko/20100101 Firefox/5.0 Firefox/5.0",
"Mozilla/5.0 (X11; Linux x86_64) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; Linux ppc; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (X11; Linux AMD64) Gecko Firefox/5.0",
"Mozilla/5.0 (X11; FreeBSD amd64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.2; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:5.0) Gecko/20110619 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1; rv:6.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 6.1.1; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.2; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.1; U; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.1; rv:2.0.1) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.0; WOW64; rv:5.0) Gecko/20100101 Firefox/5.0",
"Mozilla/5.0 (Windows NT 5.0; rv:5.0) Gecko/20100101 Firefox/5.0"
],
}
wait2 = {
'readPoint':{},
'readMember':{},
'setTime':{},
"ricoinvite":{},
'ROM':{},
}
tikel = {
'STKID': '17866',
'STKPKGID': '1070',
'STKVER': '2'
}
mimic = {
"copy":False,
"copy2":False,
"status":False,
"target":{}
}
cctv = {
"cyduk":{},
"point":{},
"MENTION":{},
"sidermem":{}
}
settings = {
"simiSimi":{}
}
res = {
'num':{},
'us':{},
'au':{},
}
setTime = {}
setTime = wait2['setTime']
mulai = time.time()
contact = cl.getProfile()
mybackup = cl.getProfile()
mybackup.displayName = contact.displayName
mybackup.statusMessage = contact.statusMessage
mybackup.pictureStatus = contact.pictureStatus
def removeAllMessages(self, lastMessageId):
return self.Talk.client.removeAllMessages(0, lastMessageId)
def restart_program():
python = sys.executable
os.execl(python, python, * sys.argv)
def download_page(url):
version = (3,0)
cur_version = sys.version_info
if cur_version >= version:
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
req = urllib.request.Request(url, headers = headers)
resp = urllib.request.urlopen(req)
respData = str(resp.read())
return respData
except Exception as e:
print(str(e))
else:
import urllib2
try:
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.27 Safari/537.17"
req = urllib2.Request(url, headers = headers)
response = urllib2.urlopen(req)
page = response.read()
return page
except:
return"Page Not found"
def _images_get_next_item(s):
start_line = s.find('rg_di')
if start_line == -1:
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('"class="rg_meta"')
start_content = s.find('"ou"',start_line+1)
end_content = s.find(',"ow"',start_content+1)
content_raw = str(s[start_content+6:end_content-1])
return content_raw, end_content
def _images_get_all_items(page):
items = []
while True:
item, end_content = _images_get_next_item(page)
if item == "no_links":
break
else:
items.append(item)
time.sleep(0.1)
page = page[end_content:]
return items
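# Usage sketch for the two scraper helpers above (illustrative; relies on
# Google Images' old 'rg_meta' markup, which has since changed):
#   page = download_page("https://www.google.com/search?q=cats&tbm=isch")
#   urls = _images_get_all_items(page)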
def upload_tempimage(client):
'''
Upload a picture of a kitten. We don't ship one, so get creative!
'''
config = {
'album': album,
'name': 'bot auto upload',
'title': 'bot auto upload',
'description': 'bot auto upload'
}
print("Uploading image... ")
image = client.upload_from_path(image_path, config=config, anon=False)
print("Done")
print()
def sendSticker(self,
stickerId = "13",
stickerPackageId = "1",
stickerVersion = "100",
stickerText="[null]"):
try:
message = Message(to=self.id, text="")
message.contentType = ContentType.STICKER
message.contentMetadata = {
'STKID': stickerId,
'STKPKGID': stickerPackageId,
'STKVER': stickerVersion,
'STKTXT': stickerText,
}
self._client.sendMessage(message)
return True
except Exception as e:
raise e
def sendSticker(self, to, packageId, stickerId):  # NOTE: shadows the sendSticker defined above
contentMetadata = {
'STKVER': '100',
'STKPKGID': packageId,
'STKID': stickerId
}
return self.sendMessage(to, '', contentMetadata, 7)
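# Usage sketch (illustrative): this module-level helper takes the client as
# `self`; sticker and package ids come from the LINE sticker shop:
#   sendSticker(cl, msg.to, '1', '13')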
def summon(to,nama):
aa = ""
bb = ""
strt = int(12)
akh = int(12)
nm = nama
#print nm
for mm in nm:
akh = akh + 2
aa += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(mm)+"},"""
strt = strt + 6
akh = akh + 4
bb += "• @c \n"
aa = (aa[:int(len(aa)-1)])
msg = Message()
msg.to = to
msg.text = "「Mention」\n"+bb
msg.contentMetadata = {'MENTION':'{"MENTIONEES":['+aa+']}','EMTVER':'4'}
#print msg
try:
cl.sendMessage(msg)
except Exception as error:
print error
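# Usage sketch (illustrative): summon() builds and sends an @-mention message
# for a list of member mids, as the handlers further below do:
#   summon(msg.to, [target_mid])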
def waktu(secs):
mins, secs = divmod(secs,60)
hours, mins = divmod(mins,60)
return '%02d Jam %02d Menit %02d Detik' % (hours, mins, secs)
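# Example (illustrative): waktu(3661) -> '01 Jam 01 Menit 01 Detik'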
def yt(query):
with requests.session() as s:
isi = []
if query == "":
query = "S1B tanysyz"
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html5lib')
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
if 'watch?v' in a['href']:
b = a['href'].replace('watch?v=', '')
isi += ['youtu.be' + b]
return isi
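# Usage sketch (illustrative): yt() scrapes YouTube's old results markup
# (.yt-lockup-title), so it may return nothing on today's pages:
#   print yt("lo-fi beats")[:3]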
def sendImage(self, to_, path):
M = Message(to=to_,contentType = 1)
M.contentMetadata = None
M.contentPreview = None
M_id = self._client.sendMessage(M).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'image',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self._client.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload image failure.')
#r.content
return True
def sendImageWithURL(self, to_, url):
path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), randint(0, 9))
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(path, 'w') as f:
shutil.copyfileobj(r.raw, f)
else:
raise Exception('Download image failure.')
try:
self.sendImage(to_, path)
except Exception as e:
raise e
def sendAudio(self, to_, path):
M = Message(to=to_, text=None, contentType = 3)
M.contentMetadata = None
M.contentPreview = None
M2 = self.Talk.client.sendMessage(0,M)
M_id = M2.id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'audio',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload audio failure.')
return True
def sendAudioWithUrl(self, to_, url):
path = '%s/pythonLine-%1.data' % (tempfile.gettempdir(), randint(0, 9))
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(path, 'w') as f:
shutil.copyfileobj(r.raw, f)
else:
raise Exception('Download audio failure.')
try:
self.sendAudio(to_, path)
except Exception as e:
raise (e)
def cms(string, commands): #/XXX, >XXX, ;XXX, ^XXX, %XXX, $XXX...
tex = ["+","@","/",">",";","^","%","$","^","サテラ:","サテラ:","サテラ:","サテラ:"]
for texX in tex:
for command in commands:
if string ==command:
return True
return False
def sendMessage(to, text, contentMetadata={}, contentType=0):
mes = Message()
mes.to, mes.from_ = to, profile.mid
mes.text = text
mes.contentType, mes.contentMetadata = contentType, contentMetadata
if to not in messageReq:
messageReq[to] = -1
messageReq[to] += 1
def NOTIFIED_READ_MESSAGE(op):
try:
if op.param1 in wait2['readPoint']:
Name = cl.getContact(op.param2).displayName
if Name in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += "\n9§9" + Name
wait2['ROM'][op.param1][op.param2] = "9§9" + Name
else:
pass
except:
pass
def bot(op):
try:
if op.type == 0:
return
if op.type == 13:
if mid in op.param3:
G = cl.getGroup(op.param1)
if wait["autoJoin"] == True:
if wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
elif wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
                Inviter = op.param3.replace("\x1e", ",")  # mids are packed with an invisible 0x1e separator; the empty-string literal here was extraction loss
InviterX = Inviter.split(",")
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, InviterX)
if matched_list == []:
pass
else:
cl.cancelGroupInvitation(op.param1, matched_list)
if op.type == 19:
if mid in op.param3:
wait["blacklist"][op.param2] = True
if op.type == 22:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 24:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 26:
msg = op.message
if msg.toType == 0:
msg.to = msg.from_
if msg.from_ == "u9fb3e682d923b374004d3942221fc439":
if "join:" in msg.text:
list_ = msg.text.split(":")
try:
cl.acceptGroupInvitationByTicket(list_[1],list_[2])
G = cl.getGroup(list_[1])
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
cl.sendText(msg.to,"error")
if msg.toType == 1:
if wait["leaveRoom"] == True:
cl.leaveRoom(msg.to)
if msg.contentType == 16:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1001)
if op.type == 26:
msg = op.message
if msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
text = msg.text
if text is not None:
cl.sendText(msg.to,text)
if op.type == 26:
msg = op.message
if msg.to in settings["simiSimi"]:
if settings["simiSimi"][msg.to] == True:
if msg.text is not None:
text = msg.text
r = requests.get("http://api.ntcorp.us/chatbot/v1/?text=" + text.replace(" ","+") + "&key=beta1.nt")
data = r.text
data = json.loads(data)
if data['status'] == 200:
if data['result']['result'] == 100:
cl.sendText(msg.to, "[From Simi]\n" + data['result']['response'].encode('utf-8'))
            if 'MENTION' in msg.contentMetadata:  # equivalent to the odd chained "in ... != None" idiom
if wait["detectMention"] == True:
#msg.text.replace("@"+cl.getProfile().displayName,"")
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = [cName + "\nDᴏɴᴛ Tᴀɢ Mᴇ!! Cɪᴘᴏᴋ Lᴏʜ Nᴀɴᴛɪ"]
#balas = ["Kenapa Tag Si "+cl.getProfile().displayName+"Kangen yah..!!!\nPC aja langsung ownernya biar anu hihi..!!\n[autoRespon]","Nah ngetag lagi si "+cl.getProfile().displayName+" mending ajak mojok aja ownernya dari pada ngetag mulu.. wkwk...!!!\n[autoRespon]"]
balas = [cName + "\n Iɴɪ Pᴇɴᴀᴍᴘᴀᴋᴀɴ Mᴀᴋʜʟᴜᴋ Jᴏɴᴇs \nYᴀɴɢ sᴜᴋᴀ Nɢᴇᴛᴀɢ ɢᴡ!! ",cName + "\nHᴀʟᴏ Fᴀɴs Bᴇʀᴀᴛ Gᴡ!! \nAᴅᴀ Pᴇʀʟᴜ ᴀᴘᴀ Tᴀɢ Gᴡ..",cName + "\nɴᴀᴘᴀ ʟᴏ ᴛᴀɢ ɢᴡ! \nᴘᴇʀɢɪʜ ᴊᴀᴜʜ ᴊᴀᴜʜ sᴀɴᴀ",cName + "\nHᴀᴅɪʀʀʀʀʀ Sᴇʟᴀʟᴜ \nAᴅᴀ ʏᴀɴɢ ʙɪsᴀ sᴀʏᴀ ʙᴀɴᴛᴜ",cName + "\nJᴀᴅɪ ᴏʀᴀɴɢ cantik ᴇᴍᴀɴᴋ sᴜsᴀʜ \nsʟᴀʟᴜ ᴅɪ ᴛᴀɢ ᴅᴍɴᴀ ᴍᴀɴᴀ",cName + "\nDᴏɴᴛ Tᴀɢ Mᴇ....!!! \nPᴇʀɢɪ sᴀɴᴀ ᴊᴀᴜʜ ᴊᴀᴜʜ \nHᴜs Hᴜs Hᴜs"]
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
ret_ = "[Auto Respond] \n" + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
#cl.sendText(msg.to,"@"+cl.getProfile().displayName,"")
#summon(op.param1, [op.param2])
cl.sendText(msg.to,ret_)
summon(op.param1, [op.param2])
cl.sendImageWithURL(msg.to,path)
#msg.contentType = 7
#msg.text = None
#msg.contentMetadata = {'STKID' : '22987198'
# 'STKPKGID' : '1711359'
# 'STKVER' : '1'
# 'STKVER' : '1'}
#cl.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {'STKID': '27533225',
'STKPKGID': '10306',
'STKVER': '1'}
#'STKVER': '1'}
# cl.sendMessage(msg)
# msg.contentType = 7
# msg.text = None
# msg.contentMetadata = {'STKID': '7',
# 'STKPKGID': '1',
# 'STKVER': '100'}
cl.sendMessage(msg)
break
#------------------------------------------------------------------------------------------#
            if 'MENTION' in msg.contentMetadata:  # equivalent to the odd chained "in ... != None" idiom
if wait["tag"] == True:
names = re.findall(r'@(\w+)',msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
xname = cl.getContact(msg.from_).displayName
xlen = str(len(xname)+1)
msg.contentType = 0
balas = "@" +xname+ "\nApasih tag-tag? Penting PC aja. ","@" +xname+ "\nfans aku yaa?","@" +xname+ "\nNggak Usah Tag-Tag! \nKalo Ada Perlu PC Aja!","@" +xname+ "\n\nKenapa Tag saya?","@" +xname+ "\n\nSokap deh ngetag. ","@" +xname+ "\n\napaan ngetag?"
msg.text = random.choice(balas)
#ret_ = "[Auto Respond] \n" + random.choice(balas)
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(msg.from_)+'}]}','EMTVER':'4'}
#cl.sendText(msg.to,ret_)
cl.sendMessage(msg)
if msg.contentType == 16:
if wait['likeOn'] == True:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1005)
#ki.like(url[25:58], url[66:], likeType=1002)
#ki2.like(url[25:58], url[66:], likeType=1004)
#ki3.like(url[25:58], url[66:], likeType=1003)
#ki4.like(url[25:58], url[66:], likeType=1001)
#ki5.like(url[25:58], url[66:], likeType=1001)
#ki6.like(url[25:58], url[66:], likeType=1001)
#ki7.like(url[25:58], url[66:], likeType=1001)
cl.comment(url[25:58], url[66:], wait["comment1"])
#ki.comment(url[25:58], url[66:], wait["comment2"])
#ki2.comment(url[25:58], url[66:], wait["comment3"])
#ki3.comment(url[25:58], url[66:], wait["comment4"])
#ki4.comment(url[25:58], url[66:], wait["comment5"])
#ki5.comment(url[25:58], url[66:], wait["comment6"])
#ki6.comment(url[25:58], url[66:], wait["comment7"])
#ki7.comment(url[25:58], url[66:], wait["comment8"])
#cl.sendText(msg.to,"Like Success")
wait['likeOn'] = True
if op.type == 25:
msg = op.message
if msg.contentType == 7:
if wait['sticker'] == True:
stk_id = msg.contentMetadata['STKID']
stk_ver = msg.contentMetadata['STKVER']
pkg_id = msg.contentMetadata['STKPKGID']
filler = "☸ Sticker Check ☸\n\n☑ STKID : %s\n☑ STKPKGID : %s\n☑ STKVER : %s\n☸ Link:\nline://shop/detail/%s" % (stk_id,pkg_id,stk_ver,pkg_id)
cl.sendText(msg.to, filler)
else:
pass
#------------------------------------------------------------------------------------------#
            if 'MENTION' in msg.contentMetadata:  # equivalent to the odd chained "in ... != None" idiom
if wait["kickMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["",cName + " Ngapain Ngetag?, ", cName + " Kenapa Tag saya?, " + cName + "?", "Ada Perlu apa, " + cName + "?","Tag doang tidak perlu., ", "Tersummon -_-, "]
ret_ = "[**Auto Respond**]\n " + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
cl.kickoutFromGroup(msg.to,[msg.from_])
break
if op.type == 25:
msg = op.message
if msg.contentType == 13:
if wait["wblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
cl.sendText(msg.to,"sudah masuk daftar hitam")
wait["wblack"] = False
else:
wait["commentBlack"][msg.contentMetadata["mid"]] = True
wait["wblack"] = False
cl.sendText(msg.to,"Itu tidak berkomentar")
elif wait["dblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
del wait["commentBlack"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblack"] = False
else:
wait["dblack"] = False
cl.sendText(msg.to,"Tidak ada dalam daftar hitam")
elif wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendText(msg.to,"sudah masuk daftar hitam")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
cl.sendText(msg.to,"Done")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
cl.sendText(msg.to,"Done")
elif wait["contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + msg.contentMetadata["displayName"] + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + contact.displayName + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
if op.type == 13:
if op.param3 in mid:
if op.param2 in kimid:
                G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
if op.param3 in kimid:
if op.param2 in ki2mid:
X = ki2.getGroup(op.param1)
X.preventJoinByTicket = False
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
ki.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
if op.param3 in ki2mid:
if op.param2 in ki3mid:
X = ki3.getGroup(op.param1)
X.preventJoinByTicket = False
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
if op.param3 in ki4mid:
if op.param2 in ki5mid:
X = ki5.getGroup(op.param1)
X.preventJoinByTicket = False
ki5.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki5.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
if op.param3 in ki5mid:
if op.param2 in ki6mid:
X = ki6.getGroup(op.param1)
X.preventJoinByTicket = False
ki6.updateGroup(X)
Ti = ki6.reissueGroupTicket(op.param1)
                    ki5.acceptGroupInvitationByTicket(op.param1,Ti)  # fixed typo: was k5, breaking the ki5/ki6 pattern of the blocks above
X.preventJoinByTicket = True
ki6.updateGroup(X)
#Ti = ki2.reissueGroupTicket(op.param1)
if msg.contentType == 13:
if wait["wblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
cl.sendText(msg.to,"sudah masuk daftar hitam👈")
wait["wblack"] = False
else:
wait["commentBlack"][msg.contentMetadata["mid"]] = True
wait["wblack"] = False
cl.sendText(msg.to,"Itu tidak berkomentar👈")
elif wait["dblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
del wait["commentBlack"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done")
wait["dblack"] = False
else:
wait["dblack"] = False
cl.sendText(msg.to,"Tidak ada dalam daftar hitam👈")
elif wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendText(msg.to,"sudah masuk daftar hitam")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
cl.sendText(msg.to,"Done👈")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done👈")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
cl.sendText(msg.to,"Done👈")
elif wait["contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + msg.contentMetadata["displayName"] + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + contact.displayName + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
elif msg.contentType == 16:
if wait["timeline"] == True:
msg.contentType = 0
if wait["lang"] == "JP":
msg.text = "menempatkan URL\n" + msg.contentMetadata["postEndUrl"]
else:
msg.text = "URL→\n" + msg.contentMetadata["postEndUrl"]
cl.sendText(msg.to,msg.text)
elif msg.text is None:
return
elif msg.text.lower() == 'help':
if wait["lang"] == "JP":
cl.sendText(msg.to,myhelpMessage)
else:
cl.sendText(msg.to,myhelpMessage)
elif "Mybot" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
elif "Pro1" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': kimid}
ki.sendMessage(msg)
elif msg.text in ["Bot1 Gift","Bot1 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '2'}
msg.text = None
ki.sendMessage(msg)
elif msg.text in ["Gift","gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '3'}
msg.text = None
cl.sendMessage(msg)
elif msg.text in ["Bot2 Gift","Bot2 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '3'}
msg.text = None
ki2.sendMessage(msg)
elif msg.text in ["Bot3 Gift","Bot3 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '4'}
msg.text = None
ki3.sendMessage(msg)
elif msg.text in ["Bot4 Gift","Bot4 gift"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': '3b92ccf5-54d3-4765-848f-c9ffdc1da020',
'PRDTYPE': 'THEME',
'MSGTPL': '5'}
msg.text = None
ki4.sendMessage(msg)
elif msg.text in ["Cancel","cancel"]:
if msg.from_ in admin:
if msg.toType == 2:
group = cl.getGroup(msg.to)
if group.invitee is not None:
gInviMids = [contact.mid for contact in group.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"No invites👈")
else:
cl.sendText(msg.to,"Invite people inside not👈")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Tidak ada undangan👈")
else:
cl.sendText(msg.to,"invitan tidak ada")
elif "Contact" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.to}
cl.sendMessage(msg)
elif "Pro1 mid" == msg.text:
ki.sendText(msg.to,kimid)
elif "Pro2 mid" == msg.text:
ki2.sendText(msg.to,ki2mid)
elif "Pro3 mid" == msg.text:
ki3.sendText(msg.to,ki3mid)
elif "Pro4 mid" == msg.text:
ki4.sendText(msg.to,ki4mid)
elif "Pro5 mid" == msg.text:
ki5.sendText(msg.to,ki5mid)
elif "Pro6 mid" == msg.text:
ki6.sendText(msg.to,ki6mid)
# elif "Pro7 mid" == msg.text:
# ki7.sendText(msg.to,ki7mid)
# elif "Pro8 mid" == msg.text:
# ki8.sendText(msg.to,ki8mid)
# elif "Pro9 mid" == msg.text:
# ki9.sendText(msg.to,ki9mid)
# elif "Pro10 mid" == msg.text:
# k1.sendText(msg.to,k1mid)
# elif "Pro11 mid" == msg.text:
# k2.sendText(msg.to,k2mid)
# elif "Pro12 mid" == msg.text:
# k3.sendText(msg.to,k3mid)
# elif "Pro13 mid" == msg.text:
# k4.sendText(msg.to,k4mid)
# elif "Pro14 mid" == msg.text:
# k5.sendText(msg.to,k5mid)
# elif "Pro15 mid" == msg.text:
# k6.sendText(msg.to,k6mid)
# elif "Pro16 mid" == msg.text:
# k7.sendText(msg.to,k7mid)
# elif "Pro17 mid" == msg.text:
# k8.sendText(msg.to,k8mid)
# elif "Pro18 mid" == msg.text:
# k9.sendText(msg.to,k9mid)
# elif "Pro19 mid" == msg.text:
# w1.sendText(msg.to,w1mid)
# elif "Pro20 mid" == msg.text:
# w2.sendText(msg.to,w2mid)
# elif "Pro21 mid" == msg.text:
# w3.sendText(msg.to,w3mid)
# elif "Pro22 mid" == msg.text:
# w4.sendText(msg.to,w4mid)
# elif "Pro23 mid" == msg.text:
# w5.sendText(msg.to,w5mid)
# elif "Pro24 mid" == msg.text:
# w6.sendText(msg.to,w6mid)
# elif "Pro25 mid" == msg.text:
# w7.sendText(msg.to,w7mid)
# elif "Pro26 mid" == msg.text:
# w8.sendText(msg.to,w8mid)
# elif "Pro27 mid" == msg.text:
# w9.sendText(msg.to,w9mid)
# elif "Pro28 mid" == msg.text:
# l1.sendText(msg.to,l1mid)
# elif "Pro29 mid" == msg.text:
# l2.sendText(msg.to,l2mid)
# elif "Pro30 mid" == msg.text:
# l3.sendText(msg.to,l3mid)
# elif "Pro31 mid" == msg.text:
# l4.sendText(msg.to,l4mid)
# elif "Pro32 mid" == msg.text:
# l5.sendText(msg.to,l5mid)
elif "All mid" == msg.text:
ki.sendText(msg.to,kimid)
ki2.sendText(msg.to,ki2mid)
ki3.sendText(msg.to,ki3mid)
ki4.sendText(msg.to,ki4mid)
ki5.sendText(msg.to,ki5mid)
ki6.sendText(msg.to,ki6mid)
#ki7.sendText(msg.to,ki7mid)
#ki8.sendText(msg.to,ki8mid)
#ki9.sendText(msg.to,ki9mid)
#k1.sendText(msg.to,k1mid)
#k2.sendText(msg.to,k2mid)
#k3.sendText(msg.to,k3mid)
#k4.sendText(msg.to,k4mid)
#k5.sendText(msg.to,k5mid)
#k6.sendText(msg.to,k6mid)
#k7.sendText(msg.to,k7mid)
#k8.sendText(msg.to,k8mid)
#k9.sendText(msg.to,k9mid)
#w1.sendText(msg.to,w1mid)
#w2.sendText(msg.to,w2mid)
#w3.sendText(msg.to,w3mid)
#w4.sendText(msg.to,w4mid)
#w5.sendText(msg.to,w5mid)
#w6.sendText(msg.to,w6mid)
#w7.sendText(msg.to,w7mid)
#w8.sendText(msg.to,w8mid)
#w9.sendText(msg.to,w9mid)
#l1.sendText(msg.to,l1mid)
#l2.sendText(msg.to,l2mid)
#l3.sendText(msg.to,l3mid)
#l4.sendText(msg.to,l4mid)
#l5.sendText(msg.to,l5mid)
elif "Timeline: " in msg.text:
tl_text = msg.text.replace("Timeline: ","")
cl.sendText(msg.to,"line://home/post?userMid="+mid+"&postId="+cl.new_post(tl_text)["result"]["post"]["postInfo"]["postId"])
elif "Allname: " in msg.text:
string = msg.text.replace("Allname: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki2.getProfile()
profile.displayName = string
ki2.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki3.getProfile()
profile.displayName = string
ki3.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki4.getProfile()
profile.displayName = string
ki4.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki5.getProfile()
profile.displayName = string
ki5.updateProfile(profile)
if len(string.decode('utf-8')) <= 20:
profile = ki6.getProfile()
profile.displayName = string
ki6.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = ki7.getProfile()
# profile.displayName = string
# ki7.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = ki8.getProfile()
# profile.displayName = string
# ki8.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = ki9.getProfile()
# profile.displayName = string
# ki9.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k1.getProfile()
# profile.displayName = string
# k1.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k2.getProfile()
# profile.displayName = string
# k2.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k3.getProfile()
# profile.displayName = string
# k3.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k4.getProfile()
# profile.displayName = string
# k4.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k5.getProfile()
# profile.displayName = string
# k5.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k6.getProfile()
# profile.displayName = string
# k6.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k7.getProfile()
# profile.displayName = string
# k7.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k8.getProfile()
# profile.displayName = string
# k8.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = k9.getProfile()
# profile.displayName = string
# k9.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w1.getProfile()
# profile.displayName = string
# w1.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w2.getProfile()
# profile.displayName = string
# w2.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w3.getProfile()
# profile.displayName = string
# w3.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w4.getProfile()
# profile.displayName = string
# w4.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w5.getProfile()
# profile.displayName = string
# w5.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w6.getProfile()
# profile.displayName = string
# w6.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w7.getProfile()
# profile.displayName = string
# w7.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w8.getProfile()
# profile.displayName = string
# w8.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = w9.getProfile()
# profile.displayName = string
# w9.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = l1.getProfile()
# profile.displayName = string
# l1.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = l2.getProfile()
# profile.displayName = string
# l2.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = l3.getProfile()
# profile.displayName = string
# l3.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = l4.getProfile()
# profile.displayName = string
# l4.updateProfile(profile)
# if len(string.decode('utf-8')) <= 20:
# profile = l5.getProfile()
# profile.displayName = string
# l5.updateProfile(profile)
elif "Allbio: " in msg.text:
string = msg.text.replace("Allbio: ","")
if len(string.decode('utf-8')) <= 500:
profile = ki.getProfile()
profile.statusMessage = string
ki.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki2.getProfile()
profile.statusMessage = string
ki2.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki3.getProfile()
profile.statusMessage = string
ki3.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki4.getProfile()
profile.statusMessage = string
ki4.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki5.getProfile()
profile.statusMessage = string
ki5.updateProfile(profile)
if len(string.decode('utf-8')) <= 500:
profile = ki6.getProfile()
profile.statusMessage = string
ki6.updateProfile(profile)
#--------------------------------------------------------
elif "Out: " in msg.text:
ng = msg.text.replace("Out: ","")
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).name
if h == ng:
cl.sendText(i,"Bye "+h+"~")
cl.leaveGroup(i)
cl.sendText(msg.to,"Sukses Keluar Dari Grup ["+ h +"] ~")
else:
pass
#-----------------------------------------------
#---------------------------------------------------------
elif msg.text in ["Mibot"]:
#msg.contentType = 13
#msg.contentMetadata = {'mid': mid}
#cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
#--------------------------
#---------------------------------------------------------
elif "1pro: " in msg.text:
string = msg.text.replace("1pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki.getProfile()
profile.displayName = string
ki.updateProfile(profile)
ki.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "2pro: " in msg.text:
string = msg.text.replace("2pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki2.getProfile()
profile.displayName = string
ki2.updateProfile(profile)
ki2.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "3pro: " in msg.text:
string = msg.text.replace("3pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki3.getProfile()
profile.displayName = string
ki3.updateProfile(profile)
ki3.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "4pro: " in msg.text:
string = msg.text.replace("4pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki4.getProfile()
profile.displayName = string
ki4.updateProfile(profile)
ki4.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "5pro: " in msg.text:
string = msg.text.replace("5pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki5.getProfile()
profile.displayName = string
ki5.updateProfile(profile)
ki5.sendText(msg.to,"??Update Names👉" + string + "👈")
#--------------------------------------------------------
elif "6pro: " in msg.text:
string = msg.text.replace("6pro: ","")
if len(string.decode('utf-8')) <= 20:
profile = ki6.getProfile()
profile.displayName = string
ki6.updateProfile(profile)
ki6.sendText(msg.to,"Update Names👉" + string + "👈")
#--------------------------------------------------------
elif msg.text.lower() == "runtime":
eltime = time.time() - mulai
dan = "「Waktu Keaktifan Bot」\n"+waktu(eltime)
cl.sendText(msg.to,dan)
elif msg.text.lower() == 'runtime1':
eltime = time.time() - mulai
van = "「ᴘʟᴀsᴇ ᴡᴀɪᴛᴇ....」\nsᴇʟғʙᴏᴛ ʜᴀs ʙᴇᴇɴ ᴀᴋᴛɪᴠᴇ "+waktu(eltime)
cl.sendText(msg.to,van)
#--------------------------------------------------------
elif "Update sambutan:" in msg.text:
#if msg.from_ in admin + owner + creator:
wait["Sambutan"] = msg.text.replace("Update sambutan:","")
cl.sendText(msg.to,"ˢᵃᵐᵇᵘᵗᵃⁿ ᴬᵏᵗᶦᵛᵉ ᶜʰᵃⁿᵍᵉᵈ"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Check welcome message"]:
#if msg.from_ in admin + creator + owner:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ʸᵒᵘʳ ᴮᵒᵗ ᴹᵉˢˢᵃᵍᵉ\n\n" + wait["Sambutan"])
else:
cl.sendText(msg.to,"The automatic appending information is set as follows。\n\n" + wait["Sambutan"])
#--------------------------------------------------------
elif msg.text == ".Speed":
cl.sendText(msg.to,"「ʟᴏᴀᴅɪɴɢ....」\nᴛᴇsᴛ sᴘᴇᴇᴅ sᴇʟғʙᴏᴛ")
start = time.time()
for i in range(3000):
1+1
elsp = time.time() - start
cl.sendText(msg.to,"%sᴅᴇᴛɪᴋ" % (elsp))
elif 'Crash' in msg.text:
if msg.from_ in admsa:
msg.contentType = 13
msg.contentMetadata = {'mid': "mid,'"}
cl.sendMessage(msg)
elif msg.text.lower() == 'me':
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
#cl.sendText(msg.to,("@"+cl.getProfile().displayName,"ɪɴɪʟᴀʜ ᴀᴋᴜ ᴀᴘᴀ ᴀᴅᴀ ɴʏᴀ \nʏᴀɴɢ sᴇʀʙᴀ sᴇᴅᴇʀʜᴀɴᴀ \nᴅᴀɴ sᴇʀʙᴀ ᴋᴇᴋᴜʀᴀɴɢᴀɴ \nʙᴀʜᴋᴀɴ ᴛɪᴅᴀᴋ ᴍᴇᴍɪʟɪᴋɪ \nᴋᴇʟᴇʙɪʜᴀɴ ᴀᴘᴀᴘᴜɴ"))
cl.sendMessage(msg)
contact = cl.getContact(mid)
cu = cl.channel.getCover(mid)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
    cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
    cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
except:
pass
#cl.sendMessage(msg.to,("@"+cl.getProfile().displayName,"ɪɴɪʟᴀʜ ᴀᴋᴜ ᴀᴘᴀ ᴀᴅᴀ ɴʏᴀ \nʏᴀɴɢ sᴇʀʙᴀ sᴇᴅᴇʀʜᴀɴᴀ \nᴅᴀɴ sᴇʀʙᴀ ᴋᴇᴋᴜʀᴀɴɢᴀɴ \nʙᴀʜᴋᴀɴ ᴛɪᴅᴀᴋ ᴍᴇᴍɪʟɪᴋɪ \nᴋᴇʟᴇʙɪʜᴀɴ ᴀᴘᴀᴘᴜɴ"))
elif ".fb" in msg.text:
a = msg.text.replace(".fb","")
b = urllib.quote(a)
cl.sendText(msg.to,"「 Mencari 」\n" "Type:Mencari Info\nStatus: Proses")
cl.sendText(msg.to, "https://www.facebook.com" + b)
cl.sendText(msg.to,"「 Mencari 」\n" "Type:Mencari Info\nStatus: Sukses")
#--------------------------------------------------------
elif "Spam change:" in msg.text:
if msg.toType == 2:
wait["spam"] = msg.text.replace("Spam change:","")
cl.sendText(msg.to,"spam changed")
#==========================[Kris]===========================
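# Admin management: 'Tambah admin @name' / 'Hapus admin @name' mutate the
# in-memory admin list only, so any changes appear to be lost on restart.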
elif "Tambah admin @" in msg.text:
print "[Command]Staff add executing"
_name = msg.text.replace("Tambah admin @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
admin.append(target)
cl.sendText(msg.to,"Admin Telah Ditambahkan")
except:
pass
print "[Command]Staff add executed"
#else:
# kr.sendText(msg.to,"Command Di Tolak Jangan Sedih")
# kr.sendText(msg.to,"Sudah Menjadi Admin Maka Tidak Bisa Menjadi Admin Lagi")
#==========================[Kris]===========================
elif "Hapus admin @" in msg.text:
print "[Command]Staff remove executing"
_name = msg.text.replace("Hapus admin @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
admin.remove(target)
cl.sendText(msg.to,"Admin Telah Dihapus")
except:
pass
print "[Command]Staff remove executed"
#else:
# kr.sendText(msg.to,"Command DiTolak")
# kr.sendText(msg.to,"Admin Tidak Bisa Menggunakan")
#==========================[Kris]===========================
elif msg.text in ["Adminlist","admlist"]:
if admin == []:
cl.sendText(msg.to,"The adminlist is empty")
else:
cl.sendText(msg.to,"Sabar Dikit Boss kris.....")
mc = ""
for mi_d in admin:
mc += "☄ " +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
print "[Command]Stafflist executed"
#==========================[Kris]===========================
elif msg.text in ["Giftbycontact"]:
wait["gift"] = True
cl.sendText(msg.to,"Send Contact")
#===============Image====================#
elif msg.text in ["Mypict"]:
h = cl.getContact(mid)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
#==============Image finish================#
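# 'Gift10 @name' sends sticker package 1296261 as a gift straight to the
# mentioned member's private chat rather than to the group.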
elif "Gift10 " in msg.text:
msg.contentType = 13
nk0 = msg.text.replace("Gift10 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
cl.sendText(msg.to,_name + " Check Your Gift")
msg.contentType = 9
msg.contentMetadata= {'PRDTYPE': 'STICKER',
'STKVER': '1',
'MSGTPL': '1',
'STKPKGID': '1296261'}
msg.to = target
msg.text = None
cl.sendMessage(msg)
except:
msg.contentMetadata = {'mid': target}
#===============Image====================#
elif msg.text in ["Mycover"]:
h = cl.getContact(mid)
cu = cl.channel.getCover(mid)
path = str(cu)
cl.sendImageWithURL(msg.to, path)
#==============Image finish================#
elif msg.text in ["Backup:on"]:
if msg.from_ in admin:
if wait["Backup"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵇᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵃᶜᵗᶦᵛᵉ"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"ᵇᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵉⁿᵃᵇˡᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["Backup"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵇᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵃᶜᵗᶦᵛᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"ᵇᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵉⁿᵃᵇˡᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Backup:off"]:
if msg.from_ in admin:
if wait["Backup"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴮᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵘⁿᵃᶜᵗᶦᵛᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"ᴮᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵈᶦˢᵃᵇˡᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["Backup"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴮᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵘⁿᵃᶜᵗᶦᵛᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"ᴮᵃᶜᵏᵘᵖ ʰᵃˢ ᵇᵉᵉⁿ ᵈᶦˢᵃᵇˡᵉ\n\n"+ datetime.today().strftime('%H:%M:%S'))
#--------------------------------------------------------
elif "Bot Add @" in msg.text:
if msg.toType == 2:
if msg.from_ in admin:
print "[Command]Add executing"
_name = msg.text.replace("Bot Add @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
ki.findAndAddContactsByMid(target)
ki2.findAndAddContactsByMid(target)
ki3.findAndAddContactsByMid(target)
ki4.findAndAddContactsByMid(target)
ki5.findAndAddContactsByMid(target)
ki6.findAndAddContactsByMid(target)
#ki7.findAndAddContactsByMid(target)
#ke.findAndAddContactsByMid(target)
#ku.findAndAddContactsByMid(target)
except:
cl.sendText(msg.to,"Error")
else:
cl.sendText(msg.to,"Perintah Ditolak")
cl.sendText(msg.to,"Perintah ini Hana Untuk Owner Kami")
#--------------------------------------------------------
elif "Mayhem" in msg.text:
if msg.toType == 2:
print "Cleanse is going."
_name = msg.text.replace("Mayhem ","")
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"ᴘᴇᴍʙᴇʀsɪʜᴀɴ ᴀᴋᴀɴ ᴅɪʟᴀᴋsᴀɴᴀᴋᴀɴ")
cl.sendText(msg.to,"sᴀʏ ɢᴏᴏᴅ ʙʏᴇ ᴛᴏ ᴍᴇ")
cl.sendText(msg.to,"ᴘᴇᴍʙᴇʀsɪʜᴀɴ ᴅɪʟᴀᴋsᴀɴᴀᴋᴀɴ")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
    cl.sendText(msg.to,"Not found.")
else:
    for target in targets:
        if not target in Bots:
            try:
                klist=[cl]
                kicker=random.choice(klist)
                kicker.kickoutFromGroup(msg.to,[target])
                print (msg.to,[target])
                cl.sendText(msg.to,"ɢʀᴏᴜᴘ sᴜᴅᴀʜ ᴅɪʙᴇʀsɪʜᴋᴀɴ")
            except:
                cl.sendText(msg.to,"Group cleanse")
#-------------------------------------------------------
elif msg.text in ["Purge"]:
if msg.from_ in admin :
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
cl.sendText(msg.to,"group purge")
return
for jj in matched_list:
try:
klist=[cl]
kicker = random.choice(klist)
kicker.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
#--------------------------------------------------------
#--------------------------------------------------------
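# Read-point ("intip") tracking: 'intip on' stores the current message id as a
# checkpoint in sider.json; readers seen after that point are listed by 'intip'.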
elif "intip on" == msg.text.lower():
if msg.to in wait2['readPoint']:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to,"ˢᵉᵗᵖᵒᶦⁿᵗ ᵃˡʳᵉᵃᵈʸ ᵃᶜᵗᶦᵛᵉ")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to, "Sᴇᴛ ʀᴇᴀᴅɪɴɢ ᴘᴏɪɴᴛ:\n" + datetime.now().strftime('%H:%M:%S'))
print wait2
elif "intop off" == msg.text.lower():
if msg.to not in wait2['readPoint']:
cl.sendText(msg.to,"ˢᵉᵗ ʳᵉᵃᵈᶦⁿᵍ ᵖᵒᶦⁿᵗ ᵈᶦˢᵃᵇˡᵉ")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
cl.sendText(msg.to, "ᴰᵉˡᵉᵗᵉᵈ ᴿᵉᵃᵈʸⁿᵍ ᴾᵒᶦⁿᵗ:\n" + datetime.now().strftime('%H:%M:%S'))
elif "intip" == msg.text.lower():
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
cl.sendText(msg.to, "Reader:\nNone")
else:
chiya = []
for rom in wait2["ROM"][msg.to].items():
chiya.append(rom[1])
cmem = cl.getContacts(chiya)
zx = ""
zxc = ""
zx2 = []
xpesan = ''
for x in range(len(cmem)):
xname = str(cmem[x].displayName)
pesan = ''
pesan2 = pesan+"@a\n"
xlen = str(len(zxc)+len(xpesan))
xlen2 = str(len(zxc)+len(pesan2)+len(xpesan)-1)
zx = {'S':xlen, 'E':xlen2, 'M':cmem[x].mid}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
print zxc
msg.text = xpesan+ zxc + "\nBefore: %s\nAfter: %s"%(wait2['setTime'][msg.to],datetime.now().strftime('%H:%M:%S'))
lol ={'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
print lol
msg.contentMetadata = lol
try:
cl.sendMessage(msg)
except Exception as error:
print error
pass
else:
cl.sendText(msg.to, "ᴸᵘʳᵏᶦⁿᵍ ᴴᵃˢ ᴺᵒᵗ ᴮᵉⁿ ˢᵉᵗ.")
#==============================================
elif "Spamcontact @" in msg.text:
_name = msg.text.replace("Spamcontact @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(g.mid,"Spam")
cl.sendText(msg.to, "Done")
print " Spammed !"
#==============================================================================#
elif msg.text in ["Simisimi on","Simisimi:on"]:
settings["simiSimi"][msg.to] = True
cl.sendText(msg.to,"Simi mode On")
elif msg.text in ["Simisimi off","Simisimi:off"]:
settings["simiSimi"][msg.to] = False
cl.sendText(msg.to,"Simi mode Off")
#elif msg.text in ["Autoread on","Read:on"]:
#wait['alwayRead'] = True
#cl.sendText(msg.to,"Auto read On")
#elif msg.text in ["Autoread off","Read:off"]:
#wait['alwayRead'] = False
#cl.sendText(msg.to,"Auto read Off")
elif msg.text in ["Respontag on","Autorespon:on","Respon on","Respon:on"]:
wait["detectMention"] = True
cl.sendText(msg.to,"ᴀᴜᴛᴏ ʀᴇsᴘᴏɴ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ")
elif msg.text in ["Respontag off","Autorespon:off","Respon off","Respon:off"]:
wait["detectMention"] = False
cl.sendText(msg.to,"ᴀᴜᴛᴏ ʀᴇsᴘᴏɴ ᴅɪ ɴᴏɴᴀᴋᴛɪғᴋᴀɴ")
elif msg.text in ["Kicktag on","Autokick:on","Responkick on","Responkick:on"]:
wait["kickMention"] = True
cl.sendText(msg.to,"ʀᴇsᴘᴏɴ ᴋɪᴄᴋ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ")
elif msg.text in ["Kicktag off","Autokick:off","Responkick off","Responkick:off"]:
wait["kickMention"] = False
cl.sendText(msg.to,"ʀᴇsᴘᴏɴ ᴋɪᴄᴋ ᴅɪ ɴᴏɴᴀᴋᴛɪғᴋᴀɴ")
elif "Time" in msg.text:
if msg.toType == 2:
cl.sendText(msg.to,datetime.today().strftime('%H:%M:%S'))
#==============================================================================#
elif "Tagall" == msg.text.lower():
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
summon(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, len(nama)-1):
nm2 += [nama[j]]
summon(msg.to, nm2)
if jml > 200 and jml < 500:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, 199):
nm2 += [nama[j]]
summon(msg.to, nm2)
for k in range(200, 299):
nm3 += [nama[k]]
summon(msg.to, nm3)
for l in range(300, 399):
nm4 += [nama[l]]
summon(msg.to, nm4)
for m in range(400, len(nama)-1):
nm5 += [nama[m]]
summon(msg.to, nm5)
if jml > 500:
print "Terlalu Banyak Men 500+"
cnt = Message()
cnt.text = "Jumlah:\n" + str(jml) + " Para Jones"
cnt.to = msg.to
cl.sendMessage(cnt)
#==============================================================================#
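# Translation commands. Each handler scrapes the mobile Google Translate page
# (no API key needed) and extracts the text that follows the class="t0">
# marker. The blocks below are identical apart from the language pair, so a
# shared helper such as translate(sl, tl, text) could remove the duplication.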
elif "Id@en" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'en'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"**FROM ID**\n" + "" + kata + "\n**TO ENGLISH**\n" + "" + result + "\n**SUKSES**")
elif "En@id" in msg.text:
bahasa_awal = 'en'
bahasa_tujuan = 'id'
kata = msg.text.replace("En@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"**FROM EN**\n" + "" + kata + "\n**TO ID**\n" + "" + result + "\n**SUKSES**")
elif "Id@jp" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ja'
kata = msg.text.replace("Id@jp ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"**FROM ID**\n" + "" + kata + "\n**TO JP**\n" + "" + result + "\n**SUKSES**")
elif "Jp@id" in msg.text:
bahasa_awal = 'ja'
bahasa_tujuan = 'id'
kata = msg.text.replace("Jp@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM JP----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@th" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'th'
kata = msg.text.replace("Id@th ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO TH----\n" + "" + result + "\n------SUKSES-----")
elif "Th@id" in msg.text:
bahasa_awal = 'th'
bahasa_tujuan = 'id'
kata = msg.text.replace("Th@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM TH----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@jp" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ja'
kata = msg.text.replace("Id@jp ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO JP----\n" + "" + result + "\n------SUKSES-----")
elif "Id@ar" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ar'
kata = msg.text.replace("Id@ar ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO AR----\n" + "" + result + "\n------SUKSES-----")
elif "Ar@id" in msg.text:
bahasa_awal = 'ar'
bahasa_tujuan = 'id'
kata = msg.text.replace("Ar@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM AR----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@ko" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ko'
kata = msg.text.replace("Id@ko ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO KO----\n" + "" + result + "\n------SUKSES-----")
elif "Ko@id" in msg.text:
bahasa_awal = 'ko'
bahasa_tujuan = 'id'
kata = msg.text.replace("Ko@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM KO----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif msg.text.lower() == 'welcome':
ginfo = cl.getGroup(msg.to)
cl.sendText(msg.to,"Selamat Datang Di Grup " + str(ginfo.name))
jawaban1 = ("Selamat Datang Di Grup " + str(ginfo.name))
cl.sendText(msg.to,"Owner Grup " + str(ginfo.name) + " :\n" + ginfo.creator.displayName )
tts = gTTS(text=jawaban1, lang='id')
tts.save('tts.mp3')
cl.sendAudio(msg.to,'tts.mp3')
elif "Say-id " in msg.text:
say = msg.text.replace("Say-id ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-en " in msg.text:
say = msg.text.replace("Say-en ","")
lang = 'en'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-jp " in msg.text:
say = msg.text.replace("Say-jp ","")
lang = 'ja'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-ar " in msg.text:
say = msg.text.replace("Say-ar ","")
lang = 'ar'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Say-ko " in msg.text:
say = msg.text.replace("Say-ko ","")
lang = 'ko'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
elif "Kapan " in msg.text:
tanya = msg.text.replace("Kapan ","")
jawab = ("kapan kapan","besok","satu abad lagi","Hari ini","Tahun depan","Minggu depan","Bulan depan","Sebentar lagi")
jawaban = random.choice(jawab)
tts = gTTS(text=jawaban, lang='id')
tts.save('tts.mp3')
cl.sendAudio(msg.to,'tts.mp3')
elif "Apakah " in msg.text:
tanya = msg.text.replace("Apakah ","")
jawab = ("Ya","Tidak","Mungkin","Bisa jadi")
jawaban = random.choice(jawab)
tts = gTTS(text=jawaban, lang='id')
tts.save('tts.mp3')
cl.sendAudio(msg.to,'tts.mp3')
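# YouTube helpers scrape the public results page (no API key): 'Youtubemp4'
# sends the first hit as a video, 'ytsearch' lists result titles and URLs.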
elif 'Youtubemp4 ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtubemp4 ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class': 'yt-uix-tile-link'})
ght = ('https://www.youtube.com' + results['href'])
cl.sendVideoWithURL(msg.to, ght)
except:
cl.sendText(msg.to, "Could not find it")
elif "ytsearch " in msg.text:
query = msg.text.replace("ytsearch ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html5lib')
hasil = ""
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
hasil += ''.join((a['title'],'\nUrl : http://www.youtube.com' + a['href'],'\n\n'))
cl.sendText(msg.to,hasil)
print '[Command] Youtube Search'
elif "Lirik " in msg.text:
try:
songname = msg.text.lower().replace("Lirik ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "Wikipedia " in msg.text:
try:
wiki = msg.text.lower().replace("Wikipedia ","")
wikipedia.set_lang("id")
pesan="Title ("
pesan+=wikipedia.page(wiki).title
pesan+=")\n\n"
pesan+=wikipedia.summary(wiki, sentences=1)
pesan+="\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except:
try:
pesan="Over Text Limit! Please Click link\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Music " in msg.text:
try:
songname = msg.text.lower().replace("Music ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'This is Your Music\n'
hasil += 'Judul : ' + song[0]
hasil += '\nDurasi : ' + song[1]
hasil += '\nLink Download : ' + song[4]
cl.sendText(msg.to, hasil)
cl.sendText(msg.to, "Please Wait for audio...")
cl.sendAudioWithURL(msg.to, song[4])
except Exception as njer:
cl.sendText(msg.to, str(njer))
elif "Image " in msg.text:
search = msg.text.replace("Image ","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithURL(msg.to,path)
except:
pass
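# 'Profileig' relies on Instagram's old ?__a=1 JSON endpoint; Instagram has
# since restricted it, so this handler may fail and simply report the error.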
elif "Profileig " in msg.text:
try:
instagram = msg.text.replace("Profileig ","")
response = requests.get("https://www.instagram.com/"+instagram+"?__a=1")
data = response.json()
namaIG = str(data['user']['full_name'])
bioIG = str(data['user']['biography'])
mediaIG = str(data['user']['media']['count'])
verifIG = str(data['user']['is_verified'])
usernameIG = str(data['user']['username'])
followerIG = str(data['user']['followed_by']['count'])
profileIG = data['user']['profile_pic_url_hd']
privateIG = str(data['user']['is_private'])
followIG = str(data['user']['follows']['count'])
link = "LinkNya: " + "https://www.instagram.com/" + instagram
text = "Name : "+namaIG+"\nUsername : "+usernameIG+"\nBiography : "+bioIG+"\nFollowerNya : "+followerIG+"\nFollowingNya : "+followIG+"\nPost : "+mediaIG+"\nVerified : "+verifIG+"\nPrivate : "+privateIG+"" "\n" + link
cl.sendText(msg.to, str(text))
except Exception as e:
cl.sendText(msg.to, str(e))
elif 'instagram ' in msg.text.lower():
try:
instagram = msg.text.lower().replace("instagram ","")
html = requests.get('https://www.instagram.com/' + instagram + '/?')
soup = BeautifulSoup(html.text, 'html5lib')
data = soup.find_all('meta', attrs={'property':'og:description'})
text = data[0].get('content').split()
data1 = soup.find_all('meta', attrs={'property':'og:image'})
text1 = data1[0].get('content').split()
user = "Name: " + text[-2] + "\n"
user1 = "Username: " + text[-1] + "\n"
followers = "Followers: " + text[0] + "\n"
following = "Following: " + text[2] + "\n"
post = "Post: " + text[4] + "\n"
link = "Link: " + "https://www.instagram.com/" + instagram
detail = "********************\n"
details = "\n********************="
cl.sendText(msg.to, detail + user + user1 + followers + following + post + link + details)
cl.sendImageWithURL(msg.to, text1[0])
except Exception as njer:
cl.sendText(msg.to, str(njer))
#=============================================================================#
elif "hola" == msg.text.lower():
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
summon(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, len(nama)-1):
nm2 += [nama[j]]
summon(msg.to, nm2)
if jml > 200 and jml < 500:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, 199):
nm2 += [nama[j]]
summon(msg.to, nm2)
for k in range(200, 299):
nm3 += [nama[k]]
summon(msg.to, nm3)
for l in range(300, 399):
nm4 += [nama[l]]
summon(msg.to, nm4)
for m in range(400, len(nama)-1):
nm5 += [nama[m]]
summon(msg.to, nm5)
if jml > 500:
print "Terlalu Banyak Men 500+"
cnt = Message()
cnt.text = "Jumlah:\n" + str(jml) + " ᴘᴀʀᴀ ᴍᴏɴsᴛᴇʀ ᴅᴀʀᴀᴛ ʏᴀɴɢ sᴀɴɢᴀᴛ ʙᴜᴀs"
cnt.to = msg.to
cl.sendText(msg.to,"sᴜᴋsᴇs ᴛᴏ sᴜᴍᴏɴ ᴍᴏɴsᴛᴇʀ ᴅᴇsᴛʀᴏʏᴇᴅ")
cl.sendMessage(cnt)
#==============================================================================#
elif msg.text in ["Set all on"]:
if msg.from_ in admsa:
wait["protect"] = True
wait["cancelprotect"] = True
wait["inviteprotect"] = True
wait["linkprotect"] = True
wait["Backup"] = True
wait["Contact"] = True
wait["Sambutan"] = True
cl.sendText(msg.to,"ʙᴇʀʜᴀsɪʟ ᴍᴇɴɢᴀᴋᴛɪғᴋᴀɴ sᴇᴍᴜᴀ ᴘʀᴏᴛᴇᴄᴛ")
else:
cl.sendText(msg.to,"Khusus Alvian")
elif msg.text in ["Set all off"]:
if msg.from_ in admsa:
wait["protect"] = False
wait["cancelprotect"] = False
wait["inviteprotect"] = False
wait["linkprotect"] = False
wait["Backup"] = False
wait["Contact"] = False
wait["Sambutan"] = False
cl.sendText(msg.to,"ʙᴇʀʜᴀsɪʟ ɴᴏɴᴀᴋᴛɪғᴋᴀɴ sᴇᴍᴜᴀ ᴘʀᴏᴛᴇᴄᴛ")
else:
cl.sendText(msg.to,"Khusus Alvian")
#==============================================================================#
elif msg.text.lower().startswith(".cw "):
sep = msg.text.split(" ")
location = msg.text.lower().replace(sep[0] + " ","")
with requests.session() as web:
web.headers["user-agent"] = random.choice(wait["userAgent"])
r = web.get("http://api.corrykalam.net/apicuaca.php?kota={}".format(urllib.quote(location)))
data = r.text
data = json.loads(data)
if "result" not in data:
ret_ = "╔══[ Weather Status ]"
ret_ += "\n╠ Lokasi : " + data[0].replace("Temperatur di kota ","")
ret_ += "\n╠ " + data[1]+"°C"
ret_ += "\n╠ " + data[2] + "%"
ret_ += "\n╠ " + data[3] + " milibar"
ret_ += "\n╠ " + data[4] +"km/jam"
ret_ += "\n╚══[ Complete ]"
else:
ret_ = "[ Weather Status ] Error : Lokasi tidak ditemukan"
cl.sendText(msg.to, str(ret_))
#--------------------------------------
elif ".pt " in msg.text.lower():
pisah = msg.text.split("t ")
location = msg.text.replace(pisah[0]+"t ","")
params = {'lokasi':location}
with requests.session() as web:
r = requests.get("http://api.corrykalam.net/apisholat.php?" + urllib.urlencode(params))
data = r.text
data = json.loads(data)
if data[1] != "╠ ⌬「 Subuh」: " and data[2] != "╠ ⌬「 Dzuhur」: " and data[3] != "╠ ⌬「 Ashar」: " and data[4] != "╠ ⌬「 Maghrib」: " and data[5] != "╠ ⌬「 Isya」: ":
ret_ = "╔══『 Jadwal Sholat 』"
ret_ += "\n╠ ⌬「 Lokasi 」: " + data[0]
ret_ += "\n" + data[1]
ret_ += "\n" + data[2]
ret_ += "\n" + data[3]
ret_ += "\n" + data[4]
ret_ += "\n" + data[5]
ret_ += "\n╚══『 Jadwal Sholat 』"
else:
ret_ = "[ Prayer Schedule ] Error : Location not found"
cl.sendText(msg.to, str(ret_))
#----------------------------------------
elif ".lc " in msg.text.lower():
pisah = msg.text.split("c ")
location = msg.text.replace(pisah[0]+"c ","")
params = {'lokasi':location}
with requests.session() as web:
web.headers["user-agent"] = random.choice(wait["userAgent"])
r = requests.get("http://api.corrykalam.net/apiloc.php?"+ urllib.urlencode(params))
data = r.text
data = json.loads(data)
if data[0] != "" and data[1] != "" and data[2] != "":
link = "https://www.google.co.id/maps/@{},{},15z".format(str(data[1]), str(data[2]))
ret_ = "╔══『 Detail Location 』"
ret_ += "\n╠ ⌬「 Lokasi」: " + data[0]
ret_ += "\n╠ ⌬「 Google Maps 」: " + link
ret_ += "\n╚══『 Detail Location 』"
else:
ret_ = "[ Details Location ] Error : Location not found"
cl.sendText(msg.to, str(ret_))
#--------------------------------------
elif '.wp ' in msg.text.lower():
if msg.from_ in admin:
try:
wiki = msg.text.lower().replace(".wp ","")
wikipedia.set_lang("id")
pesan="「 "
pesan+=wikipedia.page(wiki).title
pesan+=" 」\n\n"
pesan+=wikipedia.summary(wiki, sentences=1)
pesan+="\n「 "
pesan+=wikipedia.page(wiki).url
pesan+=" 」"
cl.sendText(msg.to, pesan)
except:
try:
pesan="Jumlah text kebanyakan~\nUntuk info lebih lanjut silahkan klik link dibawah ini.\n"
pesan+="「 "+wikipedia.page(wiki).url+" 」"
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
#--------------------------------------
#==============================================================================#
#----------------SEARCH SECTION----------------------------------------
elif ".ms " in msg.text:
songname = msg.text.replace(".ms ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4])
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Selamat Mendengarkan Lagu " + song[0])
elif ".lr" in msg.text.lower():
sep = msg.text.split(" ")
search = msg.text.replace(sep[0] + " ","")
params = {'songname': search}
with requests.session() as web:
web.headers["User-Agent"] = random.choice(wait["userAgent"])
r = web.get("https://ide.fdlrcn.com/workspace/yumi-apis/joox?" + urllib.urlencode(params))
try:
data = json.loads(r.text)
for song in data:
songs = song[5]
lyric = songs.replace('ti:','Title - ')
lyric = lyric.replace('ar:','Artist - ')
lyric = lyric.replace('al:','Album - ')
removeString = "[1234567890.:]"
for char in removeString:
lyric = lyric.replace(char,'')
ret_ = "╔══[ Lyric ]"
ret_ += "\n╠ Nama lagu : {}".format(str(song[0]))
ret_ += "\n╠ Durasi : {}".format(str(song[1]))
ret_ += "\n╠ Link : {}".format(str(song[3]))
ret_ += "\n╚══[ Finish ]\n{}".format(str(lyric))
cl.sendText(msg.to, str(ret_))
except:
cl.sendText(to, "Lirik tidak ditemukan")
elif ".yt " in msg.text:
query = msg.text.replace(".yt ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html5lib')
hasil = ""
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
hasil += ''.join((a['title'],'\nhttp://www.youtube.com' + a['href'],'\n\n'))
cl.sendText(msg.to,hasil)
print '[Command] Youtube Search'
#------------------------------------------------
#-------------------------------------------------
elif "Mid: " in msg.text:
mmid = msg.text.replace("Mid: ","")
msg.contentType = 13
msg.contentMetadata = {"mid":mmid}
cl.sendMessage(msg)
elif msg.text.lower() == 'contact on':
    if wait["contact"] == True:
        cl.sendText(msg.to,"ᶜᵒⁿᵗᵃᶜᵗ ᴿᵉᵃᵈʸ ᵒⁿ")
    else:
        wait["contact"] = True
        cl.sendText(msg.to,"ᴬˡˡ ᴿᵉᵃᵈʸ")
elif msg.text.lower() == 'contact off':
    if wait["contact"] == False:
        cl.sendText(msg.to,"ᴿᵉᵃᵈʸ ᵒᶠᶠ")
    else:
        wait["contact"] = False
        cl.sendText(msg.to,"already off👈")
elif msg.text.lower() == 'protect on':
    wait["protect"] = True
    cl.sendText(msg.to,"ᴿᵉᵃᵈʸ ᴾʳᵒᵗᵉᶜᵗ ᴬᶜᵗᶦᵛᵉᵈ")
elif msg.text.lower() == 'qrprotect on':
    wait["linkprotect"] = True
    cl.sendText(msg.to,"ᴸᶦⁿᵏ ᴾʳᵒᵗᵉᶜᵗ ᴬᶜᵗᶦᵛᵉᵈ")
elif msg.text.lower() == 'inviteprotect on':
    wait["inviteprotect"] = True
    cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
elif msg.text.lower() == 'cancelprotect on':
    wait["cancelprotect"] = True
    cl.sendText(msg.to,"ᶜᵃⁿᶜᵉˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
elif msg.text.lower() == 'auto join on':
    wait["autoJoin"] = True
    cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴶᵒᶦⁿ ᴿᵉᵃᵈʸ ᴬᶜᵗᶦᵛᵉ")
elif msg.text in ["Allprotect on","Panick:on"]:
if msg.from_ in admin:
if wait["inviteprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
else:
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
else:
wait["inviteprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
if wait["cancelprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
else:
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ")
else:
wait["cancelprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᶜᵃⁿᶜᵉˡ ᵃˡᵉʳᵈʸ ᵃᶜᵗᶦᵛᵉ ᵖʳᵒᵗᵉᶜᵗ")
if wait["protect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᶜᵃⁿᶜᵉˡ ᵃˡᵉʳᵈʸ ᵃᶜᵗᶦᵛᵉ ᵖʳᵒᵗᵉᶜᵗ")
else:
cl.sendText(msg.to,"ᶜᵃⁿᶜᵉˡ ᵃˡᵉʳᵈʸ ᵃᶜᵗᶦᵛᵉ ᵖʳᵒᵗᵉᶜᵗ")
else:
wait["protect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵃˡᵉʳᵈʸ ᵃᶜᵗᶦᵛᵉ ᵖʳᵒᵗᵉᶜᵗ")
else:
cl.sendText(msg.to,"ᵃˡᵉʳᵈʸ ᵃᶜᵗᶦᵛᵉ ᵖʳᵒᵗᵉᶜᵗ")
if wait["linkprotect"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ ᴬᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ ᴬᶜᵗᶦᵛᵉ")
else:
wait["linkprotect"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ ᴬᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴿᵉᵃᵈʸ ᴬᶜᵗᶦᵛᵉ")
elif msg.text in ["Allprotect off","Panick:off"]:
if msg.from_ in admin:
if wait["inviteprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
wait["inviteprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
if wait["cancelprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
wait["cancelprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᶜᵃⁿᶜᵉˡ ᴰᶦˢᵃᵇˡᵉ")
if wait["protect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
wait["protect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
if wait["linkprotect"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
wait["linkprotect"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
elif msg.text.lower() == 'auto join off':
    wait["autoJoin"] = False
    cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴶᵒᶦⁿ ᵈᶦˢᵃᵇˡᵉ")
elif msg.text in ["Protect off"]:
    wait["protect"] = False
    cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
elif msg.text in ["Qrprotect off","qrprotect off"]:
    wait["linkprotect"] = False
    cl.sendText(msg.to,"ᵁʳˡ ᵖʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
elif msg.text in ["Inviteprotect off"]:
    wait["inviteprotect"] = False
    cl.sendText(msg.to,"ᴰᵉⁿʸᶦⁿᵛᶦᵗᵉ ᴬˡˡ ᴾʳᵒᵗᵉᶜᵗ ᴰᶦˢᵃᵇˡᵉ")
elif msg.text in ["Cancelprotect off"]:
    wait["cancelprotect"] = False
    cl.sendText(msg.to,"ᴾʳᵒᵗᵉᶜᵗ ᶜᵃⁿᶜᵉˡ ᴰᶦˢᵃᵇˡᵉ")
elif "Group cancel: " in msg.text:
try:
strnum = msg.text.replace("Group cancel: ","")
if strnum == "off":
wait["autoCancel"]["on"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Itu off undangan ditolak👈\nSilakan kirim dengan menentukan jumlah orang ketika Anda menghidupkan👈")
else:
cl.sendText(msg.to,"Off undangan ditolak👈Sebutkan jumlah terbuka ketika Anda ingin mengirim")
else:
num = int(strnum)
wait["autoCancel"]["on"] = True
wait["autoCancel"]["members"] = num
if wait["lang"] == "JP":
    cl.sendText(msg.to,strnum + " Kelompok berikut yang diundang akan ditolak secara otomatis👈")
else:
    cl.sendText(msg.to,strnum + " invited groups will now be declined automatically")
except:
if wait["lang"] == "JP":
kk.sendText(msg.to,"Nilai tidak benar👈")
else:
cl.sendText(msg.to,"Weird value🛡")
elif msg.text in ["Leave on","Auto leave: on"]:
if wait["leaveRoom"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬˡʳᵉᵃᵈʸ ᵃᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᴬˡʳᵉᵃᵈʸ ᵃᶜᵗᶦᵛᵉ")
else:
wait["leaveRoom"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬˡʳᵉᵃᵈʸ ᵃᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᴬˡʳᵉᵃᵈʸ ᵃᶜᵗᶦᵛᵉ")
elif msg.text in ["Leave off","Auto leave: off"]:
if wait["leaveRoom"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴸᵉᵃᵛᵉ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴸᵉᵃᵛᵉ ᴰᶦˢᵃᵇˡᵉ")
else:
wait["leaveRoom"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴸᵉᵃᵛᵉ ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴬᵘᵗᵒ ᴸᵉᵃᵛᵉ ᴰᶦˢᵃᵇˡᵉ")
elif msg.text in ["Share on","share on"]:
if wait["timeline"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵃᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᵃᶜᵗᶦᵛᵉ")
else:
wait["timeline"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵃᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᵃᶜᵗᶦᵛᵉ")
elif msg.text in ["Share off","share off"]:
if wait["timeline"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
wait["timeline"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
elif msg.text.lower() == 'set':
md = ""
if wait["contact"] == True: md+="Dᴀғᴛᴀʀ Sᴇᴛᴛɪɴɢ\n\n▩ ᴄᴏɴᴛᴀᴄᴛ → ᴏɴ\n"
else: md+="Dᴀғᴛᴀʀ Sᴇᴛᴛɪɴɢ\n\n▩ ᴄᴏɴᴛᴀᴄᴛ → ᴏғғ\n"
if wait["autoJoin"] == True: md+="▩ ᴀᴜᴛᴏ ᴊᴏɪɴ → ᴏɴ\n"
else: md+="▩ ᴀᴜᴛᴏ ᴊᴏɪɴ → ᴏғғ\n"
if wait["autoCancel"]["on"] == True:md+="▩ ᴀᴜᴛᴏ ᴄᴀɴᴄᴇʟ: " + str(wait["autoCancel"]["members"]) + " → ᴏɴ\n"
else: md+="▩ ᴀᴜᴛᴏ ᴄᴀɴᴄᴇʟ → ᴏғғ\n"
if wait["leaveRoom"] == True: md+="▩ ᴀᴜᴛᴏ ʟᴇᴀᴠᴇ → ᴏɴ\n"
else: md+="▩ ᴀᴜᴛᴏ ʟᴇᴀᴠᴇ → ᴏғғ\n"
if wait["timeline"] == True: md+="▩ sʜᴀʀᴇ → ᴏɴ\n"
else:md+="▩ sʜᴀʀᴇ → ᴏғғ\n"
if wait["autoAdd"] == True: md+="▩ ᴀᴜᴛᴏ ᴀᴅᴅ → ᴏɴ\n"
else:md+="▩ ᴀᴜᴛᴏ ᴀᴅᴅ → ᴏғғ\n"
if wait["commentOn"] == True: md+="▩ ᴄᴏᴍᴍᴇɴᴛ → ᴏɴ\n"
else:md+="▩ ᴄᴏᴍᴍᴇɴᴛ → ᴏғғ\n"
if wait["protect"] == True: md+="▩ ᴘʀᴏᴛᴇᴄᴛɪᴏɴ → ᴏɴ\n"
else:md+="▩ ᴘʀᴏᴛᴇᴄᴛɪᴏɴ → ᴏғғ\n"
if wait["linkprotect"] == True: md+="▩ ᴘʀᴏᴛᴇᴄᴛ ᴜʀʟ → ᴏɴ\n"
else:md+="▩ ᴘʀᴏᴛᴇᴄᴛ ᴜʀʟ → ᴏғғ\n"
if wait["inviteprotect"] == True: md+="▩ ᴘʀᴏᴛᴇᴄᴛ ɪɴᴠɪᴛᴇ → ᴏɴ\n"
else:md+="▩ ᴘʀᴏᴛᴇᴄᴛ ɪɴᴠɪᴛᴇ → ᴏғғ\n"
if wait["cancelprotect"] == True: md+="▩ ᴘʀᴏᴛᴇᴄᴛ ᴄᴀɴᴄᴇʟ → ᴏɴ\n"
else:md+="▩ ᴘʀᴏᴛᴇᴄᴛ ᴄᴀɴᴄᴇʟ → ᴏғғ\n"
if wait["kickMention"] == True: md+="▩ ʀᴇsᴘᴏɴ ᴋɪᴄᴋ → ᴏɴ\n"
else:md+="▩ ʀᴇsᴘᴏɴ ᴋɪᴄᴋ → ᴏғғ\n"
if wait["likeOn"] == True: md+="▩ ʟɪᴋᴇ → ᴏɴ\n"
else:md+="▩ ʟɪᴋᴇ → ᴏғғ\n"
if wait["Backup"] == True: md+="▩ ʙᴀᴄᴋᴜᴘ → ᴏɴ\n"
else:md+="▩ ʙᴀᴄᴋᴜᴘ → ᴏғғ\n"
if wait["Sambutan"] == True: md+="▩ ᴡᴇʟᴄᴏᴍᴇ → ᴏɴ\n"
else:md+="▩ ᴡᴇʟᴄᴏᴍᴇ → ᴏғғ\n"
if wait["tag"] == True: md+="▩ ʀᴇsᴘᴏɴ ᴛᴀɢ → ᴏɴ\n"
else:md+="▩ ʀᴇsᴘᴏɴ ᴛᴀɢ → ᴏғғ\n"
if wait["detectMention"] == True: md+="▩ Tᴀɢ → ᴏɴ\n\nPᴏᴡᴇʀᴇᴅ ʙʏ: \n==[ DAENG TEAM BOT ]=="
else:md+="▩ Tᴀɢ → ᴏғғ\n\nPᴏᴡᴇʀᴇᴅ ʙʏ:\n==[ DAENG TEAM BOT ]=="
cl.sendText(msg.to,md)
elif msg.text in ["Like:on"]:
if wait["likeOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
else:
wait["likeOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
elif msg.text in ["いいね:オフ","Like:off"]:
if wait["likeOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
wait["likeOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
elif msg.text in ["Add on","Add auto on"]:
if wait["autoAdd"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
else:
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
else:
wait["autoAdd"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
else:
cl.sendText(msg.to,"ᴬᶜᵗᶦᵛᵉᵈ")
elif msg.text in ["Add off","Add auto off"]:
if wait["autoAdd"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
wait["autoAdd"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴰᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"Untuk mengaktifkan-off👈")
elif "Message set: " in msg.text:
wait["message"] = msg.text.replace("Message set: ","")
cl.sendText(msg.to,"We changed the message👈")
elif "Help set: " in msg.text:
wait["help"] = msg.text.replace("Help set: ","")
cl.sendText(msg.to,"We changed the Help👈")
elif "Pesan add: " in msg.text:
wait["message"] = msg.text.replace("Pesan add: ","")
if wait["lang"] == "JP":
cl.sendText(msg.to,"Kami mengubah pesan🛡")
else:
cl.sendText(msg.to,"Change information")
elif msg.text in ["Pesan add cek","Message Confirmation"]:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Additional information is automatically set to the following \n\n" + wait["message"])
else:
cl.sendText(msg.to,"Pesan tambahan otomatis telah ditetapkan sebagai berikut \n\n" + wait["message"])
elif msg.text in ["Change","change"]:
if wait["lang"] =="JP":
wait["lang"] = "TW"
cl.sendText(msg.to,"I changed the language to engglis👈")
else:
wait["lang"] = "JP"
cl.sendText(msg.to,"I changed the language to indonesia👈")
elif "Message set: " in msg.text:
c = msg.text.replace("Message set: ","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"Is a string that can not be changed👈")
else:
wait["comment"] = c
cl.sendText(msg.to,"This has been changed👈\n\n" + c)
elif "Comment set: " in msg.text:
c = msg.text.replace("Comment set: ","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"Merupakan string yang tidak bisa diubah👈")
else:
wait["comment"] = c
cl.sendText(msg.to,"Ini telah diubah👈\n\n" + c)
elif msg.text in ["Com on","Com:on","Comment on"]:
if wait["commentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Aku berada di👈")
else:
cl.sendText(msg.to,"To open👈")
else:
wait["commentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴀᴜᴛᴏ ᴄᴏᴍᴍᴇɴᴛ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ")
else:
cl.sendText(msg.to,"ᴄᴏᴍᴍᴇɴᴛ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ")
elif msg.text in ["Com off"]:
if wait["commentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Hal ini sudah off")
else:
cl.sendText(msg.to,"It is already turned off")
else:
wait["commentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Off👈")
else:
cl.sendText(msg.to,"To turn off")
elif msg.text in ["Com","Comment"]:
cl.sendText(msg.to,"Auto komentar saat ini telah ditetapkan sebagai berikut:👈\n\n" + str(wait["comment"]))
elif msg.text in ["Com Bl"]:
wait["wblack"] = True
cl.sendText(msg.to,"Please send contacts from the person you want to add to the blacklistô€œô€…”👈")
elif msg.text in ["Com hapus Bl"]:
wait["dblack"] = True
cl.sendText(msg.to,"Please send contacts from the person you want to add from the blacklistô€œô€…”👈")
elif msg.text in ["Com Bl cek"]:
if wait["commentBlack"] == {}:
cl.sendText(msg.to,"Nothing in the blacklistô€œ🛡")
else:
cl.sendText(msg.to,"The following is a blacklistô€œ👈")
mc = ""
for mi_d in wait["commentBlack"]:
mc += "・" +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
elif msg.text.lower() == 'jam on':
if wait["clock"] == True:
cl.sendText(msg.to,"Sudah On")
else:
wait["clock"] = True
now2 = datetime.now()
nowT = datetime.strftime(now2,"(%H:%M)")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"👉Jam on👈")
elif msg.text.lower() == 'jam off':
if wait["clock"] == False:
cl.sendText(msg.to,"Hal ini sudah off🛡")
else:
wait["clock"] = False
cl.sendText(msg.to,"Adalah Off")
elif "Jam say: " in msg.text:
n = msg.text.replace("Jam say: ","")
if len(n.decode("utf-8")) > 30:
cl.sendText(msg.to,"terlalu lama")
else:
wait["cName"] = n
cl.sendText(msg.to,"Ini telah diubah🛡\n\n" + n)
elif msg.text.lower() == 'update':
if wait["clock"] == True:
now2 = datetime.now()
nowT = datetime.strftime(now2,"(%H:%M)")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"Diperbarui👈")
else:
cl.sendText(msg.to,"Silahkan Aktifkan Nama")
elif "lurk on" == msg.text.lower():
if msg.to in wait2['readPoint']:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to," 「Lurking already on」")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to, "「Set Reading Sider System」\n" + datetime.now().strftime('%H:%M:%S'))
print wait2
elif "lurk off" == msg.text.lower():
if msg.to not in wait2['readPoint']:
cl.sendText(msg.to," 「Lurking Already off」")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
cl.sendText(msg.to, "「Delete reading point」\n" + datetime.now().strftime('%H:%M:%S'))
elif "lurkers" == msg.text.lower():
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
cl.sendText(msg.to, "Reader:\nNone")
else:
chiya = []
for rom in wait2["ROM"][msg.to].items():
chiya.append(rom[1])
cmem = cl.getContacts(chiya)
zx = ""
zxc = ""
zx2 = []
xpesan = ''
for x in range(len(cmem)):
xname = str(cmem[x].displayName)
pesan = ''
pesan2 = pesan+"@a\n"
xlen = str(len(zxc)+len(xpesan))
xlen2 = str(len(zxc)+len(pesan2)+len(xpesan)-1)
zx = {'S':xlen, 'E':xlen2, 'M':cmem[x].mid}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
print zxc
msg.text = xpesan+ zxc + "\nBefore: %s\nAfter: %s"%(wait2['setTime'][msg.to],datetime.now().strftime('%H:%M:%S'))
lol ={'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
print lol
msg.contentMetadata = lol
try:
cl.sendMessage(msg)
except Exception as error:
print error
pass
#-----------------------[Add Staff Section]------------------------
elif "About1" in msg.text: #ABOUT BOT#
if msg.from_ in admin:
x = "「ᴀʙᴏᴜᴛ ʙᴏᴛ」\nsᴇʟғʙᴏᴛ “ʙᴏᴛ” ᴇᴅɪᴛɪᴏɴ♪\n"
x+="ᴛɪᴍᴇ: " + datetime.today().strftime('%H:%M:%S') + " \n\n"
x+="「ʙᴏᴛ Iɴғᴏʀᴍᴀᴛɪᴏɴ」\n"
x+="╠═════════════════════\n"
x+="╠ ʙᴏᴛ ᴄʀᴇᴀᴛᴇᴅ ɪɴ: 06-01-2018\n"
x+="╠️ ʙᴏᴛ ᴄʀᴇᴀᴛᴏʀ: 「ᵃˡᵛᶦᵃⁿᴘᴜᴛʀᴀ」\n"
x+="╠️ ʙᴏᴛ Oᴡɴᴇʀ: "+cl.getProfile().displayName+"\n"
x+="╠️ ʙᴏᴛ ᴛʏᴘᴇ: Sᴇʟғʙᴏᴛ\n"
x+="╠️ ᴍʏᴛᴇᴀᴍ: sᴋɪʟᴇʀʀ ʜᴏʟʟᴏᴡ\n"
x+="╠️ Vᴇʀsɪᴏɴ: 1.01\n"
x+="╠═════════════════════\n"
x+= "「Cᴏɴᴛᴀᴄᴛ Pᴇʀsᴏɴᴀʟ」\n"
x+="「ID LINE: line://ti/p/~alvian_putra777 」\n\n"
x+="「ᵃˡᵛᶦᵃⁿᴘᴜᴛʀᴀ」"
cl.sendText(msg.to,x)
msg.contentType = 13
msg.contentMetadata = {'mid': admin[0]}  # a contact card takes a single mid; admin is assumed to be a list of admin mids
cl.sendMessage(msg)
#-----------------------------------------------------------
elif '/ti/g/' in msg.text.lower():
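# Extract group ticket ids from invite links (line://ti/g/... or line.me/R/ti/g/...)
# and, when auto-join-by-ticket is enabled, join each referenced group once.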
link_re = re.compile(r'(?:line\:\/|line\.me\/R)\/ti\/g\/([a-zA-Z0-9_-]+)?')
links = link_re.findall(msg.text)
n_links=[]
for l in links:
if l not in n_links:
n_links.append(l)
for ticket_id in n_links:
if wait["atjointicket"] == True:
group=cl.findGroupByTicket(ticket_id)
cl.acceptGroupInvitationByTicket(group.id,ticket_id)
cl.sendText(msg.to,"Sukses join ke grup %s" % str(group.name))
#---------------------------------------------------
elif ("Maav!" in msg.text):
if msg.from_ in admin:
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
except:
cl.sendText(msg.to,"Error")
#-----------------------------------------------------------
elif "Gift @" in msg.text:
_name = msg.text.replace("Gift @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentType = 9
msg.contentMetadata={'PRDID': '89131c1a-e549-4bd5-9e60-e24de0d2e252',
'PRDTYPE': 'THEME',
'MSGTPL': '10'}
msg.text = None
cl.sendMessage(msg,g)
cl.sendMessage(msg,g)
#----------------------------------------------------------------
elif "Mode on" in msg.text:
try:
del cctv['point'][msg.to]
del cctv['sidermem'][msg.to]
del cctv['cyduk'][msg.to]
except:
pass
cctv['point'][msg.to] = msg.id
cctv['sidermem'][msg.to] = ""
cctv['cyduk'][msg.to]=True
wait["Sider"] = True
cl.sendText(msg.to,"「ᴍᴏᴅᴇ ᴏɴ sɪᴅᴇʀ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ」")
elif "Mode off" in msg.text:
if msg.to in cctv['point']:
cctv['cyduk'][msg.to]=False
wait["Sider"] = False
cl.sendText(msg.to, "「ᴍᴏᴅᴇ ᴏɴ sɪᴅᴇʀ ᴅɪ ɴᴏɴᴀᴋᴛɪғᴋᴀɴ」")
else:
cl.sendText(msg.to, "「ᴍᴏᴅᴇ ᴏɴ ᴅᴜʟᴜ ᴅᴏᴅᴏʟ」")
#-----------------------------------------------------------
elif msg.text in ["Friendlist"]:
contactlist = cl.getAllContactIds()
kontak = cl.getContacts(contactlist)
num=1
msgs="═════════List Friend═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Friend═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Memlist"]:
kontak = cl.getGroup(msg.to)
group = kontak.members
num=1
msgs="═════════List Member═════════-"
for ids in group:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Member═════════\n\nTotal Members : %i" % len(group)
cl.sendText(msg.to, msgs)
elif "Friendinfo: " in msg.text:
saya = msg.text.replace('Friendinfo: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
contact = cl.getContact(i)
cu = cl.channel.getCover(i)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
if h == saya:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
elif "Friendpict: " in msg.text:
saya = msg.text.replace('Friendpict: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
gna = cl.getContact(i)
if h == saya:
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
#-----------------------------------------------------------
elif msg.text in ["Kalender","Time","Waktu"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
bln = bulan[inihari.month - 1]  # month is 1-indexed
rst = hasil + ", " + inihari.strftime('%d') + " - " + bln + " - " + inihari.strftime('%Y') + "\nTime : [ " + inihari.strftime('%H:%M:%S') + " ]"
cl.sendText(msg.to, rst)
#-----------------------------------------------------------
elif "tagmem" == msg.text:
group = cl.getGroup(msg.to)
mem = [contact.mid for contact in group.members]
for mm in mem:
xname = cl.getContact(mm).displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" "
msg.contentMetadata = {'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(mm)+'}]}','EMTVER':'4'}
try:
cl.sendMessage(msg)
except Exception as e:
print str(e)
elif msg.text in ["Tagall","Tag all"]:
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
cb = ""
cb2 = ""
strt = int(0)
akh = int(0)
for md in nama:
akh = akh + int(6)
cb += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(md)+"},"""
strt = strt + int(7)
akh = akh + 1
cb2 += "@nrik \n"
cb = (cb[:int(len(cb)-1)])
msg.contentType = 0
msg.text = cb2
msg.contentMetadata ={'MENTION':'{"MENTIONEES":['+cb+']}','EMTVER':'4'}
try:
cl.sendMessage(msg)
#cl.sendText(receiver, "Members :"+str(jml))
except Exception as error:
print error
#-----------------------------------------------------------)
elif msg.text in ["Papay"]:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
cl.sendText(msg.to,"Bye Bye😘 " + str(ginfo.name) + "")
cl.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------------------
elif msg.text in ["About"]:
today = datetime.today()
future = datetime(2020,01,31)
days = (str(future - today))
comma = days.find(",")
days = days[:comma]
cl.sendText(msg.to,"「ᴀʙᴏᴜᴛ 」\n「ᴛᴇᴀᴍ sᴋɪʟᴇʀʀ ʜᴏʟʟᴏᴡ」 \n 「ᴘᴏᴡᴇʀᴇᴅ ʙʏ : ᴀʟᴠɪᴀɴ ᴘᴜᴛʀᴀ」 \n 「sᴇʟғʙᴏᴛ ᴇᴅɪᴛɪᴏɴ」 \n\n「Subscription」\nTeam bot skiller hollow \nDont kick me from groups \nᴍᴀsᴀ ᴀᴋᴛɪғ sᴇʟғʙᴏᴛ \n Expired: " + "\n In days: " + days + "\n\n「Contact」\n・ LINE me \n http://line.me/ti/p/~alvian_putra777")
msg.contentType = 13
msg.contentMetadata = {'mid':mid}
cl.sendMessage(msg)
#-----------------------------------------------------------
#----------------------ADMIN COMMAND------------------------------#
elif ("Fuck " in msg.text):
if msg.from_ in admin:
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
#cl.sendText(msg.to,"Fᴜᴄᴋ Fᴏʀ Yᴏᴜ Iᴅɪᴏᴛs"
except:
cl.sendText(msg.to,"Error")
elif "Halo" in msg.text:
group = cl.getGroup(msg.to)
k = len(group.members)//100
for j in xrange(k+1):
msg = Message(to=msg.to)
txt = u''
s=0
d=[]
for i in group.members[j*100 : (j+1)*100]:
d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
s += 9
txt += u'@Krampus\n'
msg.text = txt
msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
cl.sendMessage(msg)
elif "Kickall" in msg.text:
if msg.from_ in admin:
nk0 = msg.text.replace("Kickall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("all","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"user does not exist")
pass
else:
for target in targets:
if not target in Bots:
if not target in admin:
try:
klist=[cl]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"Sukses Bosqu")
cl.sendText(msg.to,"masih mauko sundala")
elif msg.text in ["List grup"]:
if msg.from_ in admin:
gid = cl.getGroupIdsJoined()
h = "===[List Groups]==="
total = str(len(gid))
for i in gid:
if i is not None:
try:
groups = cl.getGroup(i)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
h += "\n[" + groups.name + "] ->(" + members +")\n -+GroupID : " + i
except:
break
else:
break
if gid is not None:
cl.sendText(msg.to,h + "\n|[Total Groups]| : " + str(total))
else:
cl.sendText(msg.to,"Tidak ada grup saat ini")
ginv = cl.getGroupIdsInvited()
j = "===[List Groups Invited]==="
totals = str(len(ginv))
for z in ginv:
if z is not None:
try:
groups = cl.getGroup(z)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
j += "\n[" + groups.name + "] ->(" + members + ")\n -+GroupID : " + i
except:
break
else:
break
if ginv is not None:
cl.sendText(msg.to,j + "\n|[Total Groups Invited]| : " + str(totals))
else:
cl.sendText(msg.to,"Tidak ada grup tertunda saat ini")
elif msg.text in ["Info grup"]:
if msg.from_ in admin:
gid = cl.getGroupIdsJoined()
cl.sendText(msg.to,"===[List Details Group]===")
total = str(len(gid))
for i in gid:
if i is not None:
try:
groups = cl.getGroup(i)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
h = "[" + groups.name + "]\n -+GroupID : " + i + "\n -+Members : " + members + "\n -+MembersPending : " + pendings + "\n -+Creator : " + groups.creator.displayName
except:
break
else:
break
if gid is not None:
cl.sendText(msg.to,h)
cl.sendText(msg.to,"|[Total Groups]| : " + str(total))
else:
cl.sendText(msg.to,"Tidak ada grup saat ini")
ginv = cl.getGroupIdsInvited()
cl.sendText(msg.to,"===[List Details Groups Invited]===")
totals = str(len(ginv))
for z in ginv:
if z is not None:
try:
groups = cl.getGroup(z)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
j = "[" + groups.name + "]\n -+GroupID : " + i + "\n -+Members : " + members + "\n -+MembersPending : " + pendings + "\n -+Creator : " + groups.creator.displayName
except:
break
else:
break
if ginv is not None:
cl.sendText(msg.to,j)
cl.sendText(msg.to,"|[Total Groups Invited]| : " + str(totals))
else:
cl.sendText(msg.to,"Tidak ada grup tertunda saat ini")
elif "Details grup: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("/DetailsGroup: ","")
if gid in [""," "]:
cl.sendText(msg.to,"Grup id tidak valid")
else:
try:
groups = cl.getGroup(gid)
if groups.members is not None:
members = str(len(groups.members))
else:
members = "0"
if groups.invitee is not None:
pendings = str(len(groups.invitee))
else:
pendings = "0"
h = "[" + groups.name + "]\n -+GroupID : " + gid + "\n -+Members : " + members + "\n -+MembersPending : " + pendings + "\n -+Creator : " + groups.creator.displayName + "\n -+GroupPicture : http://dl.profile.line.naver.jp/" + groups.pictureStatus
cl.sendText(msg.to,h)
except Exception as error:
cl.sendText(msg.to,(error))
elif "Cancel invite: " in msg.text:
if msg.from_ in admin:
gids = msg.text.replace("Cancel invite: ","")
gid = cl.getGroup(gids)
for i in gid:
if i is not None:
try:
cl.rejectGroupInvitation(i)
except:
cl.sendText(msg.to,"Error!")
break
else:
break
if gid is not None:
cl.sendText(msg.to,"Berhasil tolak undangan dari grup " + gid.name)
else:
cl.sendText(msg.to,"Grup tidak ditemukan")
elif msg.text in ["Accept invite"]:
if msg.from_ in admin:
gid = cl.getGroupIdsInvited()
_list = ""
for i in gid:
if i is not None:
gids = cl.getGroup(i)
_list += gids.name
cl.acceptGroupInvitation(i)
else:
break
if gid is not None:
cl.sendText(msg.to,"Accepted all pending group invitations:\n" + _list)
else:
cl.sendText(msg.to,"No pending group invitations right now")
elif "Myname: " in msg.text:
string = msg.text.replace("Myname: ","")
if len(string.decode('utf-8')) <= 20:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Update Bio" + string)
elif "Mybio: " in msg.text:
string = msg.text.replace("Mybio: ","")
if len(string.decode('utf-8')) <= 500:
profile = cl.getProfile()
profile.statusMessage = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Update Bio" + string)
elif ("Gname: " in msg.text):
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.name = msg.text.replace("Gname: ","")
cl.updateGroup(group)
else:
cl.sendText(msg.to,"Tidak Dapat Mengubah Nama Grup")
elif "Kick: " in msg.text:
if msg.from_ in admin:
midd = msg.text.replace("Kick: ","")
cl.kickoutFromGroup(msg.to,[midd])
elif "Invite: " in msg.text:
if msg.from_ in admin:
midd = msg.text.replace("Invite: ","")
cl.findAndAddContactsByMid(midd)
cl.inviteIntoGroup(msg.to,[midd])
elif "Mysteal @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Mysteal @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendImageWithURL(msg.to, path)
except:
pass
print "[Command]dp executed"
elif "Mycopy @" in msg.text:
if msg.toType == 2:
if msg.from_ in admin:
print "[COPY] Ok"
_name = msg.text.replace("Mycopy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
cl.cloneContactProfile(target)
cl.sendText(msg.to, "Sukses Copy Profile")
except Exception as e:
print e
elif "Copy @" in msg.text:
if msg.toType == 2:
if msg.from_ in admin:
print "[COPY] Ok"
_name = msg.text.replace("Copy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Tidak Ada Target Copy")
else:
for target in targets:
try:
ki.cloneContactProfile(target)
ki2.cloneContactProfile(target)
ki3.cloneContactProfile(target)
ki4.cloneContactProfile(target)
ki5.cloneContactProfile(target)
ki6.cloneContactProfile(target)
ki7.cloneContactProfile(target)
ki8.cloneContactProfile(target)
#ki9.cloneContactProfile(target)
#k1.cloneContactProfile(target)
#k2.cloneContactProfile(target)
#k3.cloneContactProfile(target)
#k4.cloneContactProfile(target)
#k5.cloneContactProfile(target)
#k6.cloneContactProfile(target)
#k7.cloneContactProfile(target)
#k8.cloneContactProfile(target)
#k9.cloneContactProfile(target)
#w1.cloneContactProfile(target)
#w2.cloneContactProfile(target)
#w3.cloneContactProfile(target)
#w4.cloneContactProfile(target)
#w5.cloneContactProfile(target)
#w6.cloneContactProfile(target)
#w7.cloneContactProfile(target)
#w8.cloneContactProfile(target)
#w9.cloneContactProfile(target)
#l1.cloneContactProfile(target)
#l2.cloneContactProfile(target)
#l3.cloneContactProfile(target)
#l4.cloneContactProfile(target)
#k5.cloneContactProfile(target)
cl.sendText(msg.to, "Sukses Copy Profile")
except Exception as e:
print e
elif msg.text in ["Mybackup"]:
try:
cl.updateDisplayPicture(mybackup.pictureStatus)
cl.updateProfile(mybackup)
cl.sendText(msg.to, "Backup Sukses Bosqu")
except Exception as e:
cl.sendText(msg.to, str (e))
elif msg.text in ["Backup"]:
try:
ki.updateDisplayPicture(backup.pictureStatus)
ki.updateProfile(backup)
ki2.updateDisplayPicture(backup.pictureStatus)
ki2.updateProfile(backup)
ki3.updateDisplayPicture(backup.pictureStatus)
ki3.updateProfile(backup)
ki4.updateDisplayPicture(backup.pictureStatus)
ki4.updateProfile(backup)
ki5.updateDisplayPicture(backup.pictureStatus)
ki5.updateProfile(backup)
ki6.updateDisplayPicture(backup.pictureStatus)
ki6.updateProfile(backup)
#ki7.updateDisplayPicture(backup.pictureStatus)
#ki7.updateProfile(backup)
#ki8.updateDisplayPicture(backup.pictureStatus)
#ki8.updateProfile(backup)
#ki9.updateDisplayPicture(backup.pictureStatus)
#ki9.updateProfile(backup)
#k1.updateDisplayPicture(backup.pictureStatus)
#k1.updateProfile(backup)
#k2.updateDisplayPicture(backup.pictureStatus)
#k2.updateProfile(backup)
#k3.updateDisplayPicture(backup.pictureStatus)
#k3.updateProfile(backup)
#k4.updateDisplayPicture(backup.pictureStatus)
#k4.updateProfile(backup)
#k5.updateDisplayPicture(backup.pictureStatus)
#k5.updateProfile(backup)
#k6.updateDisplayPicture(backup.pictureStatus)
#k6.updateProfile(backup)
#k7.updateDisplayPicture(backup.pictureStatus)
#k7.updateProfile(backup)
#k8.updateDisplayPicture(backup.pictureStatus)
#k8.updateProfile(backup)
#k9.updateDisplayPicture(backup.pictureStatus)
#k9.updateProfile(backup)
#w1.updateDisplayPicture(backup.pictureStatus)
#w1.updateProfile(backup)
#w2.updateDisplayPicture(backup.pictureStatus)
#w2.updateProfile(backup)
#w3.updateDisplayPicture(backup.pictureStatus)
#w3.updateProfile(backup)
#w4.updateDisplayPicture(backup.pictureStatus)
#w4.updateProfile(backup)
#w5.updateDisplayPicture(backup.pictureStatus)
#w5.updateProfile(backup)
#w6.updateDisplayPicture(backup.pictureStatus)
#w6.updateProfile(backup)
#w7.updateDisplayPicture(backup.pictureStatus)
#w7.updateProfile(backup)
#w8.updateDisplayPicture(backup.pictureStatus)
#w8.updateProfile(backup)
#w9.updateDisplayPicture(backup.pictureStatus)
#w9.updateProfile(backup)
#l1.updateDisplayPicture(backup.pictureStatus)
#wl1.updateProfile(backup)
#l2.updateDisplayPicture(backup.pictureStatus)
#l2.updateProfile(backup)
#l3.updateDisplayPicture(backup.pictureStatus)
#l3.updateProfile(backup)
#l4.updateDisplayPicture(backup.pictureStatus)
#l4.updateProfile(backup)
#l5.updateDisplayPicture(backup.pictureStatus)
#l5.updateProfile(backup)
#cl.sendText(msg.to, "Backup Sukses Bosqu")
except Exception as e:
cl.sendText(msg.to, str (e))
elif "Bc:ct " in msg.text:
bctxt = msg.text.replace("Bc:ct ", "")
a = cl.getAllContactIds()
for manusia in a:
cl.sendText(manusia, (bctxt))
elif "Bot:ct " in msg.text:
if msg.from_ in admin:
bctxt = msg.text.replace("Bot:ct ", "")
b = ki.getAllContactIds()
for manusia in b:
ki.sendText(manusia, (bctxt))
c = ki2.getAllContactIds()
for manusia in c:
ki2.sendText(manusia, (bctxt))
d = ki3.getAllContactIds()
for manusia in d:
ki3.sendText(manusia, (bctxt))
e = ki4.getAllContactIds()
for manusia in e:
ki4.sendText(manusia, (bctxt))
f = ki5.getAllContactIds()
for manusia in f:
ki5.sendText(manusia, (bctxt))
g = ki6.getAllContactIds()
for manusia in g:
ki6.sendText(manusia, (bctxt))
elif "Bc:grup " in msg.text:
bctxt = msg.text.replace("Bc:grup ", "")
a = cl.getGroupIdsJoined()
for manusia in a:
cl.sendText(manusia, (bctxt))
elif "Bot:grup " in msg.text:
if msg.from_ in admin:
bctxt = msg.text.replace("Bot:grup ", "")
b = ki.getGroupIdsJoined()
for manusia in b:
ki.sendText(manusia, (bctxt))
c = ki2.getGroupIdsJoined()
for manusia in c:
ki2.sendText(manusia, (bctxt))
d = ki3.getGroupIdsJoined()
for manusia in d:
ki3.sendText(manusia, (bctxt))
e = ki4.getGroupIdsJoined()
for manusia in e:
ki4.sendText(manusia, (bctxt))
f = ki5.getGroupIdsJoined()
for manusia in f:
ki5.sendText(manusia, (bctxt))
g = ki6.getGroupIdsJoined()
for manusia in g:
ki6.sendText(manusia, (bctxt))
elif "Spam " in msg.text:
txt = msg.text.split(" ")
jmlh = int(txt[2])
teks = msg.text.replace("Spam "+str(txt[1])+" "+str(jmlh)+" ","")
tulisan = jmlh * (teks+"\n")
if txt[1] == "on":
if jmlh <= 100000:
for x in range(jmlh):
cl.sendText(msg.to, teks)
else:
cl.sendText(msg.to, "Out of Range!")
elif txt[1] == "off":
if jmlh <= 100000:
cl.sendText(msg.to, tulisan)
else:
cl.sendText(msg.to, "Out Of Range!")
elif "Speed" in msg.text:
start = time.time()
cl.sendText(msg.to, "WAIT▒▒▒▓▓▓LOAD...99% \nsᴛᴀʀᴛɪɴɢ sᴘᴇᴇᴅ sᴇʟғʙᴏᴛ \nɪɴᴄʟᴏᴜᴅɪɴɢ.....")
elapsed_time = time.time() - start
cl.sendText(msg.to, "「%sDetik」" % (elapsed_time))
elif msg.text.lower() == ".me":
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
profile = cl.getProfile()
xname = profile.displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" :)"
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(mid)+'}]}','EMTVER':'4'}
cl.sendMessage(msg)
elif msg.text.lower() == "Tag me":
profile = cl.getProfile()
xname = profile.displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" 👍"
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(mid)+'}]}','EMTVER':'4'}
cl.sendMessage(msg)
#----------------------------------------
elif cms(msg.text,["creator","Creator"]):
msg.contentType = 13
msg.contentMetadata = {'mid': admsa}
cl.sendText(msg.to,"Mʏ ᴄʀᴇᴀᴛᴏʀ ɪɴ ʙᴏᴛs")
cl.sendMessage(msg)
cl.sendText(msg.to,"Nɪᴄᴇ ᴄᴏᴏʟʟ ᴀɴᴅ ʜᴀɴᴅsᴏᴍᴇ")
elif "Inviteme: " in msg.text:
if msg.from_ in admin:
gid = msg.text.replace("Inviteme: ","")
if gid == "":
cl.sendText(msg.to,"Invalid group id")
else:
try:
cl.findAndAddContactsByMid(msg.from_)
cl.inviteIntoGroup(gid,[msg.from_])
except:
cl.sendText(msg.to,"Mungkin saya tidak di dalaam grup itu")
elif msg.text in ["Clear grup"]:
if msg.from_ in admin:
gid = cl.getGroupIdsJoined()
gid = ki.getGroupIdsJoined()
gid = ki2.getGroupIdsJoined()
gid = ki3.getGroupIdsJoined()
gid = ki4.getGroupIdsJoined()
gid = ki5.getGroupIdsJoined()
gid = ki6.getGroupIdsJoined()
#gid = ki7.getGroupIdsJoined()
for i in gid:
ki.leaveGroup(i)
ki2.leaveGroup(i)
ki3.leaveGroup(i)
ki4.leaveGroup(i)
ki5.leaveGroup(i)
ki6.leaveGroup(i)
#ki7.leaveGroup(i)
if wait["lang"] == "JP":
cl.sendText(msg.to,"[Complite Leave All Groups]")
else:
cl.sendText(msg.to,"He declined all invitations")
elif "Ginfo" in msg.text:
group = cl.getGroup(msg.to)
try:
gCreator = group.creator.displayName
except:
gCreator = "Tidak ditemukan"
if group.invitee is None:
gPending = "0"
else:
gPending = str(len(group.invitee))
if group.preventJoinByTicket == True:
gQr = "Closed"
gTicket = "None"
else:
gQr = "Open"
gTicket = "https://line.me/R/ti/g/{}".format(str(cl.reissueGroupTicket(group.id)))
ret_ = "╔════════Group Info═════════"
ret_ += "\n╠Group Name : {}".format(group.name)
ret_ += "\n╠Group ID : {}".format(group.id)
ret_ += "\n╠Group Creator : {}".format(gCreator)
ret_ += "\n╠Member Count : {}".format(str(len(group.members)))
ret_ += "\n╠Pending Count : {}".format(gPending)
ret_ += "\n╠Group QR : {}".format(gQr)
ret_ += "\n╠Group URL : {}".format(gTicket)
ret_ += "\n╚════════Group Info═════════"
cl.sendText(msg.to, str(ret_))
cl.sendImageWithURL(msg.to,"http://dl.profile.line.naver.jp/"+ group.pictureStatus)
elif msg.text == "Uni":
cl.sendText(msg.to,"Hai Perkenalkan.....\nNama saya siapa ya?\n\n1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.
1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1.1\n\nMakasih Sudah Dilihat :)\nJangan Dikick ampun mzz :v")
elif ".music" in msg.text.lower():
songname = msg.text.lower().replace(".music","")
params = {"songname":" songname"}
r = requests.get('https://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
cl.sendMessage(msg.to, song[4])
elif ".Youtube " in msg.text:
query = msg.text.replace(".Youtube ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html5lib')
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
cl.sendText(msg.to,'http://www.youtube.com' + a['href'] + ' ' + a['title'])
elif "Block @" in msg.text:
if msg.toType == 2:
print "[block] OK"
_name = msg.text.replace("Block @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
cl.blockContact(target)
cl.sendText(msg.to, "Success block contact~")
except Exception as e:
print e
elif msg.text.lower() == 'blocklist':
blockedlist = cl.getBlockedContactIds()
cl.sendText(msg.to, "Please wait...")
kontak = cl.getContacts(blockedlist)
num=1
msgs="User Blocked List\n"
for ids in kontak:
msgs+="\n%i. %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n\nTotal %i blocked user(s)" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Glist"]:
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "[★] %s\n" % (cl.getGroup(i).name +"→["+str(len(cl.getGroup(i).members))+"]")
cl.sendText(msg.to,"▒▒▓█[List Group]█▓▒▒\n"+ h +"Total Group =" +"["+str(len(gid))+"]")
elif msg.text in ["Invite"]:
if msg.from_ in admin:
wait["ricoinvite"] = True
cl.sendText(msg.to,"send contact 😉")
elif ("Cek " in msg.text):
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mi = cl.getContact(key1)
cl.sendText(msg.to,"Mid:" + key1)
elif "Mid @" in msg.text:
if msg.from_ in admin:
_name = msg.text.replace("Mid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(msg.to, g.mid)
else:
pass
elif "Mymid" == msg.text:
cl.sendText(msg.to,mid)
elif msg.text in ["Link on"]:
if msg.from_ in admin:
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.preventJoinByTicket = False
cl.updateGroup(group)
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᴳʳᵒᵘᵖˢ ᴬᶜᵗᶦᵛᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᴳʳᵒᵘᵖˢ ᴬᶜᵗᶦᵛᵉ")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"It can not be used outside the group ô€œô€„‰👈")
else:
cl.sendText(msg.to,"Can not be used for groups other than ô€œô€„‰")
elif msg.text in ["Link off"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
group.preventJoinByTicket = True
cl.updateGroup(group)
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᵁʳˡ ᴳʳᵒᵘᵖˢ ᵈᶦˢᵃᵇˡᵉ")
else:
cl.sendText(msg.to,"ᵁʳˡ ᴳʳᵒᵘᵖˢ ᵈᶦˢᵃᵇˡᵉ")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"It can not be used outside the group 👈")
else:
cl.sendText(msg.to,"Can not be used for groups other than ô€œ")
elif msg.text in ["url","Url"]:
if msg.toType == 2:
g = cl.getGroup(msg.to)
if g.preventJoinByTicket == True:
g.preventJoinByTicket = False
cl.updateGroup(g)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"ᴜʀʟ: line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Hal ini tidak dapat digunakan di luar kelompok")
else:
cl.sendText(msg.to,"Tidak dapat digunakan untuk kelompok selain")
elif msg.text in ["Gurl"]:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"ᴜʀʟ: line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can't be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["S1glist"]:
gs = ki.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki.getGroup(i).name + " | [ " + str(len (ki.getGroup(i).members)) + " ]")
ki.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S2glist"]:
gs = ki2.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki2.getGroup(i).name + " | [ " + str(len (ki2.getGroup(i).members)) + " ]")
ki2.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S3glist"]:
gs = ki3.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki3.getGroup(i).name + " | [ " + str(len (ki3.getGroup(i).members)) + " ]")
ki3.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S4glist"]:
gs = ki4.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki4.getGroup(i).name + " | [ " + str(len (ki4.getGroup(i).members)) + " ]")
ki4.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S5glist"]:
gs = ki5.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki5.getGroup(i).name + " | [ " + str(len (ki5.getGroup(i).members)) + " ]")
ki5.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S6glist"]:
gs = ki6.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki6.getGroup(i).name + " | [ " + str(len (ki6.getGroup(i).members)) + " ]")
ki6.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S7glist"]:
gs = ki7.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki7.getGroup(i).name + " | [ " + str(len (ki7.getGroup(i).members)) + " ]")
ki7.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S8glist"]:
gs = ki8.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki8.getGroup(i).name + " | [ " + str(len (ki8.getGroup(i).members)) + " ]")
ki8.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S9glist"]:
gs = ki9.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (ki9.getGroup(i).name + " | [ " + str(len (ki9.getGroup(i).members)) + " ]")
ki9.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S10glist"]:
gs = k1.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k1.getGroup(i).name + " | [ " + str(len (k1.getGroup(i).members)) + " ]")
k1.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S11glist"]:
gs = k2.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k2.getGroup(i).name + " | [ " + str(len (k2.getGroup(i).members)) + " ]")
k2.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S12glist"]:
gs = k3.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k3.getGroup(i).name + " | [ " + str(len (k3.getGroup(i).members)) + " ]")
k3.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S13glist"]:
gs = k4.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k4.getGroup(i).name + " | [ " + str(len (k4.getGroup(i).members)) + " ]")
k4.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S14glist"]:
gs = k5.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k5.getGroup(i).name + " | [ " + str(len (k5.getGroup(i).members)) + " ]")
k5.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S15glist"]:
gs = k6.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k6.getGroup(i).name + " | [ " + str(len (k6.getGroup(i).members)) + " ]")
k6.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S16glist"]:
gs = k7.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k7.getGroup(i).name + " | [ " + str(len (k7.getGroup(i).members)) + " ]")
k7.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S17glist"]:
gs = k8.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k8.getGroup(i).name + " | [ " + str(len (k8.getGroup(i).members)) + " ]")
k8.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S18glist"]:
gs = k9.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (k9.getGroup(i).name + " | [ " + str(len (k9.getGroup(i).members)) + " ]")
k9.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S19glist"]:
gs = w1.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (w1.getGroup(i).name + " | [ " + str(len (w1.getGroup(i).members)) + " ]")
w1.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["S20glist"]:
gs = w2.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (w2.getGroup(i).name + " | [ " + str(len (w2.getGroup(i).members)) + " ]")
w2.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text == "Link bokep":
ki.sendText(msg.to,"nekopoi.host")
ki.sendText(msg.to,"sexvideobokep.com")
ki.sendText(msg.to,"memek.com")
ki.sendText(msg.to,"pornktube.com")
ki.sendText(msg.to,"faketaxi.com")
ki.sendText(msg.to,"videojorok.com")
ki.sendText(msg.to,"watchmygf.mobi")
ki.sendText(msg.to,"xnxx.com")
ki.sendText(msg.to,"pornhd.com")
ki.sendText(msg.to,"xvideos.com")
ki.sendText(msg.to,"vidz7.com")
ki.sendText(msg.to,"m.xhamster.com")
ki.sendText(msg.to,"xxmovies.pro")
ki.sendText(msg.to,"youporn.com")
ki.sendText(msg.to,"pornhub.com")
ki.sendText(msg.to,"anyporn.com")
ki.sendText(msg.to,"hdsexdino.com")
ki.sendText(msg.to,"rubyourdick.com")
ki.sendText(msg.to,"anybunny.mobi")
ki.sendText(msg.to,"cliphunter.com")
ki.sendText(msg.to,"sexloving.net")
ki.sendText(msg.to,"free.goshow.tv")
ki.sendText(msg.to,"eporner.com")
ki.sendText(msg.to,"Pornhd.josex.net")
ki.sendText(msg.to,"m.hqporner.com")
ki.sendText(msg.to,"m.spankbang.com")
ki.sendText(msg.to,"m.4tube.com")
ki.sendText(msg.to,"brazzers.com")
#----------------------------------------------------------
elif msg.text in ["Conban","Contactban","Contact ban"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Bʟᴀᴄᴋʟɪsᴛ ᴇᴍᴘᴛʏ")
else:
cl.sendText(msg.to,"Lɪsᴛ ᴄᴏɴᴛᴀᴄᴛ ʙʟᴀᴄᴋʟɪsᴛ")
h = ""
for i in wait["blacklist"]:
h = cl.getContact(i)
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': i}
cl.sendMessage(M)
#------------------------------
elif msg.text in ["List favorite"]:
dj = cl.getFavoriteMids()
kontak = cl.getContacts(dj)
num = 1
family = str(len(dj))
msgs = "[List Favorite Friends]"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
#-------------------------------
elif "Gift " in msg.text:
strnum = msg.text.replace("Gift ","")
num = int(strnum)
for var in range(0,num):
try:
msg.contentType = 9
msg.contentMetadata={'PRDID': '89131c1a-e549-4bd5-9e60-e24de0d2e252',
'PRDTYPE': 'THEME',
'MSGTPL': '10'}
msg.text = None
cl.sendMessage(msg)
print "SEND STICKER"
except:
pass
#-----------------------------------------------------------
elif "!salam" in msg.text:
if msg.from_ in admin:
cl.sendText(msg.to,"السَّلاَمُ عَلَيْكُمْ وَرَحْمَةُ اللهِ وَبَرَكَاتُهُ")
cl.sendText(msg.to,"Assalamu'alaikum")
cl.sendText(msg.to,"وَعَلَيْكُمْ السَّلاَمُ وَرَحْمَةُ اللهِوَبَرَكَاتُهُ")
cl.sendText(msg.to,"Wa'alaikumsallam.Wr,Wb")
if msg.toType == 2:
print "ok"
_name = msg.text.replace("!salam","").strip()  # the command prefix is lowercase "!salam"
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"sorry if this is rude")
cl.sendText(msg.to,"huh, nobody answered the salam..!!")
cl.sendText(msg.to,"hehehhehe")
targets = []
if _name != "":  # an empty name would match every member
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
if target not in admin:
try:
klist=[cl]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"السَّلاَمُ عَلَيْكُمْ وَرَحْمَةُ اللهِ وَبَرَكَاتُهُ")
cl.sendText(msg.to,"وَعَلَيْكُمْ السَّلاَمُ وَرَحْمَةُ اللهِوَبَرَكَاتُهُ")
cl.sendText(msg.to,"Nah salamnya jawab sendiri dah")
#-----------------------------------------------------------
elif "Detail" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithURL(msg.to,path)
except:
pass
#-----------------------------------------------------------
elif "Ppgroup" in msg.text:
group = cl.getGroup(msg.to)
path =("http://dl.profile.line-cdn.net/" + group.pictureStatus)
cl.sendImageWithURL(msg.to, path)
#-----------------------------------------------------------
elif "#leave" in msg.text:
try:
import sys
sys.exit()
except:
pass
#-----------------------------------------------------------
elif "Hay @" in msg.text:
_name = msg.text.replace("Hay @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
for _ in xrange(10):  # spam the target ten times
cl.sendText(g.mid,"Your Account Has Been Spammed !")
cl.sendText(msg.to, "Target has been spammed ")
print " Spammed !"
#-----------------------------------------------------------
elif "Mayhem1" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "ok"
_name = msg.text.replace("Mayhem","")
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"「 Mayhem 」\nMayhem is STARTING♪\n' abort' to abort♪")
cl.sendText(msg.to,"「 Mayhem 」\n46 victims shall yell hul·la·ba·loo♪\n/ˌhələbəˈlo͞o,ˈhələbəˌlo͞o/")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Tidak ditemukan")
else:
for target in targets:
if not target in Bots:
try:
klist=[cl]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
cl.sendText(msg.to,"Mayhem done")
#------------------------------------------------------------
elif msg.text in ["Bot sp","Bot speed"]:
start = time.time()
ki.sendText(msg.to, "Loading speed bot..")
elapsed_time = time.time() - start
ki.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki2.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki3.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki4.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki5.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki6.sendText(msg.to, "%sseconds" % (elapsed_time))
elapsed_time = time.time() - start
ki7.sendText(msg.to, "%sseconds" % (elapsed_time))
elif msg.text.lower() == 'responsname':
profile = ki.getProfile()
text = profile.displayName
ki.sendText(msg.to, text)
profile = ki2.getProfile()
text = profile.displayName
ki2.sendText(msg.to, text)
profile = ki3.getProfile()
text = profile.displayName
ki3.sendText(msg.to, text)
profile = ki4.getProfile()
text = profile.displayName
ki4.sendText(msg.to, text)
profile = ki5.getProfile()
text = profile.displayName
ki5.sendText(msg.to, text)
profile = ki6.getProfile()
text = profile.displayName
ki6.sendText(msg.to, text)
#profile = ki7.getProfile()
#text = profile.displayName
#ki7.sendText(msg.to, text)
#------------------------------------------------------------------
elif "Steal home @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Steal home @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
ki.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
cl.sendImageWithURL(msg.to, path)
except:
pass
print "[Command]dp executed"
#-------------------------------------------------------------------
elif "youtube " in msg.text.lower():
query = msg.text.split(" ")
try:
if len(query) == 3:
isi = yt(query[2])
hasil = isi[int(query[1])-1]
cl.sendText(msg.to, hasil)
else:
isi = yt(query[1])
cl.sendText(msg.to, isi[0])
except Exception as e:
cl.sendText(msg.to, str(e))
elif 'Vidio ' in msg.text:
try:
textToSearch = (msg.text).replace('Vidio ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
ght=('https://www.youtube.com' + results['href'])
cl.sendVideoWithURL(msg.to,ght)
except:
cl.sendText(msg.to,"Could not find it")
#==============================================================================#
elif "/musik " in msg.text:
songname = msg.text.replace("/musik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nLink download : " + song[4])
cl.sendText(msg.to, "Lagu " + song[0] + "\nSedang Di Prosses... Tunggu Sebentar ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "ˢᵉˡᵃᵐᵃᵗ ᴹᵉⁿᵈᵉⁿᵍᵃʳᵏᵃⁿ ᴸᵃᵍᵘ ᴾᶦˡᶦʰᵃⁿ ᴬⁿᵈᵃ " + song[0])
elif "/musrik " in msg.text:
songname = msg.text.replace("/musrik ","")
params = {"songname": songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
abc = song[3].replace('https://','http://')
hasil = 'Lyrics of ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, "Song " + song[0] + "\nis being processed... please wait ^_^ ")
cl.sendAudioWithURL(msg.to,abc)
cl.sendText(msg.to, "Title : " + song[0] + "\nLength : " + song[1] + "\nDownload link : " + song[4] +"\n\n" + hasil)
cl.sendText(msg.to, "Enjoy listening to your chosen song " + song[0])
#==============================================================================#
elif 'Yvideo: ' in msg.text:
try:
textToSearch = (msg.text).replace('Youtube ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to,"Could not find it")
#==============================================================================#
elif "Spamtag @" in msg.text:
_name = msg.text.replace("Spamtag @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
xname = g.displayName
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" "
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(g.mid)+'}]}','EMTVER':'4'}
for _ in xrange(80):  # send the mention 80 times
cl.sendMessage(msg)
else:
pass
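# Spam on <n> <text> sends <text> as <n> separate messages;
# Spam off <n> <text> sends one message containing <text> repeated <n> times
# (usage inferred from the parsing below).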
elif "Spam" in msg.text:
txt = msg.text.split(" ")
jmlh = int(txt[2])
teks = msg.text.replace("Spam "+str(txt[1])+" "+str(jmlh)+" ","")
tulisan = jmlh * (teks+"\n")
if txt[1] == "on":
if jmlh <= 100000:
for x in range(jmlh):
cl.sendText(msg.to, teks)
else:
cl.sendText(msg.to, "Out of Range!")
elif txt[1] == "off":
if jmlh <= 100000:
cl.sendText(msg.to, tulisan)
else:
cl.sendText(msg.to, "Out Of Range!")
#==============================================================================#
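# cover @<name> / Cover @<name>: resolves the member's objectId via channel.getHome
# and downloads their LINE Home cover image from the profile CDN.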
elif "cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Cover @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Cover @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.channel.getHome(target)
objId = h["result"]["homeInfo"]["objectId"]
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/myhome/c/download.nhn?userid=" + target + "&oid=" + objId)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
#==============================================================================#
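# pp @<name> / Pp @<name>: sends the member's profile picture;
# pictureStatus is the CDN path fragment for the avatar image.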
elif "pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif "Pp @" in msg.text:
if msg.toType == 2:
cover = msg.text.replace("Pp @","")
_nametarget = cover.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
h = cl.getContact(target)
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
except Exception as error:
print error
cl.sendText(msg.to,"Upload image failed.")
elif msg.text.lower() in ["pap owner","pap creator"]:
cl.sendImageWithURL(msg.to,"http://dl.profile.line-cdn.net/0hNPsZGNvyEX9OIz0w4GxuKHJmHxI5DRc3NkJaETwkRklqGwQoJkNbTGklHRo2G1B7cxFXH2NxSU03")
#==============================================================================#
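# Woy! @<name>: floods the mentioned member's private chat with the tag line below.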
elif "Woy! @" in msg.text:
_name = msg.text.replace("Woy! @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
# flood the target's private chat with the zalgo tag line
# (the original repeated this call 535 times, once per line)
for i in range(535):
    cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ HERE, CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(g.mid,"Ꭲ̡̦͎͇͈̘̻̎̉̅́̒͗ͅϵѧᴍ̸̩̟̗͎̯͙̺̺̜̬̙̟̀̑̓͋̐͆͌̓̒́̒͗͒͑̚͟͜ᎶʀҽѧᴛᏴøᴛ̢͓̹̗̘̠̪̖͗̃̄̅̆̽̀̕͜͞ NIH CIKA~")
cl.sendText(msg.to,"Selesai Mengspam Akun Target")
#=============================================================================
elif msg.text.lower().startswith("imagetext "):
sep = msg.text.split(" ")
textnya = msg.text.replace(sep[0] + " ","")
urlnya = "http://chart.apis.google.com/chart?chs=480x80&cht=p3&chtt=" + textnya + "&chts=FFFFFF,70&chf=bg,s,000000"
cl.sendImageWithURL(msg.to, urlnya)
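# The "imagetext" command leans on the legacy Google Image Charts endpoint to
# render text as an image: chs sets the canvas size, cht=p3 draws a (content-free)
# 3D pie chart, chtt supplies the user's text as the chart title, chts the title
# color/size, and chf the background fill. A minimal sketch of the same URL
# construction, assuming urllib is already imported as in this script:
#   text = urllib.quote("hello world")
#   url = "http://chart.apis.google.com/chart?chs=480x80&cht=p3&chtt=" + text
#   cl.sendImageWithURL(msg.to, url)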
elif ".ps " in msg.text.lower():
tob = msg.text.lower().replace(".ps ","")
dan = urllib.quote(tob)
cl.sendText(msg.to,"「 Searching 」\n" "Type: Play Store Search\nStatus: Processing...")
cl.sendText(msg.to,"Title : "+tob+"\nhttps://play.google.com/store/search?q=" + dan)
cl.sendText(msg.to,"「 Searching 」\n" "Type: Play Store Search\nStatus: Success")
elif ".gl " in msg.text:
a = msg.text.replace(".gl ","")
b = urllib.quote(a)
cl.sendText(msg.to,"「 Searching 」\n" "Type: Google Search\nStatus: Processing...")
cl.sendText(msg.to,"Title: " + a + "\nhttps://google.co.id/search?dcr=0&source=hp&ei=iysxWvH4JcbwvgSa5IqYDg&q=" + b + "&oq=" + b + "&gs_l=mobile-gws-hp.3..0i203k1l3j0j0i203k1.2672.5074.0.5502.18.11.2.5.6.0.190.1542.0j10.10.0....0...1c.1j4.64.mobile-gws-hp..3.15.1347.3..35i39k1j0i131k1j0i10i203k1j0i10k1.140.cERNZUVYbV8#scso=uid_WjEr7gABqeYKj7vFEw8Fug_7:228")
cl.sendText(msg.to,"「 Searching 」\n" "Type: Google Search\nStatus: Success")
elif ".fb" in msg.text:
a = msg.text.replace(".fb","")
replace = urllib.quote(a)
cl.sendText(msg.to,"「 Searching 」\n" "Type: Facebook Search\nStatus: Processing...")
cl.sendText(msg.to, "Title: " + a + "\nhttps://m.facebook.com/search/top/?q="+replace+"&ref=content_filter&tsid=0.7682944806717842&source=typeahead")
cl.sendText(msg.to,"「 Searching 」\n" "Type: Search Info\nStatus: Success")
elif ".git " in msg.text:
a = msg.text.replace(".git ","")
b = urllib.quote(a)
cl.sendText(msg.to,"「 Searching 」\n" "Type: GitHub Search\nStatus: Processing...")
cl.sendText(msg.to, "Title: " + a + "\nhttps://github.com/search?utf8=✓&q="+b)
cl.sendText(msg.to,"「 Searching 」\n" "Type: GitHub Search\nStatus: Success")
elif ".gi" in msg.text.lower():
start = time.time()
separate = msg.text.split(" ")
search = msg.text.replace(separate[0] + " ","")
url = 'https://www.google.com/search?q=' + search.replace(" ","+") + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
cl.sendImageWithURL(msg.to,path)
a = items.index(path)
b = len(items)
elapsed_time = time.time() - start
cl.sendText(msg.to,"Gambar #%s dari #%s gambar.\nMendapatkan gambar selama %s detik." %(str(a), str(b), elapsed_time))
elif "say " in msg.text.lower():
say = msg.text.lower().replace("say ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudio(msg.to,"hasil.mp3")
#------------------------------------------------------------------
elif ("Gn: " in msg.text):
if msg.from_ in admin:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.name = msg.text.replace("Gn: ","")
cl.updateGroup(X)
else:
cl.sendText(msg.to,"Tidak dapat dilakukan diluar group")
else:
msg.contentType = 13
msg.contentMetadata = {'mid': "u923fca3dc907e047572ad25c24f1d29b"}
cl.sendText(msg.to,"ɴᴀᴍᴇ ɢʀᴏᴜᴘs ɪᴛs ᴄʜᴀɴɢᴇᴅ")
cl.sendMessage(msg)
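# Renaming a group follows the usual pattern in this script: fetch the group
# object with getGroup(), mutate the attribute locally (here .name), and push
# the change back with updateGroup().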
#-------------------------------------------------------------------
elif msg.text in ["Invite user"]:
if msg.from_ in admin:
wait["winvite"] = True
cl.sendText(msg.to,"send contact")
#-----------------------------------------------------
elif "Fuck!" in msg.text:
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
cl.inviteIntoGroup(msg.to,[target])
cl.cancelGroupInvitation(msg.to,[target])
except:
cl.sendText(msg.to, "Error")
#-------------------------------------------------------
elif "Picturl @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Picturl @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to, path)
except Exception as e:
raise e
print "[Command]dp executed"
#-------------------------------------------------------------------
elif "Blacklist @" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "[BL]ok"
_name = msg.text.replace("Blacklist @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Success Boss")
except:
cl.sendText(msg.to,"Error")
elif "Blacklist all" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "ok"
_name = msg.text.replace("Blacklist all","")
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"Semua Telah Di Hapus")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Maaf")
else:
for target in targets:
if not target in Bots:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Success Boss")
except:
cl.sentText(msg.to,"Berhasil Dihapus")
elif "Unban @" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "[WL]ok"
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip(' ')
gs = ki.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"ᵁⁿᵇᵃⁿᵉᵈ ᴴᵃˢ ᴮᵉᵉⁿ ᴰᵉˡᵉᵗᵉᵈ")
except:
cl.sendText(msg.to,"There was no blacklist user")
elif "Blacklist: " in msg.text:
if msg.from_ in admin:
nk0 = msg.text.replace("Blacklist: ","")
_name = nk0.strip()
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Locked")
except:
cl.sendText(msg.to,"Error")
elif "Whitelist: " in msg.text:
if msg.from_ in admin:
nk0 = msg.text.replace("Whitelist: ","")
_name = nk0.strip()
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Unlocked")
except:
cl.sendText(msg.to,"Error")
elif msg.text in ["Clear ban"]:
cl.sendText(msg.to,"ʙʟᴀᴄᴋʟɪsᴛ ʜᴀs ʙᴇᴇɴ ᴅᴇʟᴇᴛᴇᴅ "+ str(len(wait["blacklist"]))+ " ᴜsᴇʀs")
wait["blacklist"] = {}
cl.sendText(msg.to,"sᴜᴋsᴇs ʜᴀᴘᴜs ʙᴀɴᴇᴅ")
elif msg.text in ["Unban:on"]:
if msg.from_ in admin:
wait["dblacklist"] = True
cl.sendText(msg.to,"ˢᵉⁿᵈ ᶜᵒⁿᵗᵃᶜᵗ ᵀᵒ ᵁⁿᵇᵃⁿ")
elif msg.text in ["Blacklist"]:
if msg.from_ in admin:
wait["wblacklist"] = True
cl.sendText(msg.to,"ˢᵉⁿᵈ ᶜᵒⁿᵗᵃᶜᵗ ᵀᵒ ᵇᵃⁿ")
elif msg.text in ["Banlist"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"ɴᴏᴛʜɪɴɢ ʙʟᴀᴄᴋʟɪsᴛ")
else:
mc = ""
for mi_d in wait["blacklist"]:
mc += "• " +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,"「 ʙʟᴀᴄᴋʟɪsᴛ ᴜsᴇʀ 」\n" + mc +"\nᴛᴏᴛᴀʟ : "+ str(len(wait["blacklist"])))
elif msg.text in ["Midban","Mid ban"]:
if msg.from_ in admin:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list += filter(lambda m: m == tag, gMembMids)
num=1
cocoa = "══════════ʟɪsᴛ ʙʟᴀᴄᴋʟɪsᴛ═════════"
for mm in matched_list:
cocoa+="\n[%i] %s" % (num, mm)
num=(num+1)
cocoa+="\n═════════ʟɪsᴛ ʙʟᴀᴄᴋʟɪsᴛ═════════\n\nᴛᴏᴛᴀʟ ʙʟᴀᴄᴋʟɪsᴛ : %i" % len(matched_list)
cl.sendText(msg.to,cocoa)
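# Midban cross-references the blacklist against the current group: it collects
# every member mid, keeps only those that also appear in wait["blacklist"]
# (Python 2 filter() returns a list), and prints them as a numbered report.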
elif msg.text.lower() == 'kill':
if msg.from_ in admin:
if msg.toType == 2:
group = ki.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list += filter(lambda m: m == tag, gMembMids)
if matched_list == []:
ki.sendText(msg.to,"Tidak ada Daftar Blacklist")
return
for jj in matched_list:
try:
cl.kickoutFromGroup(msg.to,[jj])
ki.kickoutFromGroup(msg.to,[jj])
ki2.kickoutFromGroup(msg.to,[jj])
ki3.kickoutFromGroup(msg.to,[jj])
ki4.kickoutFromGroup(msg.to,[jj])
ki5.kickoutFromGroup(msg.to,[jj])
ki6.kickoutFromGroup(msg.to,[jj])
#ki7.kickoutFromGroup(msg.to,[jj])
#ki8.kickoutFromGroup(msg.to,[jj])
#ki9.kickoutFromGroup(msg.to,[jj])
#k1.kickoutFromGroup(msg.to,[jj])
#k2.kickoutFromGroup(msg.to,[jj])
#k3.kickoutFromGroup(msg.to,[jj])
#k4.kickoutFromGroup(msg.to,[jj])
#k5.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
elif "Nuke" in msg.text:
if msg.from_ in admin:
if msg.toType == 2:
print "ok"
_name = msg.text.replace("Nuke","")
gs = cl.getGroup(msg.to)
cl.sendText(msg.to,"Fᴜᴄᴋ Yᴏᴜʀ Gʀᴏᴜᴘs Iᴅɪᴏᴛs")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Tidak ada Member")
cl.sendText(msg.to,"Nothing Bosqu")
else:
for target in targets:
if not target in Bots:
try:
klist=[cl]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki.sendText(msg,to,"Hahaha")
ki2.sendText(msg,to,"Fakyu Sundala")
#-----------------------------------------------
#-----------------------------------------------
elif msg.text.lower() == 'reboot':
print "[Command]Restart"
try:
cl.sendText(msg.to,"ʀᴇsᴛᴀʀᴛɪɴɢ...")
cl.sendText(msg.to,"ʀᴇsᴛᴀʀᴛɪɴɢ ᴍʏ sᴇʟғʙᴏᴛ sᴜᴋsᴇs")
restart_program()
except:
cl.sendText(msg.to,"ᴘʟᴇᴀsᴇ ᴡᴀɪᴛ...")
restart_program()
pass
#-----------------------------------------------
#-----------------------------------------------
#-----------------------------------------------
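# The join/invite handlers below all use the same ticket dance: temporarily set
# preventJoinByTicket = False, call reissueGroupTicket() to mint a fresh invite
# ticket, have each assist account call acceptGroupInvitationByTicket() with it,
# then flip preventJoinByTicket back to True so strangers cannot reuse the ticket.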
elif msg.text.lower() == ["join all"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
ki7.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
#ki8.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#ki9.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k1.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k2.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k3.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k4.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k5.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k6.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k7.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k8.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#k9.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#w1.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#w2.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
random.choice(KAC).updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif msg.text in ["-","Y","Rejoice"]:
if msg.from_ in admsa:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.2)
#ki7.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#ke.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
#ku.acceptGroupInvitationByTicket(msg.to,Ticket)
#time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
random.choice(KAC).updateGroup(G)
print "Asist joined"
elif msg.text.lower() == 'sp come':
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
ki7.acceptGroupInvitationByTicket(msg.to,Ticket)
#ki8.acceptGroupInvitationByTicket(msg.to,Ticket)
#ki9.acceptGroupInvitationByTicket(msg.to,Ticket)
#k1.acceptGroupInvitationByTicket(msg.to,Ticket)
#k2.acceptGroupInvitationByTicket(msg.to,Ticket)
#k3.acceptGroupInvitationByTicket(msg.to,Ticket)
#k4.acceptGroupInvitationByTicket(msg.to,Ticket)
#k5.acceptGroupInvitationByTicket(msg.to,Ticket)
#k6.acceptGroupInvitationByTicket(msg.to,Ticket)
#k7.acceptGroupInvitationByTicket(msg.to,Ticket)
#k8.acceptGroupInvitationByTicket(msg.to,Ticket)
#k9.acceptGroupInvitationByTicket(msg.to,Ticket)
#w1.acceptGroupInvitationByTicket(msg.to,Ticket)
#w2.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro1 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro2 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki2.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro3 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki3.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro4 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki4.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro5 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki5.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
elif "Pro6 in" in msg.text:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki6.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki6.updateGroup(G)
print "kicker ok"
#-----------------------------------------------
#elif msg.text in ["Jinlip"]:
# if msg.toType == 2:
# ginfo = cl.getGroup(msg.to)
# try:
# cl.sendText(msg.to,"Pamit Dulu Ya😘 " + str(ginfo.name) + "")
# cl.leaveGroup(msg.to)
#-----------------------------------------------
elif msg.text in ["V","Off","Back","O"]:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
cl.sendText(msg.to,"Bye All " + str(ginfo.name) + "")
ki.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
ki6.leaveGroup(msg.to)
#ki7.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif msg.text.lower() == 'minggat':
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki.leaveGroup(msg.to)
ki2.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki2.leaveGroup(msg.to)
ki3.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki3.leaveGroup(msg.to)
ki4.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki4.leaveGroup(msg.to)
ki5.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki5.leaveGroup(msg.to)
ki6.sendText(msg.to,"Bye Bye " + str(ginfo.name) + "")
ki6.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro1 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro2 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki2.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro3 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki3.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro4 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki4.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro5 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki5.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif "Pro6 bye" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki6.leaveGroup(msg.to)
except:
pass
#-----------------------------------------------
elif msg.text in ["Sambutan on"]:
if wait["Sambutan"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sᴀᴍʙᴜᴛᴀɴ Wᴇʟᴄᴏᴍᴇ Aʟʀᴇᴀᴅʏ ɪɴ ᴀᴄᴛɪᴠᴇ")
else:
wait["Sambutan"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ʀᴇᴀᴅʏ ɪɴ ᴀᴄᴛɪᴠᴇ")
elif msg.text in ["Sambutan off"]:
if wait["Sambutan"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sᴀᴍʙᴜᴛᴀɴ Wᴇʟᴄᴏᴍᴇ Rᴇᴀᴅʏ Uɴᴀᴄᴛɪᴠᴇ")
else:
wait["Sambutan"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Rᴇᴀᴅʏ Uɴᴀᴄᴛɪᴠᴇ")
#-----------------------------------------------
elif msg.text in ["Sticker on"]:
if wait["sticker"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Aʟʀᴇᴀᴅʏ ɪɴ ᴀᴄᴛɪᴠᴇ")
else:
wait["sticker"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ɪɴ ᴀᴄᴛɪᴠᴇ")
elif msg.text in ["Sticker off"]:
if wait["sticker"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Rᴇᴀᴅʏ Uɴᴀᴄᴛɪᴠᴇ")
else:
wait["sticker"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Uɴᴀᴄᴛɪᴠᴇ")
#-----------------------------------------------
elif msg.text in ["Tag on"]:
if wait["tag"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴀᴜᴛᴏ ʀᴇsᴘᴏɴ ᴛᴀɢ ᴅɪ ᴀᴋᴛɪғᴋᴀɴ")
else:
wait["tag"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"ʀᴇᴀᴅʏ ɪɴ ᴀᴄᴛɪᴠᴇ")
elif msg.text in ["Tag off"]:
if wait["tag"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"ᴀᴜᴛᴏ ʀᴇsᴘᴏɴ ᴛᴀɢ ᴅɪ ɴᴏɴᴀᴋᴛɪғᴋᴀɴ")
else:
wait["tag"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Rᴇᴀᴅʏ Uɴᴀᴄᴛɪᴠᴇ")
#-----------------------------------------------
#-----------------------------------------------
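# op.type 19 appears to be LINE's NOTIFIED_KICKOUT_FROM_GROUP event (an
# assumption based on how it is handled here): param1 is the group, param2 the
# kicker, param3 the kicked member. If one of this script's accounts was kicked,
# a surviving assist account kicks the attacker back, reopens the group via a
# reissued ticket so every bot can rejoin, and adds the attacker to the blacklist.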
if op.type == 19:
try:
if op.param3 in mid:
if op.param2 in kimid:
G = ki.getGroup(op.param1)
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
#k1.acceptGroupInvitationByTicket(op.param1,Ticket)
#k2.acceptGroupInvitationByTicket(op.param1,Ticket)
#k3.acceptGroupInvitationByTicket(op.param1,Ticket)
#k4.acceptGroupInvitationByTicket(op.param1,Ticket)
#k5.acceptGroupInvitationByTicket(op.param1,Ticket)
#k6.acceptGroupInvitationByTicket(op.param1,Ticket)
#k7.acceptGroupInvitationByTicket(op.param1,Ticket)
#k8.acceptGroupInvitationByTicket(op.param1,Ticket)
#k9.acceptGroupInvitationByTicket(op.param1,Ticket)
#w1.acceptGroupInvitationByTicket(op.param1,Ticket)
#w2.acceptGroupInvitationByTicket(op.param1,Ticket)
#w3.acceptGroupInvitationByTicket(op.param1,Ticket)
#w4.acceptGroupInvitationByTicket(op.param1,Ticket)
#w5.acceptGroupInvitationByTicket(op.param1,Ticket)
#w6.acceptGroupInvitationByTicket(op.param1,Ticket)
#w7.acceptGroupInvitationByTicket(op.param1,Ticket)
#w8.acceptGroupInvitationByTicket(op.param1,Ticket)
#w9.acceptGroupInvitationByTicket(op.param1,Ticket)
#l1.acceptGroupInvitationByTicket(op.param1,Ticket)
#l2.acceptGroupInvitationByTicket(op.param1,Ticket)
#l3.acceptGroupInvitationByTicket(op.param1,Ticket)
#l4.acceptGroupInvitationByTicket(op.param1,Ticket)
#l5.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
else:
G = ki.getGroup(op.param1)
ki.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki.updateGroup(G)
Ticket = ki.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
#k1.acceptGroupInvitationByTicket(op.param1,Ticket)
#k2.acceptGroupInvitationByTicket(op.param1,Ticket)
#k3.acceptGroupInvitationByTicket(op.param1,Ticket)
#k4.acceptGroupInvitationByTicket(op.param1,Ticket)
#k5.acceptGroupInvitationByTicket(op.param1,Ticket)
#k6.acceptGroupInvitationByTicket(op.param1,Ticket)
#k7.acceptGroupInvitationByTicket(op.param1,Ticket)
#k8.acceptGroupInvitationByTicket(op.param1,Ticket)
#k9.acceptGroupInvitationByTicket(op.param1,Ticket)
#w1.acceptGroupInvitationByTicket(op.param1,Ticket)
#w2.acceptGroupInvitationByTicket(op.param1,Ticket)
#w3.acceptGroupInvitationByTicket(op.param1,Ticket)
#w4.acceptGroupInvitationByTicket(op.param1,Ticket)
#w5.acceptGroupInvitationByTicket(op.param1,Ticket)
#w6.acceptGroupInvitationByTicket(op.param1,Ticket)
#w7.acceptGroupInvitationByTicket(op.param1,Ticket)
#w8.acceptGroupInvitationByTicket(op.param1,Ticket)
#w9.acceptGroupInvitationByTicket(op.param1,Ticket)
#l1.acceptGroupInvitationByTicket(op.param1,Ticket)
#l2.acceptGroupInvitationByTicket(op.param1,Ticket)
#l3.acceptGroupInvitationByTicket(op.param1,Ticket)
#l4.acceptGroupInvitationByTicket(op.param1,Ticket)
#l5.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
ki.updateGroup(G)
wait["blacklist"][op.param2] = True
elif op.param3 in kimid:
if op.param2 in ki2mid:
G = ki2.getGroup(op.param1)
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki8.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki9.acceptGroupInvitationByTicket(op.param1,Ticket)
#k1.acceptGroupInvitationByTicket(op.param1,Ticket)
#k2.acceptGroupInvitationByTicket(op.param1,Ticket)
#k3.acceptGroupInvitationByTicket(op.param1,Ticket)
#k4.acceptGroupInvitationByTicket(op.param1,Ticket)
#k5.acceptGroupInvitationByTicket(op.param1,Ticket)
#k6.acceptGroupInvitationByTicket(op.param1,Ticket)
#k7.acceptGroupInvitationByTicket(op.param1,Ticket)
#k8.acceptGroupInvitationByTicket(op.param1,Ticket)
#k9.acceptGroupInvitationByTicket(op.param1,Ticket)
#w1.acceptGroupInvitationByTicket(op.param1,Ticket)
#w2.acceptGroupInvitationByTicket(op.param1,Ticket)
#w3.acceptGroupInvitationByTicket(op.param1,Ticket)
#w4.acceptGroupInvitationByTicket(op.param1,Ticket)
#w5.acceptGroupInvitationByTicket(op.param1,Ticket)
#w6.acceptGroupInvitationByTicket(op.param1,Ticket)
#w7.acceptGroupInvitationByTicket(op.param1,Ticket)
#w8.acceptGroupInvitationByTicket(op.param1,Ticket)
#w9.acceptGroupInvitationByTicket(op.param1,Ticket)
#l1.acceptGroupInvitationByTicket(op.param1,Ticket)
#l2.acceptGroupInvitationByTicket(op.param1,Ticket)
#l3.acceptGroupInvitationByTicket(op.param1,Ticket)
#l4.acceptGroupInvitationByTicket(op.param1,Ticket)
#l5.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
ki2.updateGroup(G)
else:
G = ki2.getGroup(op.param1)
ki2.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki2.updateGroup(G)  # was ki; ki2 issued the ticket in this branch
elif op.param3 in ki3mid:
if op.param2 in ki2mid:
G = ki2.getGroup(op.param1)
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki2.updateGroup(G)
else:
G = cl.getGroup(op.param1)
ki2.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki2.updateGroup(G)
Ticket = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki2.updateGroup(G)
elif op.param3 in ki2mid:
if op.param2 in ki3mid:
G = ki3.getGroup(op.param1)
G.preventJoinByTicket = False
ki3.updateGroup(G)
Ticket = ki3.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki3.updateGroup(G)
else:
G = cl.getGroup(op.param1)
ki3.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki3.updateGroup(G)
Ticket = ki3.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki3.updateGroup(G)
elif op.param3 in ki4mid:
if op.param2 in ki5mid:
G = ki5.getGroup(op.param1)
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki5.updateGroup(G)  # was cl; ki5 issued the ticket in this branch
else:
G = ki5.getGroup(op.param1)
ki5.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki5.updateGroup(G)
elif op.param3 in ki5mid:
if op.param2 in ki4mid:
G = ki4.getGroup(op.param1)
G.preventJoinByTicket = False
ki4.updateGroup(G)
Ticket = ki4.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki4.updateGroup(G)
else:
G = ki4.getGroup(op.param1)
ki4.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki4.updateGroup(G)
Ticket = ki4.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki4.updateGroup(G)
elif op.param3 in ki6mid:
if op.param2 in ki5mid:
G = ki5.getGroup(op.param1)
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki5.updateGroup(G)
else:
G = ki5.getGroup(op.param1)
ki5.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ticket = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
ki.acceptGroupInvitationByTicket(op.param1,Ticket)
ki2.acceptGroupInvitationByTicket(op.param1,Ticket)
ki3.acceptGroupInvitationByTicket(op.param1,Ticket)
ki4.acceptGroupInvitationByTicket(op.param1,Ticket)
ki5.acceptGroupInvitationByTicket(op.param1,Ticket)
ki6.acceptGroupInvitationByTicket(op.param1,Ticket)
#ki7-ki9, k1-k9, w1-w9, l1-l5 backup accounts: same acceptGroupInvitationByTicket call, kept disabled
G.preventJoinByTicket = True
ki5.updateGroup(G)
except:
pass
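# op.type 17 (NOTIFIED_ACCEPT_GROUP_INVITATION in the LINE thrift OpType enum this
# script appears to target): someone just joined the group. If "protect" is on and
# the joiner is blacklisted, a random helper account from KAC kicks them and
# re-locks the group's join-by-ticket flag.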
if op.type == 17:
if op.param2 not in Bots:
# op.param2 is already known not to be a bot at this point
if wait["protect"] == True:
if wait["blacklist"][op.param2] == True:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
ki4.updateGroup(G)
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
pass
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
G = random.choice(KAC).getGroup(op.param1)
G.preventJoinByTicket = True
random.choice(KAC).updateGroup(G)
random.choice(KAK).kickoutFromGroup(op.param1,[op.param2])
except:
pass
elif op.param2 not in admin + Bots:
random.choice(KAC).sendText(op.param1,"Welcome. Don't Play Bots. I can kick you!")
else:
pass
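# op.type 19 (NOTIFIED_KICKOUT_FROM_GROUP): a non-bot account kicked someone.
# With "protect" on, the kicker is blacklisted and kicked back by a random helper.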
if op.type == 19:
if op.param2 not in Bots:
# op.param2 is already known not to be a bot at this point
if wait["protect"] == True:
wait["blacklist"][op.param2] = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.type == 13:
if op.param2 not in Bots:
# op.param2 is already known not to be a bot at this point
if wait["inviteprotect"] == True:
wait["blacklist"][op.param2] = True
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.param2 not in Bots:
# op.param2 is already known not to be a bot at this point
if wait["inviteprotect"] == True:
wait["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
if op.param2 not in Bots:
# op.param2 is already known not to be a bot at this point
if wait["cancelprotect"] == True:
wait["blacklist"][op.param2] = True
cl.cancelGroupInvitation(op.param1,[op.param3])
else:
cl.sendText(op.param1,"")
else:
cl.sendText(op.param1,"")
#Open QR: a helper joins via the ticket, kicks the offender, then leaves; the main account shares the offender's contact.
if op.type == 11:
if wait["linkprotect"] == True:
if op.param2 not in admin and op.param2 not in Bots:  # was "not in admin and Bots", which only tested that Bots was non-empty
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
invsend = 0
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1, Ticket)
time.sleep(0.01)
cl.kickoutFromGroup(op.param1,[op.param2])
#ki7.leaveGroup(op.param1)
cl.updateGroup(G)
x = Message(to=op.param1, from_=None, text=None, contentType=13)
x.contentMetadata={'mid':op.param2}
cl.sendMessage(x)
if op.param2 in wait["blacklist"]:
pass
#if op.param2 in wait["whitelist"]:
#pass
else:
wait["blacklist"][op.param2] = True
#adjust these to match each account's own script
#--------------------------NOTIFIED_UPDATE_GROUP---------------------
if op.type == 11:
if wait["linkprotect"] == True:
if op.param2 in Bots:
pass
else:
X = cl.getGroup(op.param1)
X.preventJoinByTicket = False
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
random.choice(KAC).acceptGroupInvitationByTicket(op.param1,Ti) #kicker join
X.preventJoinByTicket = True
cl.updateGroup(X)
cl.kickoutFromGroup(op.param1,[op.param2])
#cl.leaveGroup(op.param1)
else:
pass
#------------------------[Welcome]----------------------------
#-------------------------------------------------------
if op.type == 19:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\nWᴀʜ ʙᴀʜᴀʏᴀ ɴɪʜ ᴏʀᴀɴɢ ᴋɪᴄᴋᴇʀ")
print "Kicker Tuh Asal Kick"
#------Open QR Kick start------#
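# Flow (as read from the calls below): lock the ticket, warn the group, rejoin via a
# reissued ticket, kick the invitee (op.param3), then leave and blacklist the opener.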
if op.type == 11:
if wait["linkprotect"] == True:
# if wait["protect"] == False
if op.param2 not in Bots:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = True
cl.updateGroup(G)
cl.sendText(op.param1,"please do not open link group-_-")
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.01)
cl.updateGroup(G)
cl.kickoutFromGroup(op.param1,[op.param3])
# ki7.updateGroup(G)
cl.leaveGroup(op.param1)
wait["blacklist"][op.param2] = True
#------Open QR Kick finish-----#
#------------------------------------------------------------------------------------
#------------------------------------------------------------------------------------
if op.type == 17:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
ginfo = cl.getGroup(op.param1)
contact = cl.getContact(op.param2)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(op.param1,"Hallo " + cl.getContact(op.param2).displayName + "\n╔═════════════\n║Hᴀɪ Sᴀʏ Wᴇʟᴄᴏᴍᴇ ᴛᴏ " + str(ginfo.name) + "\n╠═════════════\n" + "║Fᴏᴜɴᴅᴇʀ ɢʀᴏᴜᴘ =>>> " + str(ginfo.name) + " :\n║" + ginfo.creator.displayName + "\n╠═════════════\n" + "║😊Sᴇᴍᴏɢᴀ Bᴇᴛᴀʜ ʏᴀ 😘 \n╚═════════════")
cl.sendImageWithURL(op.param1,image)
print "MEMBER JOIN TO GROUP"
if op.type == 15:
if wait["Sambutan"] == True:
if op.param2 in admin:
return
#cl.sendText(op.param1,"Hᴜsss Hᴜssss Sᴀɴᴀᴀ Pᴇʀɢɪ " \n + cl.getContact(op.param2).displayName + "\n╔═════════════\n║Jᴀɴɢᴀɴ Kᴇᴍʙᴀʟɪ ʟᴀɢɪ Yᴀ \n║Gᴋ ᴘᴀɴᴛᴀs Lᴏ ᴀᴅᴀ ᴅɪ sɪɴɪ..!! \n╚═════════════")
cl.sendText(op.param1,"Hᴜsss Hᴜssss Sᴀɴᴀᴀ Pᴇʀɢɪ " + cl.getContact(op.param2).displayName + "\n╔═════════════\n Jᴀɴɢᴀɴ Kᴇᴍʙᴀʟɪ ʟᴀɢɪ Yᴀ :v \n║Gᴋ ᴘᴀɴᴛᴀs Lᴏ ᴀᴅᴀ ᴅɪ sɪɴɪ..!! \n╚═════════════")
print "MEMBER HAS LEFT THE GROUP"
if op.type == 55:
print "[NOTIFIED_READ_MESSAGE]"
try:
if op.param1 in wait2['readPoint']:
Nama = cl.getContact(op.param2).displayName
if Nama in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += "\n🐶 " + Nama
wait2['ROM'][op.param1][op.param2] = "🐷 " + Nama
now2 = datetime.now()
wait2['setTime'][op.param1] = datetime.strftime(now2,"%H:%M")  # was msg.to, which is undefined in this op handler
else:
pass  # the bare "cl.sendText" reference here did nothing
except:
pass
#--------------------------------------------------------------
#------Lurker ("sider") check-----#
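# When cctv['cyduk'] is armed for a room, each new reader is called out by name,
# their profile picture is posted, and summon() mentions them in the chat.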
if op.type == 55:
try:
if cctv['cyduk'][op.param1]==True:
if op.param1 in cctv['point']:
Name = cl.getContact(op.param2).displayName
Np = cl.getContact(op.param2).pictureStatus
if Name in cctv['sidermem'][op.param1]:
pass
else:
cctv['sidermem'][op.param1] += "\n• " + Name
if " " in Name:
nick = Name.split(' ')
if len(nick) == 2:
cl.sendText(op.param1, "ʜᴀʟᴏ " + "☞ " + nick[0] + " ☜" + "\nɴɢᴀᴘᴀɪɴ ʟᴏ ᴄᴄᴛᴠ ᴅᴏᴀɴᴋ \nᴄᴄᴛᴠ ʙᴀʏᴀʀ sɪɴɪ 😉")
cl.sendImageWithURL(op.param1, "http://dl.profile.line-cdn.net/" + Np)
summon(op.param1, [op.param2])
else:
cl.sendText(op.param1, "ɴᴀʜ ᴋᴀɴ " + "☞ " + nick[1] + " ☜" + "\nᴋᴇᴛᴀᴜᴡᴀɴ ɴɢɪɴᴛɪᴘ 😏. . .\nMᴀsᴜᴋ sɪɴɪ ɢᴀʙᴜɴɢ ᴄʜᴀᴛ 😆😂😛")
cl.sendImageWithURL(op.param1, "http://dl.profile.line-cdn.net/" + Np)
summon(op.param1, [op.param2])
else:
cl.sendText(op.param1, "ᴡᴏɪ..!! " + "☞ " + Name + " ☜" + "\nʙᴇᴛᴀʜ ᴀᴍᴀᴛ ᴊᴀᴅɪ sɪᴅᴇʀ \nʙɪɴᴛɪᴛᴀɴ ᴀᴡᴀs ʟᴏʜ 😝")
cl.sendImageWithURL(op.param1, "http://dl.profile.line-cdn.net/" + Np)
summon(op.param1, [op.param2])
else:
pass
else:
pass
except:
pass
else:
pass
#-------------------------------------------------------------------------
if op.type == 55:
try:
if op.param1 in wait2['readPoint']:
if op.param2 in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += op.param2
wait2['ROM'][op.param1][op.param2] = op.param2
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
else:
pass
except:
pass
except Exception as error:
print error
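# Clock helpers: a2() gates on ten-minute marks (currently unused; its call inside
# nameUpdate is commented out) and nameUpdate() stamps the current time onto the
# display name roughly every ten minutes while wait["clock"] is on.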
def a2():
now2 = datetime.now()
nowT = datetime.strftime(now2,"%M")
if nowT in ["10","20","30","40","50","00"]:  # "%M" yields two chars, so the old nowT[14:] slice was always empty
return False
else:
return True
def nameUpdate():
while True:
try:
#while a2():
#pass
if wait["clock"] == True:
now2 = datetime.now()
nowT = datetime.strftime(now2,"(%H:%M)")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
time.sleep(600)
except:
pass
thread2 = threading.Thread(target=nameUpdate)
thread2.daemon = True
thread2.start()
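# Main long-poll loop: fetch a batch of operations (count 5), advance the local
# revision cursor past each processed op, and dispatch every non-END_OF_OPERATION
# op to bot().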
while True:
try:
Ops = cl.fetchOps(cl.Poll.rev, 5)
except EOFError:
raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))
for Op in Ops:
if (Op.type != OpType.END_OF_OPERATION):
cl.Poll.rev = max(cl.Poll.rev, Op.revision)
bot(Op)
[file: ExPyBR/ExESPy001.py, repo: us19861229c/Meu-aprendizado-Python, Python, 161 bytes, license: Unlicense, 1 star]
"""
1.Faça um Programa que mostre a mensagem "Alo mundo" na tela.
"""
print("Faça um Programa que mostre a mensagem 'Alo mundo' na tela.")
print("Olá mundo!")
[file: chitra/import_utils.py, repo: aniketmaurya/Chitra, Python, 116 bytes, license: Apache-2.0, 158 stars, 112 issues, 33 forks]
import importlib.util  # import the submodule explicitly; plain "import importlib" is not guaranteed to expose it


def is_installed(module_name: str) -> bool:
    # find_spec returns None when the module cannot be located
    return importlib.util.find_spec(module_name) is not None
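# A minimal usage sketch (not part of the original file; module names are just
# examples): find_spec returns a ModuleSpec for importable modules, None otherwise.
if __name__ == "__main__":
    print(is_installed("json"))                         # stdlib module -> True
    print(is_installed("definitely_not_a_module_xyz"))  # unknown name -> False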
[file: stepik/3559/66578/step_8/script.py, repo: tshemake/Software-Development, Python, 23 bytes, license: Unlicense]
print('2 + 2 =', 2 + 2)
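# expected output: 2 + 2 = 4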
[file: benchmarks/SimResults/combinations_spec_ml_fulltrained/oldstuff/cmp_bwavesgccmcfleslie3d/power.py, repo: TugberkArkose/MLScheduler, Python, 68,652 bytes, license: Unlicense]
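# Note: the dict below reads like a McPAT-style power report (Area / Gate Leakage /
# Peak Dynamic / Runtime Dynamic / Subthreshold Leakage per hardware unit) serialized
# as a Python literal; 'Core' is a list with one entry per simulated core.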
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.063025,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.252192,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.336758,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.28979,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.501812,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.287803,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.07941,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.234815,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.93592,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0636208,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0105051,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0997069,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0776918,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.163328,
'Execution Unit/Register Files/Runtime Dynamic': 0.0881969,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.258199,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.653394,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.47857,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00171471,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00171471,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00149793,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000582287,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00111605,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00604339,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0162826,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0746871,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.75074,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.220284,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.253671,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.20335,
'Instruction Fetch Unit/Runtime Dynamic': 0.570969,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0912664,
'L2/Runtime Dynamic': 0.0149542,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.91288,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.30685,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0865673,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0865674,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.32334,
'Load Store Unit/Runtime Dynamic': 1.82034,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.21346,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.426921,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0757577,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0770213,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.295384,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0364302,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.584853,
'Memory Management Unit/Runtime Dynamic': 0.113451,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 22.7004,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.221959,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0174892,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.146944,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.386393,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.38467,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0248217,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.222185,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.132187,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.123111,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.198574,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.100234,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.421919,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.120538,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.29263,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0249729,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00516385,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0467096,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0381898,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0716825,
'Execution Unit/Register Files/Runtime Dynamic': 0.0433536,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.104611,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.268124,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.36096,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00096861,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00096861,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000870559,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00035172,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000548599,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00335638,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00832581,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0367128,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.33525,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.106878,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.124693,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.6671,
'Instruction Fetch Unit/Runtime Dynamic': 0.279967,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0412946,
'L2/Runtime Dynamic': 0.00689639,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.56248,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.647909,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0428786,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0428785,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.76496,
'Load Store Unit/Runtime Dynamic': 0.902249,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.105731,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.211462,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0375244,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0380905,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.145197,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0176811,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.365766,
'Memory Management Unit/Runtime Dynamic': 0.0557716,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.7212,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0656927,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00635391,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0618182,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.133865,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.73971,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0146909,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.214227,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0762629,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.103046,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.166209,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0838969,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.353152,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.106162,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.16664,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0144077,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00432221,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0368798,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0319654,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0512875,
'Execution Unit/Register Files/Runtime Dynamic': 0.0362876,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.081369,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.218818,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.22786,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000754142,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000754142,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000672222,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000268632,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000459186,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00263969,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00668167,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0307292,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.95464,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0862819,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.10437,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.26802,
'Instruction Fetch Unit/Runtime Dynamic': 0.230703,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0400426,
'L2/Runtime Dynamic': 0.0100873,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.34645,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.549699,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0358894,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0358894,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.51592,
'Load Store Unit/Runtime Dynamic': 0.762582,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0884971,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.176994,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0314079,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0319777,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.121532,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0142383,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.331594,
'Memory Management Unit/Runtime Dynamic': 0.046216,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.9117,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0378998,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00511039,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0524499,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0954601,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.37291,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0118559,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.212,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.060521,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0715576,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.11542,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.05826,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.245237,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0725621,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.07335,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0114337,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00300145,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0262853,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0221975,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.037719,
'Execution Unit/Register Files/Runtime Dynamic': 0.025199,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0583403,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.160581,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.0484,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000374453,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000374453,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000332464,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000132157,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000318869,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00140024,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00336457,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0213391,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.35735,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0530035,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0724771,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.64174,
'Instruction Fetch Unit/Runtime Dynamic': 0.151584,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0384802,
'L2/Runtime Dynamic': 0.00959312,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.0509,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.406273,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0263276,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0263275,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.17522,
'Load Store Unit/Runtime Dynamic': 0.562439,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0649195,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.129838,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0230401,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0236093,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0843949,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00871477,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.280083,
'Memory Management Unit/Runtime Dynamic': 0.0323241,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7983,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0300766,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00359451,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0366169,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.070288,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.87462,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 6.7133013220344875,
'Runtime Dynamic': 6.7133013220344875,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.334987,
'Runtime Dynamic': 0.0855449,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 67.4667,
'Peak Power': 100.579,
'Runtime Dynamic': 12.4575,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 67.1317,
'Total Cores/Runtime Dynamic': 12.3719,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.334987,
'Total L3s/Runtime Dynamic': 0.0855449,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.111597
| 124
| 0.682282
| 8,082
| 68,652
| 5.789656
| 0.067805
| 0.12344
| 0.11284
| 0.093349
| 0.938109
| 0.930244
| 0.916866
| 0.886369
| 0.862156
| 0.841533
| 0
| 0.132649
| 0.224189
| 68,652
| 914
| 125
| 75.111597
| 0.745893
| 0
| 0
| 0.642232
| 0
| 0
| 0.657
| 0.048068
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d67b9f4c0a74112bc719ff63b7ae344e9dd1f5d
| 27,052
|
py
|
Python
|
run_nerf_helpers.py
|
songrise/nerf
|
474786b476a8b51f960481d50c8136b4782239d3
|
[
"MIT"
] | null | null | null |
run_nerf_helpers.py
|
songrise/nerf
|
474786b476a8b51f960481d50c8136b4782239d3
|
[
"MIT"
] | null | null | null |
run_nerf_helpers.py
|
songrise/nerf
|
474786b476a8b51f960481d50c8136b4782239d3
|
[
"MIT"
] | null | null | null |
# %%
import os
import sys
import tensorflow as tf
import numpy as np
import imageio
import json
# Misc utils
def img2mse(x, y): return tf.reduce_mean(tf.square(x - y))
def mse2psnr(x): return -10.*tf.math.log(x)/tf.math.log(10.)  # tf.log was removed in TF2; tf.math.log works in both TF1 and TF2
def to8b(x): return (255*np.clip(x, 0, 1)).astype(np.uint8)
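# Worked example (not part of the original file): an MSE of 0.01 corresponds to
# -10 * log10(0.01) = 20 dB, so in TF2 eager mode:
# print(mse2psnr(tf.constant(0.01)).numpy())  # ~20.0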
# Positional encoding
class Embedder:
#! Re should be the \gamma for positional encoding?
def __init__(self, **kwargs):
self.kwargs = kwargs
self.create_embedding_fn()
def create_embedding_fn(self):
embed_fns = []
d = self.kwargs['input_dims']
out_dim = 0
if self.kwargs['include_input']:
embed_fns.append(lambda x: x)
out_dim += d
max_freq = self.kwargs['max_freq_log2']
N_freqs = self.kwargs['num_freqs']
if self.kwargs['log_sampling']:
freq_bands = 2.**tf.linspace(0., max_freq, N_freqs)
else:
freq_bands = tf.linspace(2.**0., 2.**max_freq, N_freqs)
for freq in freq_bands:
for p_fn in self.kwargs['periodic_fns']:
embed_fns.append(lambda x, p_fn=p_fn,
freq=freq: p_fn(x * freq))
out_dim += d
self.embed_fns = embed_fns
self.out_dim = out_dim
def embed(self, inputs):
return tf.concat([fn(inputs) for fn in self.embed_fns], -1)
def get_embedder(multires, i=0):
if i == -1:
# return itself without any embedding
return tf.identity, 3
embed_kwargs = {
'include_input': True,
'input_dims': 3,
'max_freq_log2': multires-1,
'num_freqs': multires,
'log_sampling': True,
'periodic_fns': [tf.math.sin, tf.math.cos],
}
embedder_obj = Embedder(**embed_kwargs)
def embed(x, eo=embedder_obj): return eo.embed(x)
return embed, embedder_obj.out_dim
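# Usage sketch (assuming the standard NeRF setting multires=10 for points):
# the output dimension is 3 (identity) + 3 * 2 * 10 (sin and cos per octave) = 63.
# embed_fn, out_dim = get_embedder(10)
# gamma = embed_fn(tf.random.uniform([1024, 3]))  # shape (1024, 63), out_dim == 63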
class SphericalBasis(tf.keras.Model):
def __init__(self, n_order=8, inputch_views=27, **kwargs):
"""
Args:
n_order: the order of the basis function
"""
super(SphericalBasis, self).__init__()
self.n_order = n_order
self.basis = []
for _ in range(n_order):
if _ <= n_order//4:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=8))
elif _ <= n_order//2:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=16))
else:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=32))
def call(self, inputs, coeff=None):
"""
Args:
inputs: the embedded view direction
coeff: the coefficients of the basis function [batch_size, n_order, 1]
Returns:
rgb: (None, 3) predicted rgb in the range [-1, 1]
"""
if coeff is None:
coeff = tf.ones_like(inputs)
print("coeff is empty")
# duplicate the coeff for 3 channels
coeff = tf.tile(tf.expand_dims(coeff, axis=2), [1, 1, 3])
x = inputs
rgb = self.basis[0](x)
for i in range(1, self.n_order):
# linear combination of basis functions
rgb += coeff[:, i]*self.basis[i](x)
# rgb in [-1, 1] since the view-dependent effect may be black
# rgb = tf.keras.activations.(rgb)
return rgb
def create_basis(self, width=8, inputch_views=27):
"""
Create the basis function
"""
basis = tf.keras.Sequential([
tf.keras.layers.Dense(width, activation=tf.keras.activations.relu,
input_shape=(inputch_views,)),
tf.keras.layers.Dense(width, activation=tf.keras.activations.relu),
tf.keras.layers.Dense(3, activation=None)
])
return basis
# Model architecture
# def init_nerf_model(D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False):
# #!Re see p.18 of the paper for the model architecture
# relu = tf.keras.layers.ReLU()
# def dense(W, act=relu): return tf.keras.layers.Dense(W, activation=act)
# print('MODEL', input_ch, input_ch_views, type(
# input_ch), type(input_ch_views), use_viewdirs)
# input_ch = int(input_ch)
# input_ch_views = int(input_ch_views)
# inputs = tf.keras.Input(shape=(input_ch + input_ch_views))
# inputs_pts, inputs_views = tf.split(inputs, [input_ch, input_ch_views], -1)
# inputs_pts.set_shape([None, input_ch])
# inputs_views.set_shape([None, input_ch_views])
# print(inputs.shape, inputs_pts.shape, inputs_views.shape)
# outputs = inputs_pts
# for i in range(D):
# outputs = dense(W)(outputs)
# if i in skips:
# outputs = tf.concat([inputs_pts, outputs], -1)
# if use_viewdirs:
# # !Re alpha is view-independent, if not used, then it simulates Lambertian.
# alpha_out = dense(1, act=None)(outputs) # !Re shouldn't this be relu?
# bottleneck = dense(256, act=None)(outputs)
# # !Re input view direction here
# inputs_viewdirs = tf.concat(
# [bottleneck, inputs_views], -1) # concat viewdirs
# outputs = inputs_viewdirs
# # The supplement to the paper states there are 4 hidden layers here, but this is an error since
# # the experiments were actually run with 1 hidden layer, so we will leave it as 1.
# # ! Re in CVPR paper, there is only one layer.
# for i in range(1):
# outputs = dense(W//2)(outputs)
# # !Re: rgb, shouldn't this be sigmoid?
# outputs = dense(3, act=None)(outputs)
# outputs = tf.concat([outputs, alpha_out], -1) # !Re rgb+a
# else:
# outputs = dense(output_ch, act=None)(outputs)
# model = tf.keras.Model(inputs=inputs, outputs=outputs)
# return model
# # modified architecture
# def init_nerf_model(D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False):
# #!Re see p.18 of the paper for the model architecture
# relu = tf.keras.layers.ReLU()
# def dense(W, act=relu): return tf.keras.layers.Dense(W, activation=act)
# print('MODEL', input_ch, input_ch_views, type(
# input_ch), type(input_ch_views), use_viewdirs)
# input_ch = int(input_ch)
# input_ch_views = int(input_ch_views)
# inputs = tf.keras.Input(shape=(input_ch + input_ch_views))
# inputs_pts, inputs_views = tf.split(inputs, [input_ch, input_ch_views], -1)
# inputs_pts.set_shape([None, input_ch])
# inputs_views.set_shape([None, input_ch_views])
# print(inputs.shape, inputs_pts.shape, inputs_views.shape)
# outputs = inputs_pts
# for i in range(D):
# outputs = dense(W)(outputs)
# if i in skips:
# outputs = tf.concat([inputs_pts, outputs], -1)
# n_order = 8 # neural expansion order
# if use_viewdirs:
# #! Re modified this following NeX and refNerf
# alpha_out = dense(1, act=None)(outputs)
# diffuse_out = dense(3, act="sigmoid")(outputs)
# bottleneck = dense(256, act=None)(outputs)
# inputs_viewdirs = tf.concat(
# [bottleneck, inputs_views], -1) # concat viewdirs
# outputs = inputs_viewdirs
# # The supplement to the paper states there are 4 hidden layers here, but this is an error since
# # the experiments were actually run with 1 hidden layer, so we will leave it as 1.
# for i in range(1): # !Re: Modified for 4 layers in order to get better view-dependent results
# outputs = dense(W//2)(outputs)
# outputs = dense(n_order, act=None)(outputs) # coeff
# basis = SphericalBasis(n_order, input_ch_views)
# outputs = basis(inputs_views, coeff=outputs) # specular
# outputs = diffuse_out + outputs # diffuse + specular
# outputs = tf.concat([outputs, alpha_out], -1) # !Re rgb+a
# else:
# outputs = dense(output_ch, act=None)(outputs)
# model = tf.keras.Model(inputs=inputs, outputs=outputs)
# return model
# %%
# Misc utils
def to8b_vec(x):
""""
Encode vector in [-1,1] in to 8 bit color space
"""
x = np.clip(x, -1, 1)
x = (255*(x + 1)/2).astype(np.uint8)
return x
class SphericalBasis(tf.keras.Model):
def __init__(self, n_order=8, inputch_views=27, **kwargs):
"""
Args:
n_order: the order of the basis function
"""
super(SphericalBasis, self).__init__()
self.n_order = n_order
self.basis = []
for _ in range(n_order):
if _ <= n_order//4:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=8))
elif _ <= n_order//2:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=32))
else:
self.basis.append(self.create_basis(
inputch_views=inputch_views, width=64))
def call(self, inputs, coeff=None):
"""
Args:
inputs: the embedded view direction
coeff: the coefficients of the basis function [batch_size, n_order, 1]
Returns:
rgb: (None, 3) predicted rgb in the range [-1, 1]
"""
if coeff is None:
coeff = tf.ones_like(inputs)
print("coeff is empty")
# duplicate the coeff for 3 channels
coeff = tf.tile(tf.expand_dims(coeff, axis=2), [1, 1, 3])
x = inputs
rgb = self.basis[0](x)
for i in range(1, self.n_order):
# linear combination of basis functions
rgb += coeff[:, i]*self.basis[i](x)
# rgb in [-1, 1] since the view-dependent effect may be black
# rgb = tf.keras.activations.(rgb)
return rgb
def create_basis(self, width=8, inputch_views=27):
"""
Create the basis function
"""
basis = tf.keras.Sequential([
tf.keras.layers.Dense(width, activation=tf.keras.activations.elu,
input_shape=(inputch_views,)),
tf.keras.layers.Dense(width, activation=tf.keras.activations.elu),
tf.keras.layers.Dense(3, activation=None)
])
return basis
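# Usage sketch (hypothetical shapes, not part of the original file):
# basis = SphericalBasis(n_order=8, inputch_views=27)
# views = tf.random.uniform([1024, 27])  # embedded view directions
# coeff = tf.random.uniform([1024, 8])   # per-ray basis coefficients
# rgb = basis(views, coeff=coeff)        # (1024, 3) view-dependent color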
# Model architecture
# modified architecture v2
def init_nerf_model(D=8, W=256, input_ch=3, input_ch_views=3, output_ch=4, skips=[4], use_viewdirs=False):
#!Re see p.18 of the paper for the model architecture
relu = tf.keras.layers.ReLU()
def dense(W, act=relu): return tf.keras.layers.Dense(W, activation=act)
print('MODEL', input_ch, input_ch_views, type(
input_ch), type(input_ch_views), use_viewdirs)
input_ch = int(input_ch)
input_ch_views = int(input_ch_views)
inputs = tf.keras.Input(shape=(input_ch + input_ch_views))
inputs_pts, inputs_views = tf.split(inputs, [input_ch, input_ch_views], -1)
#! assume the embedding always keeps raw input in first 3 elem
input_views_raw, input_views_embedded = tf.split(
inputs_views, [3, input_ch_views-3], -1)
inputs_pts.set_shape([None, input_ch])
input_views_raw.set_shape([None, 3])
input_views_embedded.set_shape([None, input_ch_views-3])
print(inputs.shape, inputs_pts.shape, inputs_views.shape)
outputs = inputs_pts
for i in range(D):
outputs = dense(W)(outputs)
if i in skips:
outputs = tf.concat([inputs_pts, outputs], -1)
n_order = 8 # neural expansion order
if use_viewdirs:
#! Re modified this following NeX and refNerf
alpha_out = dense(1, act=None)(outputs)
# diffuse_out = dense(3, act='sigmoid')(outputs)
norm_outputs = dense(3, act=None)(outputs) # pred normal
bottleneck = dense(256, act=None)(outputs)
# compute the angle between the view direction and the normal
nDotv = tf.reduce_sum(norm_outputs * input_views_raw, axis=-1)
nDotv = tf.expand_dims(nDotv, -1)
inputs_viewdirs = tf.concat(
[bottleneck, norm_outputs, nDotv, input_views_embedded], -1) # concat viewdirs
outputs = inputs_viewdirs
# The supplement to the paper states there are 4 hidden layers here, but this is an error since
# the experiments were actually run with 1 hidden layer. (This modified version uses 2 layers of width W//2.)
outputs = dense(W//2)(outputs)
outputs = dense(W//2)(outputs)
#! use tanh to express the idea that specular could be black
#! Feb 3. modified back to vanilla nerf
rgb_outputs = dense(3, act=None)(outputs)
# isOutline = tf.pow(isOutline, 6)
# rgb_outputs = diffuse_out + specular_out
# specular_out # diffuse + specular
# !Re rgb+a+norm, 7 channels
outputs = tf.concat([rgb_outputs, alpha_out, norm_outputs], -1)
else:
outputs = dense(output_ch, act=None)(outputs)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
return model
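# Construction sketch (assumes multires=10 for points and multires_views=4 for
# directions, i.e. input_ch=63 and input_ch_views=27; not part of the original):
# model = init_nerf_model(D=8, W=256, input_ch=63, input_ch_views=27, use_viewdirs=True)
# out = model(tf.random.uniform([1024, 63 + 27]))  # (1024, 7): rgb + alpha + normal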
# Ray helpers
def get_rays(H, W, focal, c2w):
"""Get ray origins, directions from a pinhole camera."""
#!Re: c2w is the camera to world matrix, which is the inverse of the world to camera matrix.
#!Re: https://en.wikipedia.org/wiki/Pinhole_camera_model
i, j = tf.meshgrid(tf.range(W, dtype=tf.float32),
tf.range(H, dtype=tf.float32), indexing='xy')
#!Re multiply by .5 is just dividing by 2.
#!Re i,j is the mapped into [-W/2, W/2], see pinhole camera model for intuition.
#!Re dirs are the normalized ray directions in camera space. the z axis is fixed.
dirs = tf.stack([(i-W*.5)/focal, -(j-H*.5)/focal, -tf.ones_like(i)], -1)
#!Re: c2w[:3, :3] is rotation part of the camera to world matrix.
rays_d = tf.reduce_sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
#! Re: c2w[:3, -1] is the translation part.
rays_o = tf.broadcast_to(c2w[:3, -1], tf.shape(rays_d))
return rays_o, rays_d
#!Re see https://github.com/bmild/nerf/issues/92
def get_rays_np(H, W, focal, c2w):
"""Get ray origins, directions from a pinhole camera."""
if False:  # flip to True to route rays through 2x sub-pixel sampling
return get_rays_np_sub_pix(H, W, focal, c2w)
i, j = np.meshgrid(np.arange(W, dtype=np.float32),
np.arange(H, dtype=np.float32), indexing='xy')
dirs = np.stack([(i-W*.5)/focal, -(j-H*.5)/focal, -np.ones_like(i)], -1)
rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
rays_o = np.broadcast_to(c2w[:3, -1], np.shape(rays_d))
return rays_o, rays_d
def get_rays_np_sub_pix(H, W, focal, c2w):
"""
Get ray origins, directions from a pinhole camera.
Sample 4 rays for a single pixel
"""
i, j = np.meshgrid(np.arange(2*W, dtype=np.float32),
np.arange(2*H, dtype=np.float32), indexing='xy')
dirs = np.stack([(i-W)/(focal), -(j-H) /
(focal), -np.ones_like(i)], -1)
rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
# just divide them by 2 since the rays are centered wrt the principal ray
rays_d /= np.array([2, 2, 1])
rays_o = np.broadcast_to(c2w[:3, -1], np.shape(rays_d))
return rays_o, rays_d
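# quick smoke test of get_rays_np_sub_pix (executed at import time)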
a = get_rays_np_sub_pix(2, 2, 0.5, np.array(
[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]]))
def ndc_rays(H, W, focal, near, rays_o, rays_d):
"""Normalized device coordinate rays.
Space such that the canvas is a cube with sides [-1, 1] in each axis.
Args:
H: int. Height in pixels.
W: int. Width in pixels.
focal: float. Focal length of pinhole camera.
near: float or array of shape[batch_size]. Near depth bound for the scene.
rays_o: array of shape [batch_size, 3]. Camera origin.
rays_d: array of shape [batch_size, 3]. Ray direction.
Returns:
rays_o: array of shape [batch_size, 3]. Camera origin in NDC.
rays_d: array of shape [batch_size, 3]. Ray direction in NDC.
"""
# Shift ray origins to near plane
#!Re todo check this later.
t = -(near + rays_o[..., 2]) / rays_d[..., 2]
rays_o = rays_o + t[..., None] * rays_d
# Projection
o0 = -1./(W/(2.*focal)) * rays_o[..., 0] / rays_o[..., 2]
o1 = -1./(H/(2.*focal)) * rays_o[..., 1] / rays_o[..., 2]
o2 = 1. + 2. * near / rays_o[..., 2]
d0 = -1./(W/(2.*focal)) * \
(rays_d[..., 0]/rays_d[..., 2] - rays_o[..., 0]/rays_o[..., 2])
d1 = -1./(H/(2.*focal)) * \
(rays_d[..., 1]/rays_d[..., 2] - rays_o[..., 1]/rays_o[..., 2])
d2 = -2. * near / rays_o[..., 2]
rays_o = tf.stack([o0, o1, o2], -1)
rays_d = tf.stack([d0, d1, d2], -1)
return rays_o, rays_d
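# Usage sketch (assumes a forward-facing scene with the near plane at 1.0):
# rays_o, rays_d = get_rays(H, W, focal, c2w)
# rays_o = tf.reshape(rays_o, [-1, 3])
# rays_d = tf.reshape(rays_d, [-1, 3])
# rays_o_ndc, rays_d_ndc = ndc_rays(H, W, focal, 1., rays_o, rays_d)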
# Hierarchical sampling helper
def sample_pdf(bins, weights, N_samples, det=False):
# Get pdf
weights += 1e-5 # prevent nans
#! Re normalize
pdf = weights / tf.reduce_sum(weights, -1, keepdims=True)
cdf = tf.cumsum(pdf, -1)
cdf = tf.concat([tf.zeros_like(cdf[..., :1]), cdf], -1)
# Take uniform samples
if det:
#! Re if deterministic, then take uniform samples
u = tf.linspace(0., 1., N_samples)
u = tf.broadcast_to(u, list(cdf.shape[:-1]) + [N_samples])
else:
#!Re else take samples from uniform distribution
u = tf.random.uniform(list(cdf.shape[:-1]) + [N_samples])
# Invert CDF
#!Re index of the bin
inds = tf.searchsorted(cdf, u, side='right')
below = tf.maximum(0, inds-1)
above = tf.minimum(cdf.shape[-1]-1, inds)
inds_g = tf.stack([below, above], -1)
cdf_g = tf.gather(cdf, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2)
bins_g = tf.gather(bins, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2)
denom = (cdf_g[..., 1]-cdf_g[..., 0])
denom = tf.where(denom < 1e-5, tf.ones_like(denom), denom)
t = (u-cdf_g[..., 0])/denom
samples = bins_g[..., 0] + t * (bins_g[..., 1]-bins_g[..., 0])
return samples
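# Sketch of hierarchical sampling with sample_pdf (hypothetical names/values):
# z_vals holds the coarse samples per ray, weights come from the coarse pass.
# z_mid = .5 * (z_vals[..., 1:] + z_vals[..., :-1])  # bin midpoints
# z_fine = sample_pdf(z_mid, weights[..., 1:-1], N_samples=128)
# More fine samples land where the coarse weights (density) are large.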
# %%
if __name__ == "__main__":
rays = [get_rays_np_sub_pix(4, 4, 0.5, np.array(
[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])) for _ in range(16)]
rays = np.stack(rays, 0)
# %%
# Ray helpers
def get_rays(H, W, focal, c2w):
"""Get ray origins, directions from a pinhole camera."""
#!Re: c2w is the camera to world matrix, which is the inverse of the world to camera matrix.
#!Re: https://en.wikipedia.org/wiki/Pinhole_camera_model
i, j = tf.meshgrid(tf.range(W, dtype=tf.float32),
tf.range(H, dtype=tf.float32), indexing='xy')
#!Re multiply by .5 is just dividing by 2.
#!Re i,j is the mapped into [-W/2, W/2], see pinhole camera model for intuition.
#!Re dirs are the normalized ray directions in camera space. the z axis is fixed.
dirs = tf.stack([(i-W*.5)/focal, -(j-H*.5)/focal, -tf.ones_like(i)], -1)
#!Re: c2w[:3, :3] is rotation part of the camera to world matrix.
rays_d = tf.reduce_sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
#! Re: c2w[:3, -1] is the translation part.
rays_o = tf.broadcast_to(c2w[:3, -1], tf.shape(rays_d))
return rays_o, rays_d
#!Re see https://github.com/bmild/nerf/issues/92
def get_rays_np(H, W, focal, c2w):
"""Get ray origins, directions from a pinhole camera."""
if None:
return get_rays_np_sub_pix(H, W, focal, c2w)
i, j = np.meshgrid(np.arange(W, dtype=np.float32),
np.arange(H, dtype=np.float32), indexing='xy')
dirs = np.stack([(i-W*.5)/focal, -(j-H*.5)/focal, -np.ones_like(i)], -1)
rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
rays_o = np.broadcast_to(c2w[:3, -1], np.shape(rays_d))
return rays_o, rays_d
def get_rays_np_sub_pix(H, W, focal, c2w):
"""
Get ray origins, directions from a pinhole camera.
Sample 4 rays for a single pixel
"""
i, j = np.meshgrid(np.arange(2*W, dtype=np.float32),
np.arange(2*H, dtype=np.float32), indexing='xy')
dirs = np.stack([(i-W)/(focal), -(j-H) /
(focal), -np.ones_like(i)], -1)
rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3, :3], -1)
# just divide them by 2 since the rays are centered wrt the principal ray
rays_d /= np.array([2, 2, 1])
rays_o = np.broadcast_to(c2w[:3, -1], np.shape(rays_d))
return rays_o, rays_d
a = get_rays_np_sub_pix(2, 2, 0.5, np.array(
[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]]))
def ndc_rays(H, W, focal, near, rays_o, rays_d):
"""Normalized device coordinate rays.
Space such that the canvas is a cube with sides [-1, 1] in each axis.
Args:
H: int. Height in pixels.
W: int. Width in pixels.
focal: float. Focal length of pinhole camera.
near: float or array of shape[batch_size]. Near depth bound for the scene.
rays_o: array of shape [batch_size, 3]. Camera origin.
rays_d: array of shape [batch_size, 3]. Ray direction.
Returns:
rays_o: array of shape [batch_size, 3]. Camera origin in NDC.
rays_d: array of shape [batch_size, 3]. Ray direction in NDC.
"""
# Shift ray origins to near plane
#!Re todo check this later.
t = -(near + rays_o[..., 2]) / rays_d[..., 2]
rays_o = rays_o + t[..., None] * rays_d
# Projection
o0 = -1./(W/(2.*focal)) * rays_o[..., 0] / rays_o[..., 2]
o1 = -1./(H/(2.*focal)) * rays_o[..., 1] / rays_o[..., 2]
o2 = 1. + 2. * near / rays_o[..., 2]
d0 = -1./(W/(2.*focal)) * \
(rays_d[..., 0]/rays_d[..., 2] - rays_o[..., 0]/rays_o[..., 2])
d1 = -1./(H/(2.*focal)) * \
(rays_d[..., 1]/rays_d[..., 2] - rays_o[..., 1]/rays_o[..., 2])
d2 = -2. * near / rays_o[..., 2]
rays_o = tf.stack([o0, o1, o2], -1)
rays_d = tf.stack([d0, d1, d2], -1)
return rays_o, rays_d
# Hierarchical sampling helper
def sample_pdf(bins, weights, N_samples, det=False):
    """Draw N_samples per ray from the piecewise-constant PDF that
    weights defines over the intervals in bins."""
    # Get pdf
    weights += 1e-5  # prevent division by zero / NaNs
    #!Re: normalize the weights into a proper probability distribution
    pdf = weights / tf.reduce_sum(weights, -1, keepdims=True)
    cdf = tf.cumsum(pdf, -1)
    cdf = tf.concat([tf.zeros_like(cdf[..., :1]), cdf], -1)
    # Take uniform samples
    if det:
        #!Re: deterministic: N_samples evenly spaced points in [0, 1]
        u = tf.linspace(0., 1., N_samples)
        u = tf.broadcast_to(u, list(cdf.shape[:-1]) + [N_samples])
    else:
        #!Re: stochastic: N_samples i.i.d. draws from Uniform[0, 1)
        u = tf.random.uniform(list(cdf.shape[:-1]) + [N_samples])
    # Invert CDF
    #!Re: inds indexes the first CDF entry strictly greater than each u
    inds = tf.searchsorted(cdf, u, side='right')
below = tf.maximum(0, inds-1)
above = tf.minimum(cdf.shape[-1]-1, inds)
inds_g = tf.stack([below, above], -1)
cdf_g = tf.gather(cdf, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2)
bins_g = tf.gather(bins, inds_g, axis=-1, batch_dims=len(inds_g.shape)-2)
denom = (cdf_g[..., 1]-cdf_g[..., 0])
denom = tf.where(denom < 1e-5, tf.ones_like(denom), denom)
t = (u-cdf_g[..., 0])/denom
samples = bins_g[..., 0] + t * (bins_g[..., 1]-bins_g[..., 0])
return samples
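# %%
#!Re: added usage sketch (assumes eager TensorFlow): with essentially all of
#!Re: the weight in the second bin, deterministic inverse-CDF sampling should
#!Re: place nearly every sample inside [1, 2].
if __name__ == "__main__":
    bins = tf.constant([[0., 1., 2.]])  # two bins: [0, 1) and [1, 2)
    weights = tf.constant([[0., 1.]])   # all probability mass in the second bin
    s = np.array(sample_pdf(bins, weights, N_samples=5, det=True))
    # expected roughly [0, 1.25, 1.5, 1.75, 2]: only the u = 0 sample stays at 0
    assert np.mean(s >= 1.0) >= 0.8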
# %%
if __name__ == "__main__":
rays = [get_rays_np_sub_pix(4, 4, 0.5, np.array(
[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])) for _ in range(16)]
rays = np.stack(rays, 0)
# %%
# --- Tests/test_Entrez.py, from the eoc21/biopython repository ---
'''Testing code for Bio.Entrez parsers.
'''
import unittest
from Bio import Entrez
class EInfoTest(unittest.TestCase):
'''Tests for parsing XML output returned by EInfo
'''
def test_list(self):
'''Test parsing database list returned by EInfo
'''
# To create the XML file, use
# >>> Bio.Entrez.einfo()
input = open('Entrez/einfo1.xml')
record = Entrez.read(input)
self.assertEqual(record["DbList"], ['pubmed',
'protein',
'nucleotide',
'nuccore',
'nucgss',
'nucest',
'structure',
'genome',
'books',
'cancerchromosomes',
'cdd',
'gap',
'domains',
'gene',
'genomeprj',
'gensat',
'geo',
'gds',
'homologene',
'journals',
'mesh',
'ncbisearch',
'nlmcatalog',
'omia',
'omim',
'pmc',
'popset',
'probe',
'proteinclusters',
'pcassay',
'pccompound',
'pcsubstance',
'snp',
'taxonomy',
'toolkit',
'unigene',
'unists'
])
def test_pubmed(self):
'''Test parsing database info returned by EInfo
'''
# To create the XML file, use
# >>> Bio.Entrez.einfo(db="pubmed")
input = open('Entrez/einfo2.xml')
record = Entrez.read(input)
self.assertEqual(record["DbInfo"]['DbName'], 'pubmed')
self.assertEqual(record["DbInfo"]['MenuName'], 'PubMed')
self.assertEqual(record["DbInfo"]['Description'], 'PubMed bibliographic record')
self.assertEqual(record["DbInfo"]['Count'], "17905967")
self.assertEqual(record["DbInfo"]['LastUpdate'], '2008/04/15 06:42')
self.assertEqual(len(record["DbInfo"]['FieldList']), 40)
self.assertEqual(record["DbInfo"]['FieldList'][0]['Name'], 'ALL')
self.assertEqual(record["DbInfo"]['FieldList'][0]['FullName'], 'All Fields')
self.assertEqual(record["DbInfo"]['FieldList'][0]['Description'], 'All terms from all searchable fields')
self.assertEqual(record["DbInfo"]['FieldList'][0]['TermCount'], "70792830")
self.assertEqual(record["DbInfo"]['FieldList'][0]['IsDate'], 'N')
self.assertEqual(record["DbInfo"]['FieldList'][0]['IsNumerical'], 'N')
self.assertEqual(record["DbInfo"]['FieldList'][0]['SingleToken'], 'N')
self.assertEqual(record["DbInfo"]['FieldList'][0]['Hierarchy'], 'N')
self.assertEqual(record["DbInfo"]['FieldList'][0]['IsHidden'], 'N')
self.assertEqual(len(record["DbInfo"]['LinkList']), 46)
self.assertEqual(record["DbInfo"]['LinkList'][0]['Name'], 'pubmed_books_refs')
self.assertEqual(record["DbInfo"]['LinkList'][0]['Menu'], 'Cited in Books')
self.assertEqual(record["DbInfo"]['LinkList'][0]['Description'], 'PubMed links associated with Books')
self.assertEqual(record["DbInfo"]['LinkList'][0]['DbTo'], 'books')
class ESearchTest(unittest.TestCase):
'''Tests for parsing XML output returned by ESearch
'''
def test_pubmed1(self):
'''Test parsing XML returned by ESearch from PubMed (first test)
'''
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="pubmed", term="biopython")
input = open('Entrez/esearch1.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], '5')
self.assertEqual(record['RetMax'], '5')
self.assertEqual(record['RetStart'], '0')
self.assertEqual(len(record['IdList']), 5)
self.assertEqual(record['IdList'][0], '16403221')
self.assertEqual(record['IdList'][1], '16377612')
self.assertEqual(record['IdList'][2], '14871861')
self.assertEqual(record['IdList'][3], '14630660')
self.assertEqual(record['IdList'][4], '12230038')
self.assertEqual(len(record['TranslationSet']), 0)
self.assertEqual(len(record['TranslationStack']), 2)
self.assertEqual(record['TranslationStack'][0]['Term'], 'biopython[All Fields]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'All Fields')
self.assertEqual(record['TranslationStack'][0]['Count'], '5')
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][1], 'GROUP')
self.assertEqual(record['QueryTranslation'], 'biopython[All Fields]')
def test_pubmed2(self):
'''Test parsing XML returned by ESearch from PubMed (second test)
'''
# Search in PubMed for the term cancer for the entrez date from
# the last 60 days and retrieve the first 100 IDs and translations
# using the history parameter.
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="pubmed", term="cancer", reldate=60,
# datetype="edat", retmax=100, usehistory="y")
input = open('Entrez/esearch2.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], "10238")
self.assertEqual(record['RetMax'], "100")
self.assertEqual(record['RetStart'], "0")
self.assertEqual(record['QueryKey'], '12')
self.assertEqual(record['WebEnv'], '0rYFb69LfbTFXfG7-0HPo2BU-ZFWF1s_51WtYR5e0fAzThQCR0WIW12inPQRRIj1xUzSfGgG9ovT9-@263F6CC86FF8F760_0173SID')
self.assertEqual(len(record['IdList']), 100)
self.assertEqual(record['IdList'][0], '18411453')
self.assertEqual(record['IdList'][1], '18411431')
self.assertEqual(record['IdList'][2], '18411430')
self.assertEqual(record['IdList'][3], '18411429')
self.assertEqual(record['IdList'][4], '18411428')
self.assertEqual(record['IdList'][5], '18411402')
self.assertEqual(record['IdList'][6], '18411381')
self.assertEqual(record['IdList'][7], '18411373')
self.assertEqual(record['IdList'][8], '18411372')
self.assertEqual(record['IdList'][9], '18411371')
self.assertEqual(record['IdList'][10], '18411370')
self.assertEqual(record['IdList'][11], '18411367')
self.assertEqual(record['IdList'][12], '18411306')
self.assertEqual(record['IdList'][13], '18411292')
self.assertEqual(record['IdList'][14], '18411277')
self.assertEqual(record['IdList'][15], '18411260')
self.assertEqual(record['IdList'][16], '18411234')
self.assertEqual(record['IdList'][17], '18411200')
self.assertEqual(record['IdList'][18], '18411199')
self.assertEqual(record['IdList'][19], '18411198')
self.assertEqual(record['IdList'][20], '18411197')
self.assertEqual(record['IdList'][21], '18411195')
self.assertEqual(record['IdList'][22], '18411194')
self.assertEqual(record['IdList'][23], '18411193')
self.assertEqual(record['IdList'][24], '18411192')
self.assertEqual(record['IdList'][25], '18411191')
self.assertEqual(record['IdList'][26], '18411052')
self.assertEqual(record['IdList'][27], '18411048')
self.assertEqual(record['IdList'][28], '18411046')
self.assertEqual(record['IdList'][29], '18411019')
self.assertEqual(record['IdList'][30], '18411018')
self.assertEqual(record['IdList'][31], '18411017')
self.assertEqual(record['IdList'][32], '18411015')
self.assertEqual(record['IdList'][33], '18411014')
self.assertEqual(record['IdList'][34], '18411011')
self.assertEqual(record['IdList'][35], '18411010')
self.assertEqual(record['IdList'][36], '18411005')
self.assertEqual(record['IdList'][37], '18411003')
self.assertEqual(record['IdList'][38], '18411001')
self.assertEqual(record['IdList'][39], '18411000')
self.assertEqual(record['IdList'][40], '18410999')
self.assertEqual(record['IdList'][41], '18410998')
self.assertEqual(record['IdList'][42], '18410997')
self.assertEqual(record['IdList'][43], '18410995')
self.assertEqual(record['IdList'][44], '18410977')
self.assertEqual(record['IdList'][45], '18410975')
self.assertEqual(record['IdList'][46], '18410966')
self.assertEqual(record['IdList'][47], '18410954')
self.assertEqual(record['IdList'][48], '18410953')
self.assertEqual(record['IdList'][49], '18410934')
self.assertEqual(record['IdList'][50], '18410925')
self.assertEqual(record['IdList'][51], '18410903')
self.assertEqual(record['IdList'][52], '18410826')
self.assertEqual(record['IdList'][53], '18410739')
self.assertEqual(record['IdList'][54], '18410720')
self.assertEqual(record['IdList'][55], '18410716')
self.assertEqual(record['IdList'][56], '18410709')
self.assertEqual(record['IdList'][57], '18410705')
self.assertEqual(record['IdList'][58], '18410692')
self.assertEqual(record['IdList'][59], '18410690')
self.assertEqual(record['IdList'][60], '18410634')
self.assertEqual(record['IdList'][61], '18410618')
self.assertEqual(record['IdList'][62], '18410610')
self.assertEqual(record['IdList'][63], '18410593')
self.assertEqual(record['IdList'][64], '18410587')
self.assertEqual(record['IdList'][65], '18410567')
self.assertEqual(record['IdList'][66], '18410539')
self.assertEqual(record['IdList'][67], '18410530')
self.assertEqual(record['IdList'][68], '18410528')
self.assertEqual(record['IdList'][69], '18410461')
self.assertEqual(record['IdList'][70], '18410455')
self.assertEqual(record['IdList'][71], '18410444')
self.assertEqual(record['IdList'][72], '18410443')
self.assertEqual(record['IdList'][73], '18410442')
self.assertEqual(record['IdList'][74], '18410441')
self.assertEqual(record['IdList'][75], '18410440')
self.assertEqual(record['IdList'][76], '18410439')
self.assertEqual(record['IdList'][77], '18410437')
self.assertEqual(record['IdList'][78], '18410436')
self.assertEqual(record['IdList'][79], '18410435')
self.assertEqual(record['IdList'][80], '18410431')
self.assertEqual(record['IdList'][81], '18410430')
self.assertEqual(record['IdList'][82], '18410428')
self.assertEqual(record['IdList'][83], '18410427')
self.assertEqual(record['IdList'][84], '18410405')
self.assertEqual(record['IdList'][85], '18410404')
self.assertEqual(record['IdList'][86], '18410355')
self.assertEqual(record['IdList'][87], '18410327')
self.assertEqual(record['IdList'][88], '18410312')
self.assertEqual(record['IdList'][89], '18410311')
self.assertEqual(record['IdList'][90], '18410307')
self.assertEqual(record['IdList'][91], '18410259')
self.assertEqual(record['IdList'][92], '18410249')
self.assertEqual(record['IdList'][93], '18410245')
self.assertEqual(record['IdList'][94], '18410243')
self.assertEqual(record['IdList'][95], '18410242')
self.assertEqual(record['IdList'][96], '18410060')
self.assertEqual(record['IdList'][97], '18410013')
self.assertEqual(record['IdList'][98], '18409992')
self.assertEqual(record['IdList'][99], '18409991')
self.assertEqual(len(record['TranslationSet']), 1)
self.assertEqual(record['TranslationSet'][0]['From'], 'cancer')
self.assertEqual(record['TranslationSet'][0]['To'], '(("neoplasms"[TIAB] NOT Medline[SB]) OR "neoplasms"[MeSH Terms] OR cancer[Text Word])')
self.assertEqual(len(record['TranslationStack']), 13)
self.assertEqual(record['TranslationStack'][0]['Term'], '"neoplasms"[TIAB]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'TIAB')
self.assertEqual(record['TranslationStack'][0]['Count'], "52104")
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][1]['Term'], 'Medline[SB]')
self.assertEqual(record['TranslationStack'][1]['Field'], 'SB')
self.assertEqual(record['TranslationStack'][1]['Count'], "16509514")
self.assertEqual(record['TranslationStack'][1]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][2], 'NOT')
self.assertEqual(record['TranslationStack'][3], 'GROUP')
self.assertEqual(record['TranslationStack'][4]['Term'], '"neoplasms"[MeSH Terms]')
self.assertEqual(record['TranslationStack'][4]['Field'], 'MeSH Terms')
self.assertEqual(record['TranslationStack'][4]['Count'], "1918010")
self.assertEqual(record['TranslationStack'][4]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][5], 'OR')
self.assertEqual(record['TranslationStack'][6]['Term'], 'cancer[Text Word]')
self.assertEqual(record['TranslationStack'][6]['Field'], 'Text Word')
self.assertEqual(record['TranslationStack'][6]['Count'], "638849")
self.assertEqual(record['TranslationStack'][6]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][7], 'OR')
self.assertEqual(record['TranslationStack'][8], 'GROUP')
self.assertEqual(record['TranslationStack'][9]['Term'], '2008/02/16[EDAT]')
self.assertEqual(record['TranslationStack'][9]['Field'], 'EDAT')
self.assertEqual(record['TranslationStack'][9]['Count'], "-1")
self.assertEqual(record['TranslationStack'][9]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][10]['Term'], '2008/04/16[EDAT]')
self.assertEqual(record['TranslationStack'][10]['Field'], 'EDAT')
self.assertEqual(record['TranslationStack'][10]['Count'], "-1")
self.assertEqual(record['TranslationStack'][10]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][11], 'RANGE')
self.assertEqual(record['TranslationStack'][12], 'AND')
self.assertEqual(record['QueryTranslation'], '(("neoplasms"[TIAB] NOT Medline[SB]) OR "neoplasms"[MeSH Terms] OR cancer[Text Word]) AND 2008/02/16[EDAT] : 2008/04/16[EDAT]')
def test_pubmed3(self):
'''Test parsing XML returned by ESearch from PubMed (third test)
'''
# Search in PubMed for the journal PNAS Volume 97, and retrieve
# 6 IDs starting at ID 7.
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="pubmed", term="PNAS[ta] AND 97[vi]",
# retstart=6, retmax=6)
input = open('Entrez/esearch3.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], '2652')
self.assertEqual(record['RetMax'], '6')
self.assertEqual(record['RetStart'], '6')
self.assertEqual(len(record['IdList']), 6)
self.assertEqual(record['IdList'][0], '11121077')
self.assertEqual(record['IdList'][1], '11121076')
self.assertEqual(record['IdList'][2], '11121075')
self.assertEqual(record['IdList'][3], '11121074')
self.assertEqual(record['IdList'][4], '11121073')
self.assertEqual(record['IdList'][5], '11121072')
self.assertEqual(len(record['TranslationSet']), 1)
self.assertEqual(record['TranslationSet'][0]['From'], 'PNAS[ta]')
self.assertEqual(record['TranslationSet'][0]['To'], '"Proc Natl Acad Sci U S A"[Journal:__jrid6653]')
self.assertEqual(len(record['TranslationStack']), 3)
self.assertEqual(record['TranslationStack'][0]['Term'], '"Proc Natl Acad Sci U S A"[Journal]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'Journal')
self.assertEqual(record['TranslationStack'][0]['Count'], '91806')
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][1]['Term'], '97[vi]')
self.assertEqual(record['TranslationStack'][1]['Field'], 'vi')
self.assertEqual(record['TranslationStack'][1]['Count'], '58681')
self.assertEqual(record['TranslationStack'][1]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][2], 'AND')
self.assertEqual(record['QueryTranslation'], '"Proc Natl Acad Sci U S A"[Journal] AND 97[vi]')
def test_journals(self):
'''Test parsing XML returned by ESearch from the Journals database
'''
# Search in Journals for the term obstetrics.
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="journals", term="obstetrics")
input = open('Entrez/esearch4.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], '177')
self.assertEqual(record['RetMax'], '20')
self.assertEqual(record['RetStart'], '0')
self.assertEqual(len(record['IdList']), 20)
self.assertEqual(record['IdList'][0], '75')
self.assertEqual(record['IdList'][1], '138')
self.assertEqual(record['IdList'][2], '136')
self.assertEqual(record['IdList'][3], '137')
self.assertEqual(record['IdList'][4], '139')
self.assertEqual(record['IdList'][5], '140')
self.assertEqual(record['IdList'][6], '355')
self.assertEqual(record['IdList'][7], '354')
self.assertEqual(record['IdList'][8], '27731')
self.assertEqual(record['IdList'][9], '439')
self.assertEqual(record['IdList'][10], '564')
self.assertEqual(record['IdList'][11], '617')
self.assertEqual(record['IdList'][12], '749')
self.assertEqual(record['IdList'][13], '735')
self.assertEqual(record['IdList'][14], '815')
self.assertEqual(record['IdList'][15], '905')
self.assertEqual(record['IdList'][16], '903')
self.assertEqual(record['IdList'][17], '932')
self.assertEqual(record['IdList'][18], '933')
self.assertEqual(record['IdList'][19], '875')
self.assertEqual(len(record['TranslationSet']), 0)
self.assertEqual(len(record['TranslationStack']), 2)
self.assertEqual(record['TranslationStack'][0]['Term'], 'obstetrics[All Fields]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'All Fields')
self.assertEqual(record['TranslationStack'][0]['Count'], '177')
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][0].tag, "TermSet")
self.assertEqual(record['TranslationStack'][1], 'GROUP')
self.assertEqual(record['TranslationStack'][1].tag, "OP")
self.assertEqual(record['QueryTranslation'], 'obstetrics[All Fields]')
def test_pmc(self):
'''Test parsing XML returned by ESearch from PubMed Central
'''
# Search in PubMed Central for stem cells in free fulltext articles.
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="pmc",
# term="stem cells AND free fulltext[filter]")
input = open('Entrez/esearch5.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], '23492')
self.assertEqual(record['RetMax'], '20')
self.assertEqual(record['RetStart'], '0')
self.assertEqual(len(record['IdList']), 20)
self.assertEqual(record['IdList'][0], '1894783')
self.assertEqual(record['IdList'][1], '2064507')
self.assertEqual(record['IdList'][2], '520747')
self.assertEqual(record['IdList'][3], '2043120')
self.assertEqual(record['IdList'][4], '2118723')
self.assertEqual(record['IdList'][5], '1815228')
self.assertEqual(record['IdList'][6], '1253596')
self.assertEqual(record['IdList'][7], '2077853')
self.assertEqual(record['IdList'][8], '1308908')
self.assertEqual(record['IdList'][9], '2233634')
self.assertEqual(record['IdList'][10], '556262')
self.assertEqual(record['IdList'][11], '1925137')
self.assertEqual(record['IdList'][12], '1860068')
self.assertEqual(record['IdList'][13], '1626529')
self.assertEqual(record['IdList'][14], '2217616')
self.assertEqual(record['IdList'][15], '1584276')
self.assertEqual(record['IdList'][16], '2000702')
self.assertEqual(record['IdList'][17], '186324')
self.assertEqual(record['IdList'][18], '1959362')
self.assertEqual(record['IdList'][19], '1413911')
self.assertEqual(len(record['TranslationSet']), 1)
self.assertEqual(record['TranslationSet'][0]['From'], 'stem cells')
self.assertEqual(record['TranslationSet'][0]['To'], '("stem cells"[MeSH Terms] OR stem cells[Acknowledgments] OR stem cells[Figure/Table Caption] OR stem cells[Section Title] OR stem cells[Body - All Words] OR stem cells[Title] OR stem cells[Abstract])')
self.assertEqual(len(record['TranslationStack']), 16)
self.assertEqual(record['TranslationStack'][0]['Term'], '"stem cells"[MeSH Terms]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'MeSH Terms')
self.assertEqual(record['TranslationStack'][0]['Count'], '12224')
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][1]['Term'], 'stem cells[Acknowledgments]')
self.assertEqual(record['TranslationStack'][1]['Field'], 'Acknowledgments')
self.assertEqual(record['TranslationStack'][1]['Count'], '79')
self.assertEqual(record['TranslationStack'][1]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][2], 'OR')
self.assertEqual(record['TranslationStack'][3]['Term'], 'stem cells[Figure/Table Caption]')
self.assertEqual(record['TranslationStack'][3]['Field'], 'Figure/Table Caption')
self.assertEqual(record['TranslationStack'][3]['Count'], '806')
self.assertEqual(record['TranslationStack'][3]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][4], 'OR')
self.assertEqual(record['TranslationStack'][5]['Term'], 'stem cells[Section Title]')
self.assertEqual(record['TranslationStack'][5]['Field'], 'Section Title')
self.assertEqual(record['TranslationStack'][5]['Count'], '522')
self.assertEqual(record['TranslationStack'][5]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][6], 'OR')
self.assertEqual(record['TranslationStack'][7]['Term'], 'stem cells[Body - All Words]')
self.assertEqual(record['TranslationStack'][7]['Field'], 'Body - All Words')
self.assertEqual(record['TranslationStack'][7]['Count'], '13936')
self.assertEqual(record['TranslationStack'][7]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][8], 'OR')
self.assertEqual(record['TranslationStack'][9]['Term'], 'stem cells[Title]')
self.assertEqual(record['TranslationStack'][9]['Field'], 'Title')
self.assertEqual(record['TranslationStack'][9]['Count'], '1005')
self.assertEqual(record['TranslationStack'][9]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][10], 'OR')
self.assertEqual(record['TranslationStack'][11]['Term'], 'stem cells[Abstract]')
self.assertEqual(record['TranslationStack'][11]['Field'], 'Abstract')
self.assertEqual(record['TranslationStack'][11]['Count'], '2503')
self.assertEqual(record['TranslationStack'][11]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][12], 'OR')
self.assertEqual(record['TranslationStack'][13], 'GROUP')
self.assertEqual(record['TranslationStack'][14]['Term'], 'free fulltext[filter]')
self.assertEqual(record['TranslationStack'][14]['Field'], 'filter')
self.assertEqual(record['TranslationStack'][14]['Count'], '1412839')
self.assertEqual(record['TranslationStack'][14]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][15], 'AND')
self.assertEqual(record['QueryTranslation'], '("stem cells"[MeSH Terms] OR stem cells[Acknowledgments] OR stem cells[Figure/Table Caption] OR stem cells[Section Title] OR stem cells[Body - All Words] OR stem cells[Title] OR stem cells[Abstract]) AND free fulltext[filter]')
def test_nucleotide(self):
'''Test parsing XML returned by ESearch from the Nucleotide database
'''
# Search in Nucleotide for a property of the sequence,
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="nucleotide", term="biomol trna[prop]")
input = open('Entrez/esearch6.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], "699")
self.assertEqual(record['RetMax'], "20")
self.assertEqual(record['RetStart'], "0")
self.assertEqual(len(record['IdList']), 20)
self.assertEqual(record['IdList'][0], '220161')
self.assertEqual(record['IdList'][1], '220160')
self.assertEqual(record['IdList'][2], '220159')
self.assertEqual(record['IdList'][3], '220263')
self.assertEqual(record['IdList'][4], '220162')
self.assertEqual(record['IdList'][5], '159885659')
self.assertEqual(record['IdList'][6], '156572228')
self.assertEqual(record['IdList'][7], '2648075')
self.assertEqual(record['IdList'][8], '287595')
self.assertEqual(record['IdList'][9], '402544')
self.assertEqual(record['IdList'][10], '402506')
self.assertEqual(record['IdList'][11], '402505')
self.assertEqual(record['IdList'][12], '287594')
self.assertEqual(record['IdList'][13], '287593')
self.assertEqual(record['IdList'][14], '287592')
self.assertEqual(record['IdList'][15], '287591')
self.assertEqual(record['IdList'][16], '287590')
self.assertEqual(record['IdList'][17], '287589')
self.assertEqual(record['IdList'][18], '287588')
self.assertEqual(record['IdList'][19], '287587')
self.assertEqual(len(record['TranslationSet']), 0)
self.assertEqual(record['QueryTranslation'], '')
def test_protein(self):
'''Test parsing XML returned by ESearch from the Protein database
'''
# Search in Protein for a molecular weight
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="protein", term="200020[molecular weight]")
input = open('Entrez/esearch7.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], '3')
self.assertEqual(record['RetMax'], '3')
self.assertEqual(record['RetStart'], '0')
self.assertEqual(len(record['IdList']), 3)
self.assertEqual(record['IdList'][0], '16766766')
self.assertEqual(record['IdList'][1], '16422035')
self.assertEqual(record['IdList'][2], '4104812')
self.assertEqual(len(record['TranslationSet']), 0)
self.assertEqual(len(record['TranslationStack']), 2)
self.assertEqual(record['TranslationStack'][0]['Term'], '000200020[molecular weight]')
self.assertEqual(record['TranslationStack'][0]['Field'], 'molecular weight')
self.assertEqual(record['TranslationStack'][0]['Count'], '3')
self.assertEqual(record['TranslationStack'][0]['Explode'], 'Y')
self.assertEqual(record['TranslationStack'][1], 'GROUP')
self.assertEqual(record['QueryTranslation'], '000200020[molecular weight]')
def test_notfound(self):
'''Test parsing XML returned by ESearch when no items were found
'''
# To create the XML file, use
# >>> Bio.Entrez.esearch(db="protein", term="abcXYZ")
input = open('Entrez/esearch8.xml')
record = Entrez.read(input)
self.assertEqual(record['Count'], "0")
self.assertEqual(record['RetMax'], "0")
self.assertEqual(record['RetStart'], "0")
self.assertEqual(len(record['IdList']), 0)
self.assertEqual(len(record['TranslationSet']), 0)
self.assertEqual(record['QueryTranslation'], '')
self.assertEqual(len(record['ErrorList']), 2)
self.assert_("PhraseNotFound" in record['ErrorList'])
self.assert_("FieldNotFound" in record['ErrorList'])
self.assertEqual(len(record['ErrorList']["PhraseNotFound"]), 1)
self.assertEqual(len(record['ErrorList']["FieldNotFound"]), 0)
self.assertEqual(record['ErrorList']["PhraseNotFound"][0], "abcXYZ")
self.assertEqual(len(record['WarningList']), 3)
self.assert_("PhraseIgnored" in record['WarningList'])
self.assert_("QuotedPhraseNotFound" in record['WarningList'])
self.assert_("OutputMessage" in record['WarningList'])
self.assertEqual(len(record['WarningList']["PhraseIgnored"]), 0)
self.assertEqual(len(record['WarningList']["QuotedPhraseNotFound"]), 0)
self.assertEqual(len(record['WarningList']["OutputMessage"]), 1)
self.assertEqual(record['WarningList']["OutputMessage"][0], "No items found.")
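# Added sketch (not part of the original test suite): the esearch fixture
# files above could be regenerated live, assuming network access and an
# e-mail address registered with NCBI, e.g.:
#
#     from Bio import Entrez
#     Entrez.email = "your.name@example.org"   # hypothetical address
#     handle = Entrez.esearch(db="pubmed", term="biopython")
#     open('Entrez/esearch1.xml', 'w').write(handle.read())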
class EPostTest(unittest.TestCase):
'''Tests for parsing XML output returned by EPost
'''
# Don't know how to get an InvalidIdList in the XML returned by EPost;
# unable to test if we are parsing it correctly.
def test_epost(self):
'''Test parsing XML returned by EPost
'''
# To create the XML file, use
# >>> Bio.Entrez.epost(db="pubmed", id="11237011")
input = open('Entrez/epost1.xml')
record = Entrez.read(input)
self.assertEqual(record["QueryKey"], '1')
self.assertEqual(record["WebEnv"], '0zYsuLk3zG_lRMkblPBEqnT8nIENUGw4HAy8xXChTnoVm7GEnWY71jv3nz@1FC077F3806DE010_0042SID')
def test_wrong(self):
'''Test parsing XML returned by EPost with incorrect arguments
'''
# To create the XML file, use
# >>> Bio.Entrez.epost(db="nothing")
input = open('Entrez/epost2.xml')
self.assertRaises(RuntimeError, Entrez.read, input)
def test_invalid(self):
'''Test parsing XML returned by EPost with an invalid id (overflow tag)
'''
# To create the XML file, use
# >>> Bio.Entrez.epost(db="pubmed", id=99999999999999999999999999999999)
input = open('Entrez/epost3.xml')
record = Entrez.read(input)
self.assertEqual(record["InvalidIdList"], ["-1"])
self.assertEqual(record["QueryKey"], "1")
self.assertEqual(record["WebEnv"], "08AIUeBsfIk6BfdzKnd3GM2RtCudczC9jm5aeb4US0o7azCTQCeCsr-xg0@1EDE54E680D03C40_0011SID")
class ESummaryTest(unittest.TestCase):
'''Tests for parsing XML output returned by ESummary
'''
# Items have a type, which can be
# (Integer|Date|String|Structure|List|Flags|Qualifier|Enumerator|Unknown)
# I don't have an XML file where the type "Flags", "Qualifier",
# "Enumerator", or "Unknown" is used, so they are not tested here.
def test_pubmed(self):
'''Test parsing XML returned by ESummary from PubMed
'''
# In PubMed display records for PMIDs 11850928 and 11482001 in
# xml retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="pubmed", id=["11850928","11482001"],
# retmode="xml")
input = open('Entrez/esummary1.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "11850928")
self.assertEqual(record[0]["PubDate"], "1965 Aug")
self.assertEqual(record[0]["EPubDate"], "")
self.assertEqual(record[0]["Source"], "Arch Dermatol")
self.assertEqual(len(record[0]["AuthorList"]), 2)
self.assertEqual(record[0]["AuthorList"][0], "LoPresti PJ")
self.assertEqual(record[0]["AuthorList"][1], "Hambrick GW Jr")
self.assertEqual(record[0]["LastAuthor"], "Hambrick GW Jr")
self.assertEqual(record[0]["Title"], "Zirconium granuloma following treatment of rhus dermatitis.")
self.assertEqual(record[0]["Volume"], "92")
self.assertEqual(record[0]["Issue"], "2")
self.assertEqual(record[0]["Pages"], "188-91")
self.assertEqual(record[0]["LangList"], ["English"])
self.assertEqual(record[0]["NlmUniqueID"], "0372433")
self.assertEqual(record[0]["ISSN"], "0003-987X")
self.assertEqual(record[0]["ESSN"], "1538-3652")
self.assertEqual(len(record[0]["PubTypeList"]), 1)
self.assertEqual(record[0]["PubTypeList"][0], "Journal Article")
self.assertEqual(record[0]["RecordStatus"], "PubMed - indexed for MEDLINE")
self.assertEqual(record[0]["PubStatus"], "ppublish")
self.assertEqual(len(record[0]["ArticleIds"]), 2)
self.assertEqual(record[0]["ArticleIds"]["pubmed"], ["11850928"])
self.assertEqual(record[0]["ArticleIds"]["medline"], [])
self.assertEqual(len(record[0]["History"]), 2)
self.assertEqual(record[0]["History"]["pubmed"], ["1965/08/01 00:00"])
self.assertEqual(record[0]["History"]["medline"], ["2002/03/09 10:01"])
self.assertEqual(len(record[0]["References"]), 0)
self.assertEqual(record[0]["HasAbstract"], 1)
self.assertEqual(record[0]["PmcRefCount"], 0)
self.assertEqual(record[0]["FullJournalName"], "Archives of dermatology")
self.assertEqual(record[0]["ELocationID"], "")
self.assertEqual(record[0]["SO"], "1965 Aug;92(2):188-91")
self.assertEqual(record[1]["Id"], "11482001")
self.assertEqual(record[1]["PubDate"], "2001 Jun")
self.assertEqual(record[1]["EPubDate"], "")
self.assertEqual(record[1]["Source"], "Adverse Drug React Toxicol Rev")
self.assertEqual(len(record[1]["AuthorList"]), 3)
self.assertEqual(record[1]["AuthorList"][0], "Mantle D")
self.assertEqual(record[1]["AuthorList"][1], "Gok MA")
self.assertEqual(record[1]["AuthorList"][2], "Lennard TW")
self.assertEqual(record[1]["LastAuthor"], "Lennard TW")
self.assertEqual(record[1]["Title"], "Adverse and beneficial effects of plant extracts on skin and skin disorders.")
self.assertEqual(record[1]["Volume"], "20")
self.assertEqual(record[1]["Issue"], "2")
self.assertEqual(record[1]["Pages"], "89-103")
self.assertEqual(len(record[1]["LangList"]), 1)
self.assertEqual(record[1]["LangList"][0], "English")
self.assertEqual(record[1]["NlmUniqueID"], "9109474")
self.assertEqual(record[1]["ISSN"], "0964-198X")
self.assertEqual(record[1]["ESSN"], "")
self.assertEqual(len(record[1]["PubTypeList"]), 2)
self.assertEqual(record[1]["PubTypeList"][0], "Journal Article")
self.assertEqual(record[1]["PubTypeList"][1], "Review")
self.assertEqual(record[1]["RecordStatus"], "PubMed - indexed for MEDLINE")
self.assertEqual(record[1]["PubStatus"], "ppublish")
self.assertEqual(len(record[1]["ArticleIds"]), 2)
self.assertEqual(record[1]["ArticleIds"]["pubmed"], ["11482001"])
self.assertEqual(record[1]["ArticleIds"]["medline"], [])
self.assertEqual(len(record[1]["History"]), 2)
self.assertEqual(record[1]["History"]["pubmed"], ["2001/08/03 10:00"])
self.assertEqual(record[1]["History"]["medline"], ["2002/01/23 10:01"])
self.assertEqual(len(record[1]["References"]), 0)
self.assertEqual(record[1]["HasAbstract"], 1)
self.assertEqual(record[1]["PmcRefCount"], 0)
self.assertEqual(record[1]["FullJournalName"], "Adverse drug reactions and toxicological reviews")
self.assertEqual(record[1]["ELocationID"], "")
self.assertEqual(record[1]["SO"], "2001 Jun;20(2):89-103")
def test_journals(self):
'''Test parsing XML returned by ESummary from the Journals database
'''
# In Journals display records for journal IDs 27731,439,735,905
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="journals", id="27731,439,735,905")
input = open('Entrez/esummary2.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "27731")
self.assertEqual(record[0]["Title"], "The American journal of obstetrics and diseases of women and children")
self.assertEqual(record[0]["MedAbbr"], "Am J Obstet Dis Women Child")
self.assertEqual(record[0]["IsoAbbr"], "")
self.assertEqual(record[0]["NlmId"], "14820330R")
self.assertEqual(record[0]["pISSN"], "0894-5543")
self.assertEqual(record[0]["eISSN"], "")
self.assertEqual(record[0]["PublicationStartYear"], "1868")
self.assertEqual(record[0]["PublicationEndYear"], "1919")
self.assertEqual(record[0]["Publisher"], "W.A. Townsend & Adams, $c [1868-1919]")
self.assertEqual(record[0]["Language"], "eng")
self.assertEqual(record[0]["Country"], "United States")
self.assertEqual(len(record[0]["BroadHeading"]), 0)
self.assertEqual(record[0]["ContinuationNotes"], "")
self.assertEqual(record[1]["Id"], "439")
self.assertEqual(record[1]["Title"], "American journal of obstetrics and gynecology")
self.assertEqual(record[1]["MedAbbr"], "Am J Obstet Gynecol")
self.assertEqual(record[1]["IsoAbbr"], "Am. J. Obstet. Gynecol.")
self.assertEqual(record[1]["NlmId"], "0370476")
self.assertEqual(record[1]["pISSN"], "0002-9378")
self.assertEqual(record[1]["eISSN"], "1097-6868")
self.assertEqual(record[1]["PublicationStartYear"], "1920")
self.assertEqual(record[1]["PublicationEndYear"], "")
self.assertEqual(record[1]["Publisher"], "Elsevier,")
self.assertEqual(record[1]["Language"], "eng")
self.assertEqual(record[1]["Country"], "United States")
self.assertEqual(len(record[1]["BroadHeading"]), 2)
self.assertEqual(record[1]["BroadHeading"][0], "Gynecology")
self.assertEqual(record[1]["BroadHeading"][1], "Obstetrics")
self.assertEqual(record[1]["ContinuationNotes"], "Continues: American journal of obstetrics and diseases of women and children. ")
self.assertEqual(record[2]["Id"], "735")
self.assertEqual(record[2]["Title"], "Archives of gynecology and obstetrics")
self.assertEqual(record[2]["MedAbbr"], "Arch Gynecol Obstet")
self.assertEqual(record[2]["IsoAbbr"], "Arch. Gynecol. Obstet.")
self.assertEqual(record[2]["NlmId"], "8710213")
self.assertEqual(record[2]["pISSN"], "0932-0067")
self.assertEqual(record[2]["eISSN"], "1432-0711")
self.assertEqual(record[2]["PublicationStartYear"], "1987")
self.assertEqual(record[2]["PublicationEndYear"], "")
self.assertEqual(record[2]["Publisher"], "Springer Verlag")
self.assertEqual(record[2]["Language"], "eng")
self.assertEqual(record[2]["Country"], "Germany")
self.assertEqual(len(record[2]["BroadHeading"]), 2)
self.assertEqual(record[2]["BroadHeading"][0], "Gynecology")
self.assertEqual(record[2]["BroadHeading"][1], "Obstetrics")
self.assertEqual(record[2]["ContinuationNotes"], "Continues: Archives of gynecology. ")
self.assertEqual(record[3]["Id"], "905")
self.assertEqual(record[3]["Title"], "Asia-Oceania journal of obstetrics and gynaecology / AOFOG")
self.assertEqual(record[3]["MedAbbr"], "Asia Oceania J Obstet Gynaecol")
self.assertEqual(record[3]["IsoAbbr"], "")
self.assertEqual(record[3]["NlmId"], "8102781")
self.assertEqual(record[3]["pISSN"], "0389-2328")
self.assertEqual(record[3]["eISSN"], "")
self.assertEqual(record[3]["PublicationStartYear"], "1980")
self.assertEqual(record[3]["PublicationEndYear"], "1994")
self.assertEqual(record[3]["Publisher"], "University Of Tokyo Press")
self.assertEqual(record[3]["Language"], "eng")
self.assertEqual(record[3]["Country"], "Japan")
self.assertEqual(len(record[3]["BroadHeading"]), 2)
self.assertEqual(record[3]["BroadHeading"][0], "Gynecology")
self.assertEqual(record[3]["BroadHeading"][1], "Obstetrics")
self.assertEqual(record[3]["ContinuationNotes"], "Continues: Journal of the Asian Federation of Obstetrics and Gynaecology. Continued by: Journal of obstetrics and gynaecology (Tokyo, Japan). ")
def test_protein(self):
'''Test parsing XML returned by ESummary from the Protein database
'''
# In Protein display records for GIs 28800982 and 28628843 in xml retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="protein", id="28800982,28628843", retmode="xml")
input = open('Entrez/esummary3.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "28800982")
self.assertEqual(record[0]["Caption"], "AAO47091")
self.assertEqual(record[0]["Title"], "hemochromatosis [Homo sapiens]")
self.assertEqual(record[0]["Extra"], "gi|28800982|gb|AAO47091.1|[28800982]")
self.assertEqual(record[0]["Gi"], 28800982)
self.assertEqual(record[0]["CreateDate"], "2003/03/03")
self.assertEqual(record[0]["UpdateDate"], "2003/03/03")
self.assertEqual(record[0]["Flags"], 0)
self.assertEqual(record[0]["TaxId"], 9606)
self.assertEqual(record[0]["Length"], 268)
self.assertEqual(record[0]["Status"], "live")
self.assertEqual(record[0]["ReplacedBy"], "")
self.assertEqual(record[0]["Comment"], " ")
self.assertEqual(record[1]["Id"], "28628843")
self.assertEqual(record[1]["Caption"], "AAO49381")
self.assertEqual(record[1]["Title"], "erythroid associated factor [Homo sapiens]")
self.assertEqual(record[1]["Extra"], "gi|28628843|gb|AAO49381.1|AF485325_1[28628843]")
self.assertEqual(record[1]["Gi"], 28628843)
self.assertEqual(record[1]["CreateDate"], "2003/03/02")
self.assertEqual(record[1]["UpdateDate"], "2003/03/02")
self.assertEqual(record[1]["Flags"], 0)
self.assertEqual(record[1]["TaxId"], 9606)
self.assertEqual(record[1]["Length"], 102)
self.assertEqual(record[1]["Status"], "live")
self.assertEqual(record[1]["ReplacedBy"], "")
self.assertEqual(record[1]["Comment"], " ")
def test_nucleotide(self):
'''Test parsing XML returned by ESummary from the Nucleotide database
'''
# In Nucleotide display records for GIs 28864546 and 28800981
# in xml retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="nucleotide", id="28864546,28800981",
# retmode="xml")
input = open('Entrez/esummary4.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "28864546")
self.assertEqual(record[0]["Caption"], "AY207443")
self.assertEqual(record[0]["Title"], "Homo sapiens alpha hemoglobin (HBZP) pseudogene 3' UTR/AluJo repeat breakpoint junction")
self.assertEqual(record[0]["Extra"], "gi|28864546|gb|AY207443.1|[28864546]")
self.assertEqual(record[0]["Gi"], 28864546)
self.assertEqual(record[0]["CreateDate"], "2003/03/05")
self.assertEqual(record[0]["UpdateDate"], "2003/03/05")
self.assertEqual(record[0]["Flags"], 0)
self.assertEqual(record[0]["TaxId"], 9606)
self.assertEqual(record[0]["Length"], 491)
self.assertEqual(record[0]["Status"], "live")
self.assertEqual(record[0]["ReplacedBy"], "")
self.assertEqual(record[0]["Comment"], " ")
self.assertEqual(record[1]["Id"], "28800981")
self.assertEqual(record[1]["Caption"], "AY205604")
self.assertEqual(record[1]["Title"], "Homo sapiens hemochromatosis (HFE) mRNA, partial cds")
self.assertEqual(record[1]["Extra"], "gi|28800981|gb|AY205604.1|[28800981]")
self.assertEqual(record[1]["Gi"], 28800981)
self.assertEqual(record[1]["CreateDate"], "2003/03/03")
self.assertEqual(record[1]["UpdateDate"], "2003/03/03")
self.assertEqual(record[1]["Flags"], 0)
self.assertEqual(record[1]["TaxId"], 9606)
self.assertEqual(record[1]["Length"], 860)
self.assertEqual(record[1]["Status"], "live")
self.assertEqual(record[1]["ReplacedBy"], "")
self.assertEqual(record[1]["Comment"], " ")
def test_structure(self):
'''Test parsing XML returned by ESummary from the Structure database
'''
        # In Structure display records for IDs 19923 and 12120
        # in xml retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="structure", id=["19923","12120"],
# retmode="xml")
input = open('Entrez/esummary5.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "19923")
self.assertEqual(record[0]["PdbAcc"], "1L5J")
self.assertEqual(record[0]["PdbDescr"], "Crystal Structure Of E. Coli Aconitase B")
self.assertEqual(record[0]["EC"], "4.2.1.3")
self.assertEqual(record[0]["Resolution"], "2.4")
self.assertEqual(record[0]["ExpMethod"], "X-Ray Diffraction")
self.assertEqual(record[0]["PdbClass"], "Lyase")
self.assertEqual(record[0]["PdbReleaseDate"], "2007/8/27")
self.assertEqual(record[0]["PdbDepositDate"], "2002/3/7")
self.assertEqual(record[0]["DepositDate"], "2007/10/25")
self.assertEqual(record[0]["ModifyDate"], "2007/10/25")
self.assertEqual(record[0]["LigCode"], "F3S|TRA")
self.assertEqual(record[0]["LigCount"], "2")
self.assertEqual(record[0]["ModProteinResCount"], "0")
self.assertEqual(record[0]["ModDNAResCount"], "0")
self.assertEqual(record[0]["ModRNAResCount"], "0")
self.assertEqual(record[0]["ProteinChainCount"], "2")
self.assertEqual(record[0]["DNAChainCount"], "0")
self.assertEqual(record[0]["RNAChainCount"], "0")
self.assertEqual(record[1]["Id"], "12120")
self.assertEqual(record[1]["PdbAcc"], "1B0K")
self.assertEqual(record[1]["PdbDescr"], "S642a:fluorocitrate Complex Of Aconitase")
self.assertEqual(record[1]["EC"], "4.2.1.3")
self.assertEqual(record[1]["Resolution"], "2.5")
self.assertEqual(record[1]["ExpMethod"], "X-Ray Diffraction")
self.assertEqual(record[1]["PdbClass"], "Lyase")
self.assertEqual(record[1]["PdbReleaseDate"], "2007/8/27")
self.assertEqual(record[1]["PdbDepositDate"], "1998/11/11")
self.assertEqual(record[1]["DepositDate"], "2007/10/07")
self.assertEqual(record[1]["ModifyDate"], "2007/10/07")
self.assertEqual(record[1]["LigCode"], "FLC|O|SF4")
self.assertEqual(record[1]["LigCount"], "3")
self.assertEqual(record[1]["ModProteinResCount"], "0")
self.assertEqual(record[1]["ModDNAResCount"], "0")
self.assertEqual(record[1]["ModRNAResCount"], "0")
self.assertEqual(record[1]["ProteinChainCount"], "1")
self.assertEqual(record[1]["DNAChainCount"], "0")
self.assertEqual(record[1]["RNAChainCount"], "0")
def test_taxonomy(self):
'''Test parsing XML returned by ESummary from the Taxonomy database
'''
# In Taxonomy display records for TAXIDs 9913 and 30521 in
# xml retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="taxonomy", id=["9913","30521"],
# retmode="xml")
input = open('Entrez/esummary6.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "9913")
self.assertEqual(record[0]["Rank"], "species")
self.assertEqual(record[0]["Division"], "even-toed ungulates")
self.assertEqual(record[0]["ScientificName"], "Bos taurus")
self.assertEqual(record[0]["CommonName"], "cattle")
self.assertEqual(record[0]["TaxId"], 9913)
self.assertEqual(record[0]["NucNumber"], 2264214)
self.assertEqual(record[0]["ProtNumber"], 55850)
self.assertEqual(record[0]["StructNumber"], 1517)
self.assertEqual(record[0]["GenNumber"], 31)
self.assertEqual(record[0]["GeneNumber"], 29651)
self.assertEqual(record[0]["Genus"], "")
self.assertEqual(record[0]["Species"], "")
self.assertEqual(record[0]["Subsp"], "")
self.assertEqual(record[1]["Id"], "30521")
self.assertEqual(record[1]["Rank"], "species")
self.assertEqual(record[1]["Division"], "even-toed ungulates")
self.assertEqual(record[1]["ScientificName"], "Bos grunniens")
self.assertEqual(record[1]["CommonName"], "domestic yak")
self.assertEqual(record[1]["TaxId"], 30521)
self.assertEqual(record[1]["NucNumber"], 560)
self.assertEqual(record[1]["ProtNumber"], 254)
self.assertEqual(record[1]["StructNumber"], 0)
self.assertEqual(record[1]["GenNumber"], 1)
self.assertEqual(record[1]["GeneNumber"], 13)
self.assertEqual(record[1]["Genus"], "")
self.assertEqual(record[1]["Species"], "")
self.assertEqual(record[1]["Subsp"], "")
def test_unists(self):
'''Test parsing XML returned by ESummary from the UniSTS database
'''
# In UniSTS display records for IDs 254085 and 254086 in xml
# retrieval mode
# To create the XML file, use
# >>> Bio.Entrez.esummary(db="unists", id=["254085","254086"],
# retmode="xml")
input = open('Entrez/esummary7.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["Id"], "254085")
self.assertEqual(record[0]["Marker_Name"], "SE234324")
self.assertEqual(len(record[0]["Map_Gene_Summary_List"]), 1)
self.assertEqual(record[0]["Map_Gene_Summary_List"][0]["Org"], "Sus scrofa")
self.assertEqual(record[0]["Map_Gene_Summary_List"][0]["Chr"], " chromosome 7")
self.assertEqual(record[0]["Map_Gene_Summary_List"][0]["Locus"], "")
self.assertEqual(record[0]["EPCR_Summary"], "Found by e-PCR in sequences from Sus scrofa.")
self.assertEqual(record[0]["LocusId"], "")
self.assertEqual(record[1]["Id"], "254086")
self.assertEqual(record[1]["Marker_Name"], "SE259162")
self.assertEqual(len(record[1]["Map_Gene_Summary_List"]), 1)
self.assertEqual(record[1]["Map_Gene_Summary_List"][0]["Org"], "Sus scrofa")
self.assertEqual(record[1]["Map_Gene_Summary_List"][0]["Chr"], " chromosome 12")
self.assertEqual(record[1]["Map_Gene_Summary_List"][0]["Locus"], "")
self.assertEqual(record[1]["EPCR_Summary"], "Found by e-PCR in sequences from Sus scrofa.")
self.assertEqual(record[1]["LocusId"], "")
def test_wrong(self):
'''Test parsing XML returned by ESummary with incorrect arguments
'''
# To create the XML file, use
# >>> Bio.Entrez.esummary()
input = open('Entrez/esummary8.xml')
self.assertRaises(RuntimeError, Entrez.read, input)
class ELinkTest(unittest.TestCase):
'''Tests for parsing XML output returned by ELink
'''
def test_pubmed1(self):
'''Test parsing pubmed links returned by ELink (first test)
'''
# Retrieve IDs from PubMed for PMID 9298984 to the PubMed database
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="9298984", cmd="neighbor")
input = open('Entrez/elink1.xml')
record = Entrez.read(input)
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(record[0]["IdList"], ["9298984"])
self.assertEqual(record[0]["LinkSetDb"][0]["DbTo"], "pubmed")
self.assertEqual(record[0]["LinkSetDb"][0]["LinkName"], "pubmed_pubmed")
self.assertEqual(len(record[0]["LinkSetDb"][0]["Link"]), 144)
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Id"], "9298984")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Score"], "2147483647")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Id"], "8794856")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Score"], "65259341")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Id"], "9700164")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Score"], "60347327")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Id"], "7914521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Score"], "54343405")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Id"], "1339459")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Score"], "53014422")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Id"], "9914369")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Score"], "52741538")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Id"], "11590237")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Score"], "52493903")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Id"], "12686595")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Score"], "48734007")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Id"], "9074495")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Score"], "48220447")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Id"], "11146659")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Score"], "46604626")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Id"], "10893249")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Score"], "46254167")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Id"], "8978614")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Score"], "46166362")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Id"], "15371539")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Score"], "45060488")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Id"], "10806105")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Score"], "44825774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Id"], "10402457")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Score"], "44338522")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Id"], "10545493")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Score"], "43860609")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Id"], "10523511")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Score"], "43268800")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Id"], "12515822")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Score"], "43215343")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Id"], "15915585")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Score"], "43029760")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Id"], "11483958")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Score"], "42348877")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Id"], "11685532")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Score"], "42262104")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Id"], "9869638")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Score"], "41143593")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Id"], "12080088")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Score"], "40849490")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Id"], "12034769")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Score"], "40841328")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Id"], "9852156")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Score"], "40793501")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Id"], "9735366")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Score"], "40661605")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Id"], "10749938")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Score"], "40486739")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Id"], "9490715")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Score"], "40311339")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Id"], "9425896")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Score"], "40056298")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Id"], "11266459")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Score"], "39883140")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Id"], "14522947")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Score"], "39683976")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Id"], "15616189")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Score"], "39518630")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Id"], "16732327")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Score"], "39425668")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Id"], "11179694")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Score"], "39183565")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Id"], "10898791")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Score"], "39159761")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Id"], "11146661")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Score"], "39116609")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Id"], "11914278")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Score"], "39028004")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Id"], "10985388")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Score"], "39002572")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Id"], "16839185")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Score"], "38916726")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Id"], "7585942")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Score"], "38747288")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Id"], "2022189")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Score"], "38717145")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Id"], "7690762")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Score"], "38647275")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Id"], "7904902")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Score"], "38557343")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Id"], "9378750")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Score"], "38484849")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Id"], "12388768")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Score"], "38454422")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Id"], "11352945")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Score"], "38449836")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Id"], "11267866")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Score"], "38419058")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Id"], "17222555")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Score"], "38368546")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Id"], "11252055")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Score"], "38257516")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Id"], "16585270")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Score"], "37800856")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Id"], "9606208")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Score"], "37669054")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Id"], "17182852")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Score"], "37621285")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Id"], "9933569")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Score"], "37398470")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Id"], "15268859")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Score"], "37340582")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Id"], "12235289")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Score"], "37247450")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Id"], "16741559")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Score"], "37198716")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Id"], "11266451")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Score"], "37142542")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Id"], "15075237")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Score"], "36897578")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Id"], "15485811")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Score"], "36804297")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Id"], "14699129")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Score"], "36782062")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Id"], "16510521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Score"], "36724370")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Id"], "15824131")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Score"], "36695341")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Id"], "15371340")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Score"], "36685694")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Id"], "9878245")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Score"], "36684230")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Id"], "10398680")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Score"], "36573411")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Id"], "16516834")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Score"], "36525654")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Id"], "11715021")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Score"], "36518129")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Id"], "14622138")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Score"], "36496009")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Id"], "11092768")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Score"], "36457186")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Id"], "12514103")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Score"], "36385909")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Id"], "17525528")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Score"], "36316439")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Id"], "11402064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Score"], "36172957")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Id"], "9258677")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Score"], "35989143")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Id"], "14499625")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Score"], "35978627")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Id"], "10428958")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Score"], "35924800")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Id"], "14972679")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Score"], "35915578")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Id"], "9396743")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Score"], "35883749")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Id"], "16219694")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Score"], "35870689")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Id"], "11369198")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Score"], "35838048")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Id"], "17333235")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Score"], "35815282")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Id"], "11102811")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Score"], "35783566")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Id"], "10207147")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Score"], "35594009")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Id"], "10477755")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Score"], "35589601")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Id"], "10747094")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Score"], "35548072")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Id"], "15215209")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Score"], "35526869")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Id"], "11157774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Score"], "35510607")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Id"], "10669599")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Score"], "35462246")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Id"], "17448445")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Score"], "35398470")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Id"], "17878237")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Score"], "35231311")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Id"], "10411903")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Score"], "35202708")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Id"], "12773390")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Score"], "35171743")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Id"], "12498686")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Score"], "35131906")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Id"], "9009204")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Score"], "34993776")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Id"], "17576797")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Score"], "34988639")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Id"], "10225945")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Score"], "34950419")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Id"], "11161560")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Score"], "34912466")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Id"], "11967147")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Score"], "34900540")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Id"], "14711415")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Score"], "34883714")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Id"], "2211824")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Score"], "34843507")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Id"], "15737064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Score"], "34828187")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Id"], "7720068")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Score"], "34811182")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Id"], "9472001")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Score"], "34799321")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Id"], "11792803")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Score"], "34697393")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Id"], "11386760")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Score"], "34684610")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Id"], "15094189")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Score"], "34684021")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Id"], "9763420")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Score"], "34666950")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Id"], "10629219")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Score"], "34422925")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Id"], "11238410")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Score"], "34318521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Id"], "17199038")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Score"], "34255594")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Id"], "12944469")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Score"], "34249507")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Id"], "15616192")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Score"], "34110517")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Id"], "11146660")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Score"], "34063257")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Id"], "11402066")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Score"], "34012520")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Id"], "6791901")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Score"], "33311119")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Id"], "7172865")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Score"], "32934223")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Id"], "8270646")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Score"], "32898701")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Id"], "1916263")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Score"], "32707765")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Id"], "7588080")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Score"], "32503526")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Id"], "7391142")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Score"], "31632645")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Id"], "6793236")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Score"], "31522175")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Id"], "2512302")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Score"], "30339372")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Id"], "7720069")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Score"], "30024525")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Id"], "8257792")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Score"], "29834355")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Id"], "3417141")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Score"], "27920818")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Id"], "3315496")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Score"], "27422009")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Id"], "1993311")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Score"], "26763828")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Id"], "6185450")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Score"], "26100420")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Id"], "1819515")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Score"], "26036804")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Id"], "7250964")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Score"], "25738652")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Id"], "8489280")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Score"], "25587858")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Id"], "7096444")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Score"], "24642544")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Id"], "348629")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Score"], "24432498")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Id"], "2275018")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Score"], "23077593")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Id"], "1747872")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Score"], "22933494")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Id"], "3547036")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Score"], "22925639")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Id"], "18291669")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Score"], "22762310")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Id"], "1576878")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Score"], "20846041")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Id"], "6230555")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Score"], "19354488")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Id"], "7627547")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Score"], "18940607")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Id"], "17678444")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Score"], "18834135")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Id"], "3366468")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Score"], "14831756")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Id"], "1959920")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Score"], "14156414")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Id"], "13242628")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Score"], "12584732")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Id"], "17248312")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Score"], "7610436")
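
    # The ELink tests in this file spell out every Id/Score check by hand.
    # Below is a minimal, hypothetical helper sketch (not part of the
    # original suite) showing how such runs could be driven from data.
    def _check_links(self, links, expected):
        """Check (index, Id, Score) triples against a parsed Link list."""
        # Illustrative only; the assertions above and below do not use it.
        for index, link_id, score in expected:
            self.assertEqual(links[index]["Id"], link_id)
            self.assertEqual(links[index]["Score"], score)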

    def test_nucleotide(self):
        """Test parsing Nucleotide to Protein links returned by ELink."""
        # Retrieve IDs from Nucleotide for GI 48819, 7140345 to Protein
        # To create the XML file, use
        # >>> Bio.Entrez.elink(dbfrom="nucleotide", db="protein",
        # ...                  id="48819,7140345")
        with open("Entrez/elink2.xml") as handle:
            record = Entrez.read(handle)
self.assertEqual(record[0]["DbFrom"], "nucleotide")
self.assertEqual(record[0]["IdList"], ["48819", "7140345"])

    def test_pubmed2(self):
        """Test parsing PubMed links returned by ELink (second test)."""
        # Retrieve PubMed related articles for PMIDs 11812492 and 11774222
        # with a publication date from 1995 to the present.
        # To create the XML file, use
        # >>> Bio.Entrez.elink(dbfrom="pubmed", id="11812492,11774222",
        # ...                  db="pubmed", mindate="1995", datetype="pdat")
        with open("Entrez/elink3.xml") as handle:
            record = Entrez.read(handle)
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["DbFrom"], "pubmed")
        self.assertEqual(len(record[0]["IdList"]), 2)
        self.assertEqual(record[0]["IdList"][0], "11812492")
        self.assertEqual(record[0]["IdList"][1], "11774222")
self.assertEqual(record[0]["LinkSetDb"][0]["DbTo"], "pubmed")
self.assertEqual(record[0]["LinkSetDb"][0]["LinkName"], "pubmed_pubmed")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Id"], "11812492")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Score"], "2147483647")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Id"], "11774222")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Score"], "2147483647")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Id"], "11668631")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Score"], "86345306")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Id"], "15111095")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Score"], "81604359")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Id"], "10731564")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Score"], "65665112")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Id"], "15780005")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Score"], "62251079")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Id"], "17885136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Score"], "50322134")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Id"], "17470297")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Score"], "49148434")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Id"], "16005284")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Score"], "49035508")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Id"], "10856373")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Score"], "48363137")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Id"], "15383292")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Score"], "48347159")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Id"], "17040125")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Score"], "48301243")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Id"], "10770808")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Score"], "47696325")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Id"], "11125122")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Score"], "45889695")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Id"], "15287587")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Score"], "45599733")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Id"], "15839745")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Score"], "44650620")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Id"], "10612825")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Score"], "44445812")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Id"], "15024419")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Score"], "44075047")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Id"], "12743802")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Score"], "43873158")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Id"], "15238684")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Score"], "43856864")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Id"], "12386340")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Score"], "43770229")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Id"], "16269725")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Score"], "43712594")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Id"], "10592273")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Score"], "43640108")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Id"], "15383308")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Score"], "42835474")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Id"], "15676075")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Score"], "42272663")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Id"], "11774221")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Score"], "42058380")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Id"], "10592272")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Score"], "41719917")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Id"], "15997407")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Score"], "41535461")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Id"], "15774024")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Score"], "41351079")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Id"], "11233160")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Score"], "41268965")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Id"], "14702162")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Score"], "41147661")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Id"], "16616613")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Score"], "41073100")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Id"], "17202370")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Score"], "40819600")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Id"], "15478601")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Score"], "40578911")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Id"], "15322925")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Score"], "40548101")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Id"], "11472559")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Score"], "40508356")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Id"], "11925998")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Score"], "39844751")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Id"], "12372145")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Score"], "39809277")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Id"], "17562224")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Score"], "38850094")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Id"], "15037105")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Score"], "38758229")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Id"], "14998511")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Score"], "38608049")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Id"], "10092480")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Score"], "38410463")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Id"], "7729881")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Score"], "38329800")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Id"], "12933853")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Score"], "37881850")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Id"], "16818783")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Score"], "37835096")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Id"], "16406333")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Score"], "37775136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Id"], "11472553")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Score"], "37750745")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Id"], "11403387")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Score"], "37707525")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Id"], "17306254")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Score"], "37685833")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Id"], "11516587")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Score"], "37620966")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Id"], "9274032")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Score"], "37528832")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Id"], "12856318")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Score"], "37484650")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Id"], "14695526")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Score"], "37429895")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Id"], "12481045")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Score"], "37051674")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Id"], "11752345")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Score"], "36875760")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Id"], "12467974")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Score"], "36787103")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Id"], "11214099")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Score"], "36710749")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Id"], "14638788")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Score"], "36667774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Id"], "16278157")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Score"], "36598908")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Id"], "11752242")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Score"], "36555638")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Id"], "14681474")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Score"], "36317853")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Id"], "15944077")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Score"], "36264027")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Id"], "12625936")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Score"], "36088314")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Id"], "16672453")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Score"], "35985060")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Id"], "14695451")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Score"], "35971708")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Id"], "12402526")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Score"], "35942170")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Id"], "10592200")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Score"], "35932875")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Id"], "17584494")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Score"], "35869907")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Id"], "17761848")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Score"], "35868206")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Id"], "16697384")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Score"], "35792791")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Id"], "8784774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Score"], "35787497")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Id"], "18000556")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Score"], "35701408")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Id"], "15828434")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Score"], "35604052")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Id"], "10511685")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Score"], "35598319")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Id"], "15608284")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Score"], "35439627")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Id"], "11125071")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Score"], "35414962")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Id"], "11791238")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Score"], "35411948")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Id"], "15710433")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Score"], "35197152")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Id"], "16164550")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Score"], "35172458")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Id"], "17697334")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Score"], "35121478")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Id"], "12537121")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Score"], "35054632")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Id"], "12860672")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Score"], "35046651")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Id"], "15630619")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Score"], "35034076")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Id"], "15125639")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Score"], "35007338")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Id"], "11443570")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Score"], "34935553")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Id"], "12208043")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Score"], "34923107")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Id"], "11731507")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Score"], "34875290")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Id"], "11988510")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Score"], "34773036")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Id"], "11125038")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Score"], "34754724")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Id"], "16381944")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Score"], "34747225")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Id"], "17135206")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Score"], "34735015")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Id"], "17099226")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Score"], "34698054")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Id"], "15608233")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Score"], "34588400")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Id"], "16672057")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Score"], "34583177")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Id"], "15687015")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Score"], "34357840")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Id"], "10782070")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Score"], "34326746")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Id"], "14970722")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Score"], "34217911")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Id"], "18027007")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Score"], "34185436")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Id"], "12387845")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Score"], "34083368")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Id"], "16237012")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Score"], "34070163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Id"], "16351742")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Score"], "33775198")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Id"], "12203989")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Score"], "33759170")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Id"], "15474306")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Score"], "33737675")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Id"], "15270538")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Score"], "33697306")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Id"], "17518759")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Score"], "33695140")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Id"], "16085497")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Score"], "33652537")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Id"], "16423288")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Score"], "33564554")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Id"], "16251775")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Score"], "33547325")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Id"], "12632152")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Score"], "33497998")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Id"], "11269648")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Score"], "33493800")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Id"], "16103603")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Score"], "33378796")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Id"], "12816546")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Score"], "33316167")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Id"], "10221636")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Score"], "33310814")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Id"], "16381973")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Score"], "33236048")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Id"], "15977173")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Score"], "33222497")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Id"], "16351753")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Score"], "33205084")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Id"], "15317790")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Score"], "33195439")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Id"], "17135198")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Score"], "33189951")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Id"], "12701381")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Score"], "33172200")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Id"], "12203988")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Score"], "33172077")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Id"], "11456466")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Score"], "33124900")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Id"], "16936055")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Score"], "33081742")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Id"], "17183477")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Score"], "33005068")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Id"], "9455480")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Score"], "32997067")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Id"], "12490454")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Score"], "32995041")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Id"], "12435493")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Score"], "32990122")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Id"], "11038309")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Score"], "32977663")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Id"], "10366827")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Score"], "32903347")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Id"], "10466136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Score"], "32869387")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Id"], "16381840")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Score"], "32816923")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Id"], "11825250")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Score"], "32776183")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Id"], "12234534")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Score"], "32708547")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Id"], "14624247")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Score"], "32708542")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Id"], "12886019")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Score"], "32653276")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Id"], "12041732")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Score"], "32607185")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Id"], "15336912")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Score"], "32596453")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Id"], "12652910")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Score"], "32567397")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Id"], "14681353")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Score"], "32549157")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Id"], "12586873")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Score"], "32504063")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Id"], "11481430")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Score"], "32462602")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Id"], "15254259")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Score"], "32441737")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Id"], "16873516")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Score"], "32433603")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Id"], "17170002")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Score"], "32425626")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Id"], "12519941")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Score"], "32367760")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][144]["Id"], "11197770")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][144]["Score"], "32362623")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][145]["Id"], "11240843")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][145]["Score"], "32347064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][146]["Id"], "11328780")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][146]["Score"], "32333807")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][147]["Id"], "11875041")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][147]["Score"], "32312036")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][148]["Id"], "11752243")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][148]["Score"], "32268199")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][149]["Id"], "16907992")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][149]["Score"], "32247019")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][150]["Id"], "15046636")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][150]["Score"], "32214942")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][151]["Id"], "10592169")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][151]["Score"], "32137798")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][152]["Id"], "17919582")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][152]["Score"], "32137767")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][153]["Id"], "18025705")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][153]["Score"], "32131322")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][154]["Id"], "11029673")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][154]["Score"], "32126363")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][155]["Id"], "9047337")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][155]["Score"], "32090163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][156]["Id"], "11080372")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][156]["Score"], "31924475")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][157]["Id"], "18045790")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][157]["Score"], "31834367")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][158]["Id"], "10215019")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][158]["Score"], "31823989")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][159]["Id"], "14706096")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][159]["Score"], "31781977")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][160]["Id"], "17537593")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][160]["Score"], "31771566")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][161]["Id"], "12819149")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][161]["Score"], "31683943")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][162]["Id"], "17880721")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][162]["Score"], "31630816")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][163]["Id"], "14681478")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][163]["Score"], "31620457")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][164]["Id"], "11985867")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][164]["Score"], "31544318")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][165]["Id"], "15608248")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][165]["Score"], "31542256")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][166]["Id"], "17401150")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][166]["Score"], "31497289")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][167]["Id"], "10359795")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][167]["Score"], "31460779")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][168]["Id"], "15608286")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][168]["Score"], "31435112")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][169]["Id"], "15774022")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][169]["Score"], "31425851")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][170]["Id"], "9921679")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][170]["Score"], "31396086")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][171]["Id"], "17038195")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][171]["Score"], "31380822")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][172]["Id"], "15491544")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][172]["Score"], "31294370")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][173]["Id"], "10469257")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][173]["Score"], "31291548")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][174]["Id"], "15487498")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][174]["Score"], "31268351")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][175]["Id"], "15383303")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][175]["Score"], "31264596")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][176]["Id"], "15643605")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][176]["Score"], "31259953")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][177]["Id"], "16418238")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][177]["Score"], "31259003")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][178]["Id"], "15500248")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][178]["Score"], "31252080")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][179]["Id"], "15479945")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][179]["Score"], "31249988")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][180]["Id"], "16962738")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][180]["Score"], "31249405")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][181]["Id"], "15094394")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][181]["Score"], "31200337")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][182]["Id"], "11758285")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][182]["Score"], "31180435")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][183]["Id"], "15723693")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][183]["Score"], "31083464")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][184]["Id"], "16710453")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][184]["Score"], "31083136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][185]["Id"], "15311460")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][185]["Score"], "31068402")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][186]["Id"], "16549670")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][186]["Score"], "30995148")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][187]["Id"], "18180957")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][187]["Score"], "30973190")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][188]["Id"], "14681351")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][188]["Score"], "30968930")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][189]["Id"], "10902212")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][189]["Score"], "30960861")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][190]["Id"], "15357877")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][190]["Score"], "30947680")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][191]["Id"], "12356773")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][191]["Score"], "30910321")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][192]["Id"], "17537669")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][192]["Score"], "30893205")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][193]["Id"], "16551372")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][193]["Score"], "30889080")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][194]["Id"], "15231810")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][194]["Score"], "30863616")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][195]["Id"], "12819150")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][195]["Score"], "30847027")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][196]["Id"], "15608257")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][196]["Score"], "30840234")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][197]["Id"], "17384426")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][197]["Score"], "30827754")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][198]["Id"], "15811532")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][198]["Score"], "30823185")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][199]["Id"], "10612821")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][199]["Score"], "30822187")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][200]["Id"], "17062145")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][200]["Score"], "30813605")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][201]["Id"], "11355885")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][201]["Score"], "30810648")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][202]["Id"], "15746365")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][202]["Score"], "30784209")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][203]["Id"], "16282300")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][203]["Score"], "30782807")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][204]["Id"], "15546336")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][204]["Score"], "30773578")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][205]["Id"], "11741630")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][205]["Score"], "30764995")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][206]["Id"], "15980532")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][206]["Score"], "30735790")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][207]["Id"], "12519977")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][207]["Score"], "30707395")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][208]["Id"], "12436197")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][208]["Score"], "30705501")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][209]["Id"], "11125059")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][209]["Score"], "30614888")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][210]["Id"], "11163442")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][210]["Score"], "30550965")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][211]["Id"], "12519964")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][211]["Score"], "30518025")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][212]["Id"], "12083398")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][212]["Score"], "30466595")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][213]["Id"], "11908756")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][213]["Score"], "30462080")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][214]["Id"], "15608226")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][214]["Score"], "30335152")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][215]["Id"], "16845091")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][215]["Score"], "30277120")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][216]["Id"], "17338820")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][216]["Score"], "30208452")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][217]["Id"], "10407783")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][217]["Score"], "30171504")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][218]["Id"], "17130148")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][218]["Score"], "30160136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][219]["Id"], "14681471")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][219]["Score"], "30155757")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][220]["Id"], "17445272")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][220]["Score"], "30015229")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][221]["Id"], "11279516")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][221]["Score"], "29947199")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][222]["Id"], "17221864")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][222]["Score"], "29893674")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][223]["Id"], "15827081")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][223]["Score"], "29891924")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][224]["Id"], "11222582")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][224]["Score"], "29878915")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][225]["Id"], "11384164")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][225]["Score"], "29871698")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][226]["Id"], "17877839")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][226]["Score"], "29843765")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][227]["Id"], "17151077")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][227]["Score"], "29841695")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][228]["Id"], "16381974")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][228]["Score"], "29740312")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][229]["Id"], "10592263")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][229]["Score"], "29633946")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][230]["Id"], "15608212")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][230]["Score"], "29621479")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][231]["Id"], "9847217")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][231]["Score"], "29618439")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][232]["Id"], "17142236")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][232]["Score"], "29577611")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][233]["Id"], "17059604")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][233]["Score"], "29569767")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][234]["Id"], "16845079")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][234]["Score"], "29506663")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][235]["Id"], "14727153")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][235]["Score"], "29368276")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][236]["Id"], "18045498")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][236]["Score"], "29364312")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][237]["Id"], "17185755")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][237]["Score"], "29331905")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][238]["Id"], "18025704")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][238]["Score"], "29323161")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][239]["Id"], "15215374")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][239]["Score"], "29306559")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][240]["Id"], "17135185")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][240]["Score"], "29236297")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][241]["Id"], "10466135")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][241]["Score"], "29231855")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][242]["Id"], "17148475")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][242]["Score"], "29229044")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][243]["Id"], "15657101")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][243]["Score"], "29209567")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][244]["Id"], "14681490")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][244]["Score"], "29189708")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][245]["Id"], "15714328")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][245]["Score"], "29183488")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][246]["Id"], "14960477")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][246]["Score"], "29040531")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][247]["Id"], "11015564")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][247]["Score"], "29011368")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][248]["Id"], "18064491")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][248]["Score"], "28956740")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][249]["Id"], "12734009")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][249]["Score"], "28950064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][250]["Id"], "17094804")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][250]["Score"], "28906953")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][251]["Id"], "17908294")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][251]["Score"], "28897717")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][252]["Id"], "16176584")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][252]["Score"], "28874470")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][253]["Id"], "14715089")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][253]["Score"], "28763886")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][254]["Id"], "14681408")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][254]["Score"], "28697827")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][255]["Id"], "14594716")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][255]["Score"], "28686075")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][256]["Id"], "16528802")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][256]["Score"], "28644452")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][257]["Id"], "16010002")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][257]["Score"], "28637570")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][258]["Id"], "17430565")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][258]["Score"], "28635513")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][259]["Id"], "16452787")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][259]["Score"], "28631832")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][260]["Id"], "11197127")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][260]["Score"], "28619225")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][261]["Id"], "8682188")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][261]["Score"], "28592521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][262]["Id"], "12519940")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][262]["Score"], "28573991")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][263]["Id"], "17121775")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][263]["Score"], "28448726")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][264]["Id"], "16371163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][264]["Score"], "28373394")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][265]["Id"], "15300845")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][265]["Score"], "28338477")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][266]["Id"], "15248903")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][266]["Score"], "28323328")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][267]["Id"], "11319266")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][267]["Score"], "28293166")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][268]["Id"], "16336665")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][268]["Score"], "28231249")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][269]["Id"], "14681350")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][269]["Score"], "28227327")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][270]["Id"], "16216831")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][270]["Score"], "28224610")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][271]["Id"], "15494741")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][271]["Score"], "28190925")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][272]["Id"], "17088289")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][272]["Score"], "28168901")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][273]["Id"], "17099235")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][273]["Score"], "28159766")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][274]["Id"], "15215372")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][274]["Score"], "28129693")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][275]["Id"], "9169870")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][275]["Score"], "28117392")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][276]["Id"], "10077537")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][276]["Score"], "27911205")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][277]["Id"], "18172929")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][277]["Score"], "27885172")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][278]["Id"], "9571806")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][278]["Score"], "27841468")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][279]["Id"], "11752280")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][279]["Score"], "27795833")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][280]["Id"], "11414208")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][280]["Score"], "27725996")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][281]["Id"], "9298642")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][281]["Score"], "27716027")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][282]["Id"], "18073380")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][282]["Score"], "27437383")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][283]["Id"], "14527308")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][283]["Score"], "27332641")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][284]["Id"], "9847220")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][284]["Score"], "27083894")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][285]["Id"], "10413661")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][285]["Score"], "27073030")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][286]["Id"], "10407677")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][286]["Score"], "26907635")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][287]["Id"], "11244060")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][287]["Score"], "26897688")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][288]["Id"], "10227170")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][288]["Score"], "26766431")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][289]["Id"], "8719164")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][289]["Score"], "26515360")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][290]["Id"], "18359019")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][290]["Score"], "26225983")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][291]["Id"], "10511680")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][291]["Score"], "26031196")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][292]["Id"], "9884329")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][292]["Score"], "25992564")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][293]["Id"], "17827295")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][293]["Score"], "25989152")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][294]["Id"], "10899154")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][294]["Score"], "25843128")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][295]["Id"], "11668619")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][295]["Score"], "25822950")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][296]["Id"], "18386064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][296]["Score"], "25702942")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][297]["Id"], "11092731")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][297]["Score"], "25618899")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][298]["Id"], "9520376")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][298]["Score"], "25549761")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][299]["Id"], "11756688")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][299]["Score"], "25440634")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][300]["Id"], "10737802")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][300]["Score"], "25362744")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][301]["Id"], "9879937")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][301]["Score"], "25277089")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][302]["Id"], "17822801")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][302]["Score"], "25252984")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][303]["Id"], "10965872")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][303]["Score"], "25208185")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][304]["Id"], "10511682")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][304]["Score"], "25183443")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][305]["Id"], "10851186")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][305]["Score"], "25092764")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][306]["Id"], "9775388")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][306]["Score"], "25026910")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][307]["Id"], "10810023")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][307]["Score"], "24904718")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][308]["Id"], "18032438")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][308]["Score"], "24509777")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][309]["Id"], "18377816")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][309]["Score"], "24373788")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][310]["Id"], "11774190")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][310]["Score"], "24185658")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][311]["Id"], "10484179")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][311]["Score"], "24122767")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][312]["Id"], "9625791")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][312]["Score"], "24049917")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][313]["Id"], "11446511")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][313]["Score"], "24048253")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][314]["Id"], "10066467")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][314]["Score"], "23968405")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][315]["Id"], "11783003")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][315]["Score"], "23393870")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][316]["Id"], "10611059")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][316]["Score"], "23255298")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][317]["Id"], "10587943")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][317]["Score"], "23014503")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][318]["Id"], "10612820")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][318]["Score"], "22990878")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][319]["Id"], "9685316")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][319]["Score"], "22771348")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][320]["Id"], "11125121")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][320]["Score"], "22732820")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][321]["Id"], "10075567")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][321]["Score"], "22670427")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][322]["Id"], "11084929")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][322]["Score"], "22397665")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][323]["Id"], "11357826")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][323]["Score"], "22362882")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][324]["Id"], "17983575")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][324]["Score"], "22305320")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][325]["Id"], "11038308")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][325]["Score"], "22115670")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][326]["Id"], "18257289")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][326]["Score"], "22053176")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][327]["Id"], "10419978")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][327]["Score"], "22016184")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][328]["Id"], "9421619")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][328]["Score"], "21957407")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][329]["Id"], "10592198")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][329]["Score"], "21803908")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][330]["Id"], "11483982")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][330]["Score"], "20783817")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][331]["Id"], "11329386")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][331]["Score"], "20223493")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][332]["Id"], "10587942")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][332]["Score"], "20208799")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][333]["Id"], "10810024")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][333]["Score"], "19989188")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][334]["Id"], "11480780")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][334]["Score"], "19974101")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][335]["Id"], "11802378")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][335]["Score"], "19738532")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][336]["Id"], "10610803")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][336]["Score"], "19359100")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][337]["Id"], "10407668")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][337]["Score"], "19070525")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][338]["Id"], "18287701")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][338]["Score"], "19065945")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][339]["Id"], "10963611")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][339]["Score"], "18962273")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][340]["Id"], "10447503")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][340]["Score"], "17406980")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][341]["Id"], "9830540")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][341]["Score"], "17143709")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][342]["Id"], "11462837")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][342]["Score"], "16819799")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][343]["Id"], "10637631")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][343]["Score"], "16390796")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][344]["Id"], "11387032")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][344]["Score"], "15698695")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][345]["Id"], "18365535")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][345]["Score"], "15494816")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][346]["Id"], "15181901")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][346]["Score"], "14385628")
def test_medline(self):
'''Test parsing MEDLINE-indexed articles returned by ELink
'''
# Retrieve only MEDLINE-indexed related articles for PMID 12242737
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="12242737", db="pubmed",
# term="medline[sb]")
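# A hedged sketch of regenerating this fixture over the network (the
# email address is a placeholder; NCBI asks for a real contact address):
# >>> from Bio import Entrez
# >>> Entrez.email = "A.N.Other@example.com"
# >>> handle = Entrez.elink(dbfrom="pubmed", id="12242737", db="pubmed",
# ...                       term="medline[sb]")
# >>> open('Entrez/elink4.xml', 'w').write(handle.read())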
handle = open('Entrez/elink4.xml')
record = Entrez.read(handle)
handle.close()
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(record[0]["IdList"], ["12242737"])
self.assertEqual(record[0]["LinkSetDb"][0]["DbTo"], "pubmed")
self.assertEqual(record[0]["LinkSetDb"][0]["LinkName"], "pubmed_pubmed")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Id"], "12242737")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][0]["Score"], "2147483647")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Id"], "11218011")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][1]["Score"], "50825961")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Id"], "11329656")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][2]["Score"], "49822043")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Id"], "9757294")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][3]["Score"], "42645380")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Id"], "9456947")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][4]["Score"], "39871666")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Id"], "17193860")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][5]["Score"], "39717388")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Id"], "11274884")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][6]["Score"], "39233276")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Id"], "12878072")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][7]["Score"], "37748327")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Id"], "11125632")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][8]["Score"], "36227857")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Id"], "12822521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][9]["Score"], "36170366")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Id"], "16999328")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][10]["Score"], "36107139")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Id"], "17875142")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][11]["Score"], "35736802")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Id"], "9510579")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][12]["Score"], "35206779")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Id"], "17354190")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][13]["Score"], "34792954")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Id"], "11702119")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][14]["Score"], "34618984")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Id"], "10024396")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][15]["Score"], "33877753")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Id"], "14650118")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][16]["Score"], "33746160")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Id"], "17243036")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][17]["Score"], "33198930")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Id"], "16580806")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][18]["Score"], "33117197")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Id"], "15278705")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][19]["Score"], "33002826")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Id"], "15236131")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][20]["Score"], "32808406")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Id"], "11368937")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][21]["Score"], "32277701")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Id"], "10688065")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][22]["Score"], "32052850")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Id"], "15635471")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][23]["Score"], "31938251")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Id"], "16357381")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][24]["Score"], "31780147")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Id"], "8153333")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][25]["Score"], "31542202")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Id"], "16284132")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][26]["Score"], "31290577")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Id"], "11329162")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][27]["Score"], "31163088")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Id"], "11973040")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][28]["Score"], "31156707")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Id"], "15143223")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][29]["Score"], "31025329")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Id"], "17040637")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][30]["Score"], "30990506")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Id"], "11016058")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][31]["Score"], "30966482")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Id"], "9317094")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][32]["Score"], "30935529")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Id"], "16133609")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][33]["Score"], "30580027")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Id"], "17325998")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][34]["Score"], "30130533")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Id"], "15505294")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][35]["Score"], "29430378")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Id"], "17268692")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][36]["Score"], "29166153")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Id"], "11329655")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][37]["Score"], "29112282")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Id"], "11775722")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][38]["Score"], "28940754")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Id"], "11907356")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][39]["Score"], "28860163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Id"], "10222515")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][40]["Score"], "28807143")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Id"], "17174054")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][41]["Score"], "28790302")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Id"], "9314960")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][42]["Score"], "28750160")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Id"], "14661661")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][43]["Score"], "28361423")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Id"], "17879696")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][44]["Score"], "28120568")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Id"], "4818442")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][45]["Score"], "28058957")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Id"], "15141648")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][46]["Score"], "28011681")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Id"], "8855688")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][47]["Score"], "27711822")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Id"], "17875143")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][48]["Score"], "27711025")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Id"], "1481295")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][49]["Score"], "27707751")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Id"], "8599783")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][50]["Score"], "27683273")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Id"], "10499696")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][51]["Score"], "27623848")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Id"], "12733684")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][52]["Score"], "27527242")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Id"], "18021675")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][53]["Score"], "27495074")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Id"], "12226761")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][54]["Score"], "27366064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Id"], "4808999")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][55]["Score"], "27304472")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Id"], "16988291")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][56]["Score"], "27295295")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Id"], "10575758")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][57]["Score"], "27243181")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Id"], "8903064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][58]["Score"], "27206664")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Id"], "10811354")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][59]["Score"], "27088219")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Id"], "16096604")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][60]["Score"], "26862979")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Id"], "15788584")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][61]["Score"], "26759584")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Id"], "17376366")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][62]["Score"], "26743241")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Id"], "16566645")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][63]["Score"], "26725076")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Id"], "17259035")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][64]["Score"], "26595433")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Id"], "9314959")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][65]["Score"], "26445900")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Id"], "11895298")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][66]["Score"], "26256774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Id"], "11740602")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][67]["Score"], "26158189")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Id"], "15022983")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][68]["Score"], "25889186")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Id"], "15300544")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][69]["Score"], "25837458")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Id"], "12719915")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][70]["Score"], "25831232")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Id"], "14661306")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][71]["Score"], "25788023")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Id"], "16362812")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][72]["Score"], "25565076")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Id"], "17320773")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][73]["Score"], "25504305")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Id"], "11762248")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][74]["Score"], "25504002")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Id"], "10665303")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][75]["Score"], "25384388")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Id"], "17453494")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][76]["Score"], "25226372")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Id"], "9575723")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][77]["Score"], "25174136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Id"], "12744498")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][78]["Score"], "24971179")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Id"], "12352163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][79]["Score"], "24915990")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Id"], "8290724")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][80]["Score"], "24909462")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Id"], "11973504")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][81]["Score"], "24878058")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Id"], "14661668")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][82]["Score"], "24779779")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Id"], "16552382")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][83]["Score"], "24760919")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Id"], "17709829")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][84]["Score"], "24743292")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Id"], "14528718")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][85]["Score"], "24686212")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Id"], "15008163")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][86]["Score"], "24612994")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Id"], "10051883")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][87]["Score"], "24492331")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Id"], "11027076")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][88]["Score"], "24410525")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Id"], "17543650")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][89]["Score"], "24371825")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Id"], "17658095")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][90]["Score"], "24331965")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Id"], "9193407")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][91]["Score"], "24240252")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Id"], "10578418")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][92]["Score"], "24091226")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Id"], "12592155")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][93]["Score"], "24001341")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Id"], "17157468")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][94]["Score"], "23984321")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Id"], "15094630")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][95]["Score"], "23912874")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Id"], "8794574")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][96]["Score"], "23900764")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Id"], "9125660")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][97]["Score"], "23884352")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Id"], "8819381")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][98]["Score"], "23839719")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Id"], "14661666")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][99]["Score"], "23748510")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Id"], "9658901")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][100]["Score"], "23667126")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Id"], "12744499")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][101]["Score"], "23647189")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Id"], "12164574")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][102]["Score"], "23623853")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Id"], "15136027")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][103]["Score"], "23572558")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Id"], "14872380")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][104]["Score"], "23460906")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Id"], "3905087")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][105]["Score"], "23305022")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Id"], "15642291")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][106]["Score"], "23234831")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Id"], "16928974")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][107]["Score"], "23223298")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Id"], "6072516")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][108]["Score"], "23042548")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Id"], "12949462")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][109]["Score"], "23001441")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Id"], "10761553")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][110]["Score"], "22995991")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Id"], "14661663")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][111]["Score"], "22986720")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Id"], "16338316")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][112]["Score"], "22933288")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Id"], "17464254")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][113]["Score"], "22912253")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Id"], "15529836")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][114]["Score"], "22892154")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Id"], "12361530")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][115]["Score"], "22871698")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Id"], "12876813")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][116]["Score"], "22822515")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Id"], "10749221")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][117]["Score"], "22794373")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Id"], "6482054")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][118]["Score"], "22791927")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Id"], "9016217")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][119]["Score"], "22738432")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Id"], "14702442")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][120]["Score"], "22722123")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Id"], "15279747")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][121]["Score"], "22698787")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Id"], "7892443")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][122]["Score"], "22642038")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Id"], "616459")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][123]["Score"], "22591277")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Id"], "8886718")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][124]["Score"], "22542938")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Id"], "17245521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][125]["Score"], "22538649")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Id"], "1535863")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][126]["Score"], "22468774")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Id"], "15537403")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][127]["Score"], "22458002")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Id"], "16040910")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][128]["Score"], "22452119")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Id"], "16929028")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][129]["Score"], "22433988")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Id"], "16697589")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][130]["Score"], "22366606")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Id"], "531835")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][131]["Score"], "22366454")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Id"], "2308313")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][132]["Score"], "22330898")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Id"], "12522920")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][133]["Score"], "22178764")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Id"], "10222521")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][134]["Score"], "22135023")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Id"], "10499697")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][135]["Score"], "22130302")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Id"], "8903058")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][136]["Score"], "22113132")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Id"], "17441569")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][137]["Score"], "22085858")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Id"], "15284932")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][138]["Score"], "22075791")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Id"], "15466771")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][139]["Score"], "22075418")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Id"], "17145267")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][140]["Score"], "22033864")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Id"], "11329662")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][141]["Score"], "22012948")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Id"], "10222514")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][142]["Score"], "22009777")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Id"], "17383530")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][143]["Score"], "22003600")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][144]["Id"], "12455800")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][144]["Score"], "21992674")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][145]["Id"], "15845051")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][145]["Score"], "21946257")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][146]["Id"], "11443295")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][146]["Score"], "21908841")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][147]["Id"], "15162233")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][147]["Score"], "21903624")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][148]["Id"], "16133610")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][148]["Score"], "21872203")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][149]["Id"], "12845461")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][149]["Score"], "21864314")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][150]["Id"], "16947073")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][150]["Score"], "21832153")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][151]["Id"], "7415301")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][151]["Score"], "21822396")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][152]["Id"], "16416239")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][152]["Score"], "21820165")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][153]["Id"], "4848922")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][153]["Score"], "21786194")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][154]["Id"], "12720164")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][154]["Score"], "21785319")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][155]["Id"], "17093987")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][155]["Score"], "21750370")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][156]["Id"], "16769006")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][156]["Score"], "21735873")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][157]["Id"], "17954835")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][157]["Score"], "21733933")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][158]["Id"], "15236134")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][158]["Score"], "21640099")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][159]["Id"], "12524603")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][159]["Score"], "21636724")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][160]["Id"], "16749985")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][160]["Score"], "21628926")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][161]["Id"], "3213296")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][161]["Score"], "21490232")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][162]["Id"], "11409026")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][162]["Score"], "21061296")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][163]["Id"], "9725288")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][163]["Score"], "21053585")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][164]["Id"], "6217136")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][164]["Score"], "21042914")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][165]["Id"], "663071")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][165]["Score"], "20926141")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][166]["Id"], "10341802")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][166]["Score"], "20797282")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][167]["Id"], "6473764")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][167]["Score"], "20757680")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][168]["Id"], "2584497")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][168]["Score"], "20521350")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][169]["Id"], "8338105")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][169]["Score"], "20501334")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][170]["Id"], "18053822")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][170]["Score"], "20275078")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][171]["Id"], "4058411")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][171]["Score"], "20161667")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][172]["Id"], "11669077")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][172]["Score"], "19993282")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][173]["Id"], "11781922")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][173]["Score"], "19969425")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][174]["Id"], "9793138")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][174]["Score"], "19952972")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][175]["Id"], "9391495")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][175]["Score"], "19815538")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][176]["Id"], "10803203")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][176]["Score"], "19495693")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][177]["Id"], "7326186")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][177]["Score"], "19273989")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][178]["Id"], "11868066")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][178]["Score"], "19220137")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][179]["Id"], "10904988")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][179]["Score"], "19203510")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][180]["Id"], "3288780")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][180]["Score"], "18958114")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][181]["Id"], "2047316")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][181]["Score"], "18907473")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][182]["Id"], "12237004")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][182]["Score"], "18751474")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][183]["Id"], "5627987")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][183]["Score"], "18741903")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][184]["Id"], "9269670")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][184]["Score"], "18666426")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][185]["Id"], "8903059")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][185]["Score"], "18653874")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][186]["Id"], "5594242")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][186]["Score"], "18548780")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][187]["Id"], "7068417")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][187]["Score"], "18390022")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][188]["Id"], "7330196")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][188]["Score"], "18371587")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][189]["Id"], "7408592")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][189]["Score"], "18275541")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][190]["Id"], "8835983")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][190]["Score"], "18176923")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][191]["Id"], "6940010")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][191]["Score"], "18011066")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][192]["Id"], "10499712")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][192]["Score"], "17943586")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][193]["Id"], "4539876")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][193]["Score"], "17915154")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][194]["Id"], "1943587")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][194]["Score"], "17752606")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][195]["Id"], "9847909")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][195]["Score"], "17568386")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][196]["Id"], "11578071")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][196]["Score"], "17561413")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][197]["Id"], "11789473")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][197]["Score"], "17435433")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][198]["Id"], "9885599")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][198]["Score"], "17383598")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][199]["Id"], "7423836")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][199]["Score"], "17196872")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][200]["Id"], "10688063")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][200]["Score"], "16453112")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][201]["Id"], "11695100")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][201]["Score"], "16352760")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][202]["Id"], "11329658")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][202]["Score"], "16089885")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][203]["Id"], "11939665")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][203]["Score"], "15947974")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][204]["Id"], "5512349")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][204]["Score"], "15647685")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][205]["Id"], "2222794")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][205]["Score"], "14981157")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][206]["Id"], "5998281")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][206]["Score"], "14226588")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][207]["Id"], "10475937")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][207]["Score"], "13934390")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][208]["Id"], "5046513")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][208]["Score"], "12769605")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][209]["Id"], "1539132")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][209]["Score"], "12395064")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][210]["Id"], "4414214")
self.assertEqual(record[0]["LinkSetDb"][0]["Link"][210]["Score"], "10113539")
def test_pubmed3(self):
'''Test parsing a PubMed link returned by ELink (third test)
'''
# Create a hyperlink to the first link available for PMID 10611131
# in PubMed
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="10611131", cmd="prlinks")
handle = open('Entrez/elink5.xml')
record = Entrez.read(handle)
handle.close()
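# A minimal sketch (the key layout is verified by the assertions below)
# of turning the parsed record into the actual hyperlink:
# >>> url = record[0]["IdUrlList"][0]["ObjUrl"][0]["Url"]
# >>> hyperlink = '<a href="%s">PMID 10611131</a>' % url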
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(len(record[0]["IdUrlList"]), 1)
self.assertEqual(len(record[0]["IdUrlList"][0]), 2)
self.assertEqual(record[0]["IdUrlList"][0]["Id"], "10611131")
self.assertEqual(len(record[0]["IdUrlList"][0]["ObjUrl"]), 1)
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Url"], "http://brain.oxfordjournals.org/cgi/pmidlookup?view=long&pmid=10611131")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--highwire.stanford.edu-icons-externalservices-pubmed-custom-oxfordjournals_final_free.gif")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["SubjectType"], ["publishers/providers"])
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Attribute"][0], "publisher of information in URL")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Attribute"][1], "full-text online")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Name"], "HighWire Press")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["NameAbbr"], "HighWire")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Id"], "3051")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Url"], "http://highwire.stanford.edu")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["IconUrl"], "http://highwire.stanford.edu/icons/externalservices/pubmed/highwirepress.jpg")
def test_pubmed4(self):
'''Test parsing PubMed links returned by ELink (fourth test)
'''
# List all available links in PubMed, except for libraries, for
# PMIDs 12085856 and 12085853
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="12085856,12085853", cmd="llinks")
handle = open('Entrez/elink6.xml')
record = Entrez.read(handle)
handle.close()
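# A minimal sketch, using only keys asserted below, of listing every
# LinkOut URL for each PMID in the result:
# >>> for idurl in record[0]["IdUrlList"]:
# ...     for objurl in idurl["ObjUrl"]:
# ...         print("%s: %s" % (idurl["Id"], objurl["Url"]))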
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(len(record[0]["IdUrlList"]), 2)
self.assertEqual(record[0]["IdUrlList"][0]["Id"], "12085856")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Url"], "http://symptomresearch.nih.gov/chapter_1/index.htm")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["SubjectType"], ["online tutorials/courses"])
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Name"], "New England Research Institutes Inc.")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["NameAbbr"], "NERI")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Id"], "3291")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][0]["Provider"]["Url"], "http://www.symptomresearch.com")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Url"], "http://www.nlm.nih.gov/medlineplus/coronaryarterybypasssurgery.html")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--www.nlm.nih.gov-medlineplus-images-linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["LinkName"], "Coronary Artery Bypass Surgery")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["SubjectType"], ["consumer health"])
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Provider"]["Name"], "MedlinePlus Health Information")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Provider"]["NameAbbr"], "MEDPLUS")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Provider"]["Id"], "3162")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Provider"]["Url"], "http://medlineplus.gov/")
self.assertEqual(record[0]["IdUrlList"][0]["ObjUrl"][1]["Provider"]["IconUrl"], "http://www.nlm.nih.gov/medlineplus/images/linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["Id"], "12085853")
self.assertEqual(len(record[0]["IdUrlList"][1]["ObjUrl"]), 4)
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Url"], "http://www.nlm.nih.gov/medlineplus/arrhythmia.html")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--www.nlm.nih.gov-medlineplus-images-linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["LinkName"], "Arrhythmia")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["SubjectType"], ["consumer health"])
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Provider"]["Name"], "MedlinePlus Health Information")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Provider"]["NameAbbr"], "MEDPLUS")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Provider"]["Id"], "3162")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Provider"]["Url"], "http://medlineplus.gov/")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][0]["Provider"]["IconUrl"], "http://www.nlm.nih.gov/medlineplus/images/linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Url"], "http://www.nlm.nih.gov/medlineplus/exerciseandphysicalfitness.html")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--www.nlm.nih.gov-medlineplus-images-linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["LinkName"], "Exercise and Physical Fitness")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["SubjectType"], ["consumer health"])
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Provider"]["Name"], "MedlinePlus Health Information")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Provider"]["NameAbbr"], "MEDPLUS")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Provider"]["Id"], "3162")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Provider"]["Url"], "http://medlineplus.gov/")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][1]["Provider"]["IconUrl"], "http://www.nlm.nih.gov/medlineplus/images/linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Url"], "http://www.nlm.nih.gov/medlineplus/exerciseforchildren.html")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--www.nlm.nih.gov-medlineplus-images-linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["LinkName"], "Exercise for Children")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["SubjectType"], ["consumer health"])
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Provider"]["Name"], "MedlinePlus Health Information")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Provider"]["NameAbbr"], "MEDPLUS")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Provider"]["Id"], "3162")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Provider"]["Url"], "http://medlineplus.gov/")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][2]["Provider"]["IconUrl"], "http://www.nlm.nih.gov/medlineplus/images/linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Url"], "http://www.nlm.nih.gov/medlineplus/pacemakersandimplantabledefibrillators.html")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["IconUrl"], "http://www.ncbi.nlm.nih.gov/entrez/query/egifs/http:--www.nlm.nih.gov-medlineplus-images-linkout_sm.gif")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["LinkName"], "Pacemakers and Implantable Defibrillators")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["SubjectType"], ["consumer health"])
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Provider"]["Name"], "MedlinePlus Health Information")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Provider"]["NameAbbr"], "MEDPLUS")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Provider"]["Id"], "3162")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Provider"]["Url"], "http://medlineplus.gov/")
self.assertEqual(record[0]["IdUrlList"][1]["ObjUrl"][3]["Provider"]["IconUrl"], "http://www.nlm.nih.gov/medlineplus/images/linkout_sm.gif")
def test_pubmed5(self):
'''Test parsing PubMed links returned by ELink (fifth test)
'''
# List Entrez database links for PubMed PMIDs 12169658 and 11748140
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="12169658,11748140",
# cmd="acheck")
handle = open('Entrez/elink7.xml')
record = Entrez.read(handle)
handle.close()
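# A minimal sketch, using only keys asserted below, of listing the
# Entrez databases each PMID links to:
# >>> for linkset in record[0]["IdCheckList"]["IdLinkSet"]:
# ...     targets = [info["DbTo"] for info in linkset["LinkInfo"]]
# ...     print("%s -> %s" % (linkset["Id"], sorted(set(targets))))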
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(len(record[0]["IdCheckList"]), 2)
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["Id"], "12169658")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][0]["DbTo"], "books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][0]["LinkName"], "pubmed_books_refs")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][0]["MenuTag"], "Cited in Books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][0]["HtmlTag"], "Cited in Books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][0]["Priority"], "185")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][1]["DbTo"], "gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][1]["LinkName"], "pubmed_gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][1]["MenuTag"], "Gene Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][1]["HtmlTag"], "Gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][1]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][2]["DbTo"], "geo")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][2]["LinkName"], "pubmed_geo")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][2]["MenuTag"], "GEO Profile Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][2]["HtmlTag"], "GEO Profiles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][2]["Priority"], "170")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][3]["DbTo"], "homologene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][3]["LinkName"], "pubmed_homologene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][3]["MenuTag"], "HomoloGene Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][3]["HtmlTag"], "HomoloGene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][3]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][4]["DbTo"], "nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][4]["LinkName"], "pubmed_nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][4]["MenuTag"], "CoreNucleotide Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][4]["HtmlTag"], "CoreNucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][4]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][5]["DbTo"], "nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][5]["LinkName"], "pubmed_nuccore_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][5]["MenuTag"], "CoreNucleotide (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][5]["HtmlTag"], "CoreNucleotide (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][5]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][6]["DbTo"], "nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][6]["LinkName"], "pubmed_nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][6]["MenuTag"], "Nucleotide Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][6]["HtmlTag"], "Nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][6]["Priority"], "135")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][7]["DbTo"], "nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][7]["LinkName"], "pubmed_nucleotide_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][7]["MenuTag"], "Nucleotide (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][7]["HtmlTag"], "Nucleotide (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][7]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][8]["DbTo"], "pcsubstance")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][8]["LinkName"], "pubmed_pcsubstance_mesh")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][8]["MenuTag"], "Substance (MeSH Keyword)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][8]["HtmlTag"], "Substance (MeSH Keyword)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][8]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["DbTo"], "pmc")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["LinkName"], "pubmed_pmc_refs")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["MenuTag"], "Cited in PMC")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["HtmlTag"], "Cited in PMC")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["Url"], "http://www.pubmedcentral.gov/tocrender.fcgi?action=cited&tool=pubmed&pubmedid=<@UID@>")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][9]["Priority"], "180")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][10]["DbTo"], "protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][10]["LinkName"], "pubmed_protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][10]["MenuTag"], "Protein Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][10]["HtmlTag"], "Protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][10]["Priority"], "140")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][11]["DbTo"], "protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][11]["LinkName"], "pubmed_protein_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][11]["MenuTag"], "Protein (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][11]["HtmlTag"], "Protein (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][11]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][12]["DbTo"], "pubmed")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][12]["LinkName"], "pubmed_pubmed")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][12]["MenuTag"], "Related Articles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][12]["HtmlTag"], "Related Articles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][12]["Priority"], "1")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][13]["DbTo"], "taxonomy")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][13]["LinkName"], "pubmed_taxonomy_entrez")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][13]["MenuTag"], "Taxonomy via GenBank")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][13]["HtmlTag"], "Taxonomy via GenBank")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][13]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][14]["DbTo"], "unigene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][14]["LinkName"], "pubmed_unigene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][14]["MenuTag"], "UniGene Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][14]["HtmlTag"], "UniGene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][14]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][15]["DbTo"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][15]["LinkName"], "ExternalLink")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][15]["MenuTag"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][15]["HtmlTag"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][0]["LinkInfo"][15]["Priority"], "255")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["Id"], "11748140")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][0]["DbTo"], "books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][0]["LinkName"], "pubmed_books_refs")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][0]["MenuTag"], "Cited in Books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][0]["HtmlTag"], "Cited in Books")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][0]["Priority"], "185")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][1]["DbTo"], "gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][1]["LinkName"], "pubmed_gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][1]["MenuTag"], "Gene Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][1]["HtmlTag"], "Gene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][1]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][2]["DbTo"], "geo")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][2]["LinkName"], "pubmed_geo")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][2]["MenuTag"], "GEO Profile Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][2]["HtmlTag"], "GEO Profiles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][2]["Priority"], "170")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][3]["DbTo"], "nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][3]["LinkName"], "pubmed_nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][3]["MenuTag"], "CoreNucleotide Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][3]["HtmlTag"], "CoreNucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][3]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][4]["DbTo"], "nuccore")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][4]["LinkName"], "pubmed_nuccore_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][4]["MenuTag"], "CoreNucleotide (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][4]["HtmlTag"], "CoreNucleotide (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][4]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][5]["DbTo"], "nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][5]["LinkName"], "pubmed_nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][5]["MenuTag"], "Nucleotide Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][5]["HtmlTag"], "Nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][5]["Priority"], "135")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][6]["DbTo"], "nucleotide")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][6]["LinkName"], "pubmed_nucleotide_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][6]["MenuTag"], "Nucleotide (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][6]["HtmlTag"], "Nucleotide (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][6]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["DbTo"], "pmc")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["LinkName"], "pubmed_pmc_refs")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["MenuTag"], "Cited in PMC")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["HtmlTag"], "Cited in PMC")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["Url"], "http://www.pubmedcentral.gov/tocrender.fcgi?action=cited&tool=pubmed&pubmedid=<@UID@>")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][7]["Priority"], "180")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][8]["DbTo"], "protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][8]["LinkName"], "pubmed_protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][8]["MenuTag"], "Protein Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][8]["HtmlTag"], "Protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][8]["Priority"], "140")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][9]["DbTo"], "protein")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][9]["LinkName"], "pubmed_protein_refseq")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][9]["MenuTag"], "Protein (RefSeq) Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][9]["HtmlTag"], "Protein (RefSeq)")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][9]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][10]["DbTo"], "pubmed")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][10]["LinkName"], "pubmed_pubmed")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][10]["MenuTag"], "Related Articles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][10]["HtmlTag"], "Related Articles")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][10]["Priority"], "1")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][11]["DbTo"], "taxonomy")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][11]["LinkName"], "pubmed_taxonomy_entrez")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][11]["MenuTag"], "Taxonomy via GenBank")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][11]["HtmlTag"], "Taxonomy via GenBank")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][11]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][12]["DbTo"], "unigene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][12]["LinkName"], "pubmed_unigene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][12]["MenuTag"], "UniGene Links")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][12]["HtmlTag"], "UniGene")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][12]["Priority"], "128")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][13]["DbTo"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][13]["LinkName"], "ExternalLink")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][13]["MenuTag"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][13]["HtmlTag"], "LinkOut")
self.assertEqual(record[0]["IdCheckList"]["IdLinkSet"][1]["LinkInfo"][13]["Priority"], "255")
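# A hedged, illustrative sketch (hypothetical, not used by the assertions
# above): the repeated index-walking over LinkInfo entries can be collapsed
# by a small helper that mirrors the IdLinkSet structure this test exercises.
def _linknames(self, idlinkset):
    '''Return the ordered LinkName values for one IdLinkSet entry.'''
    return [info["LinkName"] for info in idlinkset["LinkInfo"]]
# For the record above, self._linknames(record[0]["IdCheckList"]["IdLinkSet"][1])
# would end with "ExternalLink".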
def test_pubmed6(self):
'''Test parsing pubmed links returned by ELink (sixth test)
'''
# Check for the existence of a Related Articles link for PMIDs
# 10611131, 111645 and 12068369
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="pubmed", id="10611131,111645,12068369",
#                      cmd="ncheck")
input = open('Entrez/elink8.xml')
record = Entrez.read(input)
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["DbFrom"], "pubmed")
self.assertEqual(len(record[0]["IdCheckList"]), 2)
self.assertEqual(len(record[0]["IdCheckList"]["Id"]), 1)
self.assertEqual(record[0]["IdCheckList"]["Id"][0], "12068369")
self.assertEqual(len(record[0]["IdCheckList"]["Id"][0].attributes), 1)
self.assertEqual(record[0]["IdCheckList"]["Id"][0].attributes["HasNeighbor"], "Y")
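# Hypothetical sketch (illustrative only, not called by the tests): reading
# the HasNeighbor flag from an ELink ncheck result, matching the Id
# attributes asserted just above.
def _has_neighbor(self, record, pmid):
    '''Return True if the ncheck result marks pmid as having neighbors.'''
    for uid in record[0]["IdCheckList"]["Id"]:
        if uid == pmid:
            return uid.attributes["HasNeighbor"] == "Y"
    raise KeyError(pmid)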
def test_cancerchromosomes(self):
'''Test parsing cancerchromosomes links returned by ELink
'''
# Retrieve neighbors for Cancer Chromosomes ID 2662 to the link
# subset related by cytogenetics
# To create the XML file, use
# >>> Bio.Entrez.elink(dbfrom="cancerchromosomes",
# db="cancerchromosomes", id="2662",
# cmd="neighbor",
# linkname="cancerchromosomes_cancerchromosomes_cyto")
input = open('Entrez/elink9.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["DbFrom"], "cancerchromosomes")
self.assertEqual(record[0]["IdList"], ["2662"])
class EGQueryTest(unittest.TestCase):
'''Tests for parsing XML output returned by EGQuery
'''
def test_egquery1(self):
'''Test parsing XML output returned by EGQuery (first test)
'''
# Display counts in XML for stem cells in each Entrez database
# To create the XML file, use
# >>> Bio.Entrez.egquery(term="stem cells")
input = open('Entrez/egquery1.xml')
record = Entrez.read(input)
self.assertEqual(record["Term"], "stem cells")
self.assertEqual(record["eGQueryResult"][0]["DbName"], "pubmed")
self.assertEqual(record["eGQueryResult"][0]["MenuName"], "PubMed")
self.assertEqual(record["eGQueryResult"][0]["Count"], "392")
self.assertEqual(record["eGQueryResult"][0]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][1]["DbName"], "pmc")
self.assertEqual(record["eGQueryResult"][1]["MenuName"], "PMC")
self.assertEqual(record["eGQueryResult"][1]["Count"], "173")
self.assertEqual(record["eGQueryResult"][1]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][2]["DbName"], "journals")
self.assertEqual(record["eGQueryResult"][2]["MenuName"], "Journals")
self.assertEqual(record["eGQueryResult"][2]["Count"], "0")
self.assertEqual(record["eGQueryResult"][2]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][3]["DbName"], "mesh")
self.assertEqual(record["eGQueryResult"][3]["MenuName"], "MeSH")
self.assertEqual(record["eGQueryResult"][3]["Count"], "0")
self.assertEqual(record["eGQueryResult"][3]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][4]["DbName"], "books")
self.assertEqual(record["eGQueryResult"][4]["MenuName"], "Books")
self.assertEqual(record["eGQueryResult"][4]["Count"], "10")
self.assertEqual(record["eGQueryResult"][4]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][5]["DbName"], "omim")
self.assertEqual(record["eGQueryResult"][5]["MenuName"], "OMIM")
self.assertEqual(record["eGQueryResult"][5]["Count"], "0")
self.assertEqual(record["eGQueryResult"][5]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][6]["DbName"], "omia")
self.assertEqual(record["eGQueryResult"][6]["MenuName"], "OMIA")
self.assertEqual(record["eGQueryResult"][6]["Count"], "0")
self.assertEqual(record["eGQueryResult"][6]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][7]["DbName"], "ncbisearch")
self.assertEqual(record["eGQueryResult"][7]["MenuName"], "NCBI Web Site")
self.assertEqual(record["eGQueryResult"][7]["Count"], "0")
self.assertEqual(record["eGQueryResult"][7]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][8]["DbName"], "nuccore")
self.assertEqual(record["eGQueryResult"][8]["MenuName"], "CoreNucleotide")
self.assertEqual(record["eGQueryResult"][8]["Count"], "0")
self.assertEqual(record["eGQueryResult"][8]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][9]["DbName"], "nucgss")
self.assertEqual(record["eGQueryResult"][9]["MenuName"], "GSS")
self.assertEqual(record["eGQueryResult"][9]["Count"], "0")
self.assertEqual(record["eGQueryResult"][9]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][10]["DbName"], "nucest")
self.assertEqual(record["eGQueryResult"][10]["MenuName"], "EST")
self.assertEqual(record["eGQueryResult"][10]["Count"], "0")
self.assertEqual(record["eGQueryResult"][10]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][11]["DbName"], "protein")
self.assertEqual(record["eGQueryResult"][11]["MenuName"], "Protein")
self.assertEqual(record["eGQueryResult"][11]["Count"], "0")
self.assertEqual(record["eGQueryResult"][11]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][12]["DbName"], "genome")
self.assertEqual(record["eGQueryResult"][12]["MenuName"], "Genome")
self.assertEqual(record["eGQueryResult"][12]["Count"], "0")
self.assertEqual(record["eGQueryResult"][12]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][13]["DbName"], "structure")
self.assertEqual(record["eGQueryResult"][13]["MenuName"], "Structure")
self.assertEqual(record["eGQueryResult"][13]["Count"], "0")
self.assertEqual(record["eGQueryResult"][13]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][14]["DbName"], "taxonomy")
self.assertEqual(record["eGQueryResult"][14]["MenuName"], "Taxonomy")
self.assertEqual(record["eGQueryResult"][14]["Count"], "0")
self.assertEqual(record["eGQueryResult"][14]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][15]["DbName"], "snp")
self.assertEqual(record["eGQueryResult"][15]["MenuName"], "SNP")
self.assertEqual(record["eGQueryResult"][15]["Count"], "0")
self.assertEqual(record["eGQueryResult"][15]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][16]["DbName"], "gene")
self.assertEqual(record["eGQueryResult"][16]["MenuName"], "Gene")
self.assertEqual(record["eGQueryResult"][16]["Count"], "0")
self.assertEqual(record["eGQueryResult"][16]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][17]["DbName"], "unigene")
self.assertEqual(record["eGQueryResult"][17]["MenuName"], "UniGene")
self.assertEqual(record["eGQueryResult"][17]["Count"], "0")
self.assertEqual(record["eGQueryResult"][17]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][18]["DbName"], "cdd")
self.assertEqual(record["eGQueryResult"][18]["MenuName"], "Conserved Domains")
self.assertEqual(record["eGQueryResult"][18]["Count"], "0")
self.assertEqual(record["eGQueryResult"][18]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][19]["DbName"], "domains")
self.assertEqual(record["eGQueryResult"][19]["MenuName"], "3D Domains")
self.assertEqual(record["eGQueryResult"][19]["Count"], "0")
self.assertEqual(record["eGQueryResult"][19]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][20]["DbName"], "unists")
self.assertEqual(record["eGQueryResult"][20]["MenuName"], "UniSTS")
self.assertEqual(record["eGQueryResult"][20]["Count"], "0")
self.assertEqual(record["eGQueryResult"][20]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][21]["DbName"], "popset")
self.assertEqual(record["eGQueryResult"][21]["MenuName"], "PopSet")
self.assertEqual(record["eGQueryResult"][21]["Count"], "0")
self.assertEqual(record["eGQueryResult"][21]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][22]["DbName"], "geo")
self.assertEqual(record["eGQueryResult"][22]["MenuName"], "GEO Profiles")
self.assertEqual(record["eGQueryResult"][22]["Count"], "0")
self.assertEqual(record["eGQueryResult"][22]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][23]["DbName"], "gds")
self.assertEqual(record["eGQueryResult"][23]["MenuName"], "GEO DataSets")
self.assertEqual(record["eGQueryResult"][23]["Count"], "0")
self.assertEqual(record["eGQueryResult"][23]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][24]["DbName"], "homologene")
self.assertEqual(record["eGQueryResult"][24]["MenuName"], "HomoloGene")
self.assertEqual(record["eGQueryResult"][24]["Count"], "0")
self.assertEqual(record["eGQueryResult"][24]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][25]["DbName"], "cancerchromosomes")
self.assertEqual(record["eGQueryResult"][25]["MenuName"], "CancerChromosomes")
self.assertEqual(record["eGQueryResult"][25]["Count"], "0")
self.assertEqual(record["eGQueryResult"][25]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][26]["DbName"], "pccompound")
self.assertEqual(record["eGQueryResult"][26]["MenuName"], "PubChem Compound")
self.assertEqual(record["eGQueryResult"][26]["Count"], "0")
self.assertEqual(record["eGQueryResult"][26]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][27]["DbName"], "pcsubstance")
self.assertEqual(record["eGQueryResult"][27]["MenuName"], "PubChem Substance")
self.assertEqual(record["eGQueryResult"][27]["Count"], "0")
self.assertEqual(record["eGQueryResult"][27]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][28]["DbName"], "pcassay")
self.assertEqual(record["eGQueryResult"][28]["MenuName"], "PubChem BioAssay")
self.assertEqual(record["eGQueryResult"][28]["Count"], "0")
self.assertEqual(record["eGQueryResult"][28]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][29]["DbName"], "nlmcatalog")
self.assertEqual(record["eGQueryResult"][29]["MenuName"], "NLM Catalog")
self.assertEqual(record["eGQueryResult"][29]["Count"], "2")
self.assertEqual(record["eGQueryResult"][29]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][30]["DbName"], "gensat")
self.assertEqual(record["eGQueryResult"][30]["MenuName"], "GENSAT")
self.assertEqual(record["eGQueryResult"][30]["Count"], "0")
self.assertEqual(record["eGQueryResult"][30]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][31]["DbName"], "probe")
self.assertEqual(record["eGQueryResult"][31]["MenuName"], "Probe")
self.assertEqual(record["eGQueryResult"][31]["Count"], "0")
self.assertEqual(record["eGQueryResult"][31]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][32]["DbName"], "genomeprj")
self.assertEqual(record["eGQueryResult"][32]["MenuName"], "Genome Project")
self.assertEqual(record["eGQueryResult"][32]["Count"], "0")
self.assertEqual(record["eGQueryResult"][32]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][33]["DbName"], "gap")
self.assertEqual(record["eGQueryResult"][33]["MenuName"], "dbGaP")
self.assertEqual(record["eGQueryResult"][33]["Count"], "0")
self.assertEqual(record["eGQueryResult"][33]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][34]["DbName"], "proteinclusters")
self.assertEqual(record["eGQueryResult"][34]["MenuName"], "Protein Clusters")
self.assertEqual(record["eGQueryResult"][34]["Count"], "0")
self.assertEqual(record["eGQueryResult"][34]["Status"], "Term or Database is not found")
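# A minimal, hypothetical lookup (not used by the tests): fetch the Count
# reported for one database in an EGQuery record by name, instead of the
# positional indices asserted above.
def _count(self, record, dbname):
    '''Return the Count string for dbname in record["eGQueryResult"].'''
    for row in record["eGQueryResult"]:
        if row["DbName"] == dbname:
            return row["Count"]
    raise KeyError(dbname)
# e.g. self._count(record, "pubmed") == "392" for the record above.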
def test_egquery2(self):
'''Test parsing XML output returned by EGQuery (second test)
'''
# Display counts in XML for brca1 or brca2 for each Entrez database
# To create the XML file, use
# >>> Bio.Entrez.egquery(term="brca1 OR brca2")
input = open('Entrez/egquery2.xml')
record = Entrez.read(input)
self.assertEqual(record["Term"], "brca1 OR brca2")
self.assertEqual(record["eGQueryResult"][0]["DbName"], "pubmed")
self.assertEqual(record["eGQueryResult"][0]["MenuName"], "PubMed")
self.assertEqual(record["eGQueryResult"][0]["Count"], "0")
self.assertEqual(record["eGQueryResult"][0]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][1]["DbName"], "pmc")
self.assertEqual(record["eGQueryResult"][1]["MenuName"], "PMC")
self.assertEqual(record["eGQueryResult"][1]["Count"], "2739")
self.assertEqual(record["eGQueryResult"][1]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][2]["DbName"], "journals")
self.assertEqual(record["eGQueryResult"][2]["MenuName"], "Journals")
self.assertEqual(record["eGQueryResult"][2]["Count"], "0")
self.assertEqual(record["eGQueryResult"][2]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][3]["DbName"], "mesh")
self.assertEqual(record["eGQueryResult"][3]["MenuName"], "MeSH")
self.assertEqual(record["eGQueryResult"][3]["Count"], "29")
self.assertEqual(record["eGQueryResult"][3]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][4]["DbName"], "books")
self.assertEqual(record["eGQueryResult"][4]["MenuName"], "Books")
self.assertEqual(record["eGQueryResult"][4]["Count"], "392")
self.assertEqual(record["eGQueryResult"][4]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][5]["DbName"], "omim")
self.assertEqual(record["eGQueryResult"][5]["MenuName"], "OMIM")
self.assertEqual(record["eGQueryResult"][5]["Count"], "149")
self.assertEqual(record["eGQueryResult"][5]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][6]["DbName"], "omia")
self.assertEqual(record["eGQueryResult"][6]["MenuName"], "OMIA")
self.assertEqual(record["eGQueryResult"][6]["Count"], "0")
self.assertEqual(record["eGQueryResult"][6]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][7]["DbName"], "ncbisearch")
self.assertEqual(record["eGQueryResult"][7]["MenuName"], "NCBI Web Site")
self.assertEqual(record["eGQueryResult"][7]["Count"], "13")
self.assertEqual(record["eGQueryResult"][7]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][8]["DbName"], "nuccore")
self.assertEqual(record["eGQueryResult"][8]["MenuName"], "CoreNucleotide")
self.assertEqual(record["eGQueryResult"][8]["Count"], "4917")
self.assertEqual(record["eGQueryResult"][8]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][9]["DbName"], "nucgss")
self.assertEqual(record["eGQueryResult"][9]["MenuName"], "GSS")
self.assertEqual(record["eGQueryResult"][9]["Count"], "184")
self.assertEqual(record["eGQueryResult"][9]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][10]["DbName"], "nucest")
self.assertEqual(record["eGQueryResult"][10]["MenuName"], "EST")
self.assertEqual(record["eGQueryResult"][10]["Count"], "600")
self.assertEqual(record["eGQueryResult"][10]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][11]["DbName"], "protein")
self.assertEqual(record["eGQueryResult"][11]["MenuName"], "Protein")
self.assertEqual(record["eGQueryResult"][11]["Count"], "6779")
self.assertEqual(record["eGQueryResult"][11]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][12]["DbName"], "genome")
self.assertEqual(record["eGQueryResult"][12]["MenuName"], "Genome")
self.assertEqual(record["eGQueryResult"][12]["Count"], "44")
self.assertEqual(record["eGQueryResult"][12]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][13]["DbName"], "structure")
self.assertEqual(record["eGQueryResult"][13]["MenuName"], "Structure")
self.assertEqual(record["eGQueryResult"][13]["Count"], "29")
self.assertEqual(record["eGQueryResult"][13]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][14]["DbName"], "taxonomy")
self.assertEqual(record["eGQueryResult"][14]["MenuName"], "Taxonomy")
self.assertEqual(record["eGQueryResult"][14]["Count"], "0")
self.assertEqual(record["eGQueryResult"][14]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][15]["DbName"], "snp")
self.assertEqual(record["eGQueryResult"][15]["MenuName"], "SNP")
self.assertEqual(record["eGQueryResult"][15]["Count"], "2013")
self.assertEqual(record["eGQueryResult"][15]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][16]["DbName"], "gene")
self.assertEqual(record["eGQueryResult"][16]["MenuName"], "Gene")
self.assertEqual(record["eGQueryResult"][16]["Count"], "1775")
self.assertEqual(record["eGQueryResult"][16]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][17]["DbName"], "unigene")
self.assertEqual(record["eGQueryResult"][17]["MenuName"], "UniGene")
self.assertEqual(record["eGQueryResult"][17]["Count"], "207")
self.assertEqual(record["eGQueryResult"][17]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][18]["DbName"], "cdd")
self.assertEqual(record["eGQueryResult"][18]["MenuName"], "Conserved Domains")
self.assertEqual(record["eGQueryResult"][18]["Count"], "17")
self.assertEqual(record["eGQueryResult"][18]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][19]["DbName"], "domains")
self.assertEqual(record["eGQueryResult"][19]["MenuName"], "3D Domains")
self.assertEqual(record["eGQueryResult"][19]["Count"], "131")
self.assertEqual(record["eGQueryResult"][19]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][20]["DbName"], "unists")
self.assertEqual(record["eGQueryResult"][20]["MenuName"], "UniSTS")
self.assertEqual(record["eGQueryResult"][20]["Count"], "198")
self.assertEqual(record["eGQueryResult"][20]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][21]["DbName"], "popset")
self.assertEqual(record["eGQueryResult"][21]["MenuName"], "PopSet")
self.assertEqual(record["eGQueryResult"][21]["Count"], "43")
self.assertEqual(record["eGQueryResult"][21]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][22]["DbName"], "geo")
self.assertEqual(record["eGQueryResult"][22]["MenuName"], "GEO Profiles")
self.assertEqual(record["eGQueryResult"][22]["Count"], "128692")
self.assertEqual(record["eGQueryResult"][22]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][23]["DbName"], "gds")
self.assertEqual(record["eGQueryResult"][23]["MenuName"], "GEO DataSets")
self.assertEqual(record["eGQueryResult"][23]["Count"], "21")
self.assertEqual(record["eGQueryResult"][23]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][24]["DbName"], "homologene")
self.assertEqual(record["eGQueryResult"][24]["MenuName"], "HomoloGene")
self.assertEqual(record["eGQueryResult"][24]["Count"], "50")
self.assertEqual(record["eGQueryResult"][24]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][25]["DbName"], "cancerchromosomes")
self.assertEqual(record["eGQueryResult"][25]["MenuName"], "CancerChromosomes")
self.assertEqual(record["eGQueryResult"][25]["Count"], "18")
self.assertEqual(record["eGQueryResult"][25]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][26]["DbName"], "pccompound")
self.assertEqual(record["eGQueryResult"][26]["MenuName"], "PubChem Compound")
self.assertEqual(record["eGQueryResult"][26]["Count"], "0")
self.assertEqual(record["eGQueryResult"][26]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][27]["DbName"], "pcsubstance")
self.assertEqual(record["eGQueryResult"][27]["MenuName"], "PubChem Substance")
self.assertEqual(record["eGQueryResult"][27]["Count"], "26")
self.assertEqual(record["eGQueryResult"][27]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][28]["DbName"], "pcassay")
self.assertEqual(record["eGQueryResult"][28]["MenuName"], "PubChem BioAssay")
self.assertEqual(record["eGQueryResult"][28]["Count"], "0")
self.assertEqual(record["eGQueryResult"][28]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][29]["DbName"], "nlmcatalog")
self.assertEqual(record["eGQueryResult"][29]["MenuName"], "NLM Catalog")
self.assertEqual(record["eGQueryResult"][29]["Count"], "31")
self.assertEqual(record["eGQueryResult"][29]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][30]["DbName"], "gensat")
self.assertEqual(record["eGQueryResult"][30]["MenuName"], "GENSAT")
self.assertEqual(record["eGQueryResult"][30]["Count"], "0")
self.assertEqual(record["eGQueryResult"][30]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][31]["DbName"], "probe")
self.assertEqual(record["eGQueryResult"][31]["MenuName"], "Probe")
self.assertEqual(record["eGQueryResult"][31]["Count"], "1410")
self.assertEqual(record["eGQueryResult"][31]["Status"], "Ok")
self.assertEqual(record["eGQueryResult"][32]["DbName"], "genomeprj")
self.assertEqual(record["eGQueryResult"][32]["MenuName"], "Genome Project")
self.assertEqual(record["eGQueryResult"][32]["Count"], "0")
self.assertEqual(record["eGQueryResult"][32]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][33]["DbName"], "gap")
self.assertEqual(record["eGQueryResult"][33]["MenuName"], "dbGaP")
self.assertEqual(record["eGQueryResult"][33]["Count"], "0")
self.assertEqual(record["eGQueryResult"][33]["Status"], "Term or Database is not found")
self.assertEqual(record["eGQueryResult"][34]["DbName"], "proteinclusters")
self.assertEqual(record["eGQueryResult"][34]["MenuName"], "Protein Clusters")
self.assertEqual(record["eGQueryResult"][34]["Count"], "0")
self.assertEqual(record["eGQueryResult"][34]["Status"], "Term or Database is not found")
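# A hedged, module-level sketch (illustrative, not part of the suite):
# summarise an EGQuery record as {DbName: count}, keeping only databases
# whose Status is "Ok", as exercised by the two tests above.
def summarize_egquery(record):
    '''Return {DbName: int(Count)} for every database with Status "Ok".'''
    return dict((row["DbName"], int(row["Count"]))
                for row in record["eGQueryResult"]
                if row["Status"] == "Ok")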
class ESpellTest(unittest.TestCase):
'''Tests for parsing XML output returned by ESpell
'''
def test_espell(self):
'''Test parsing XML output returned by ESpell
'''
# Request suggestions for the PubMed search biopythooon
# To create the XML file, use
# >>> Bio.Entrez.espell(db="pubmed", term="biopythooon")
input = open('Entrez/espell.xml')
record = Entrez.read(input)
self.assertEqual(record["Database"], "pubmed")
self.assertEqual(record["Query"], "biopythooon")
self.assertEqual(record["CorrectedQuery"], "biopython")
self.assertEqual(len(record["SpelledQuery"]), 1)
self.assertEqual(record["SpelledQuery"][0], "biopython")
self.assertEqual(record["SpelledQuery"][0].tag, "Replaced")
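# Illustrative helper (hypothetical, not used above): one way the ESpell
# fields asserted in test_espell might be consumed, preferring the
# corrected spelling when one is given.
def corrected_term(record):
    '''Return record["CorrectedQuery"] if non-empty, else the raw Query.'''
    return record["CorrectedQuery"] or record["Query"]
# e.g. corrected_term(record) == "biopython" for the espell.xml record above.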
class EFetchTest(unittest.TestCase):
'''Tests for parsing XML output returned by EFetch
'''
def test_pubmed1(self):
'''Test parsing XML returned by EFetch, PubMed database (first test)
'''
# In PubMed display PMIDs 12091962 and 9997 in xml retrieval mode
# and abstract retrieval type.
# To create the XML file, use
# >>> Bio.Entrez.efetch(db='pubmed', id='12091962,9997',
# retmode='xml', rettype='abstract')
input = open('Entrez/pubmed1.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["MedlineCitation"].attributes["Owner"], "KIE")
self.assertEqual(record[0]["MedlineCitation"].attributes["Status"], "MEDLINE")
self.assertEqual(record[0]["MedlineCitation"]["PMID"], "12091962")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Year"], "1991")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Month"], "01")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Day"], "22")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Year"], "1991")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Month"], "01")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Day"], "22")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Year"], "2007")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Month"], "11")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Day"], "15")
self.assertEqual(record[0]["MedlineCitation"]["Article"].attributes["PubModel"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["ISSN"], "1043-1578")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["ISSN"].attributes["IssnType"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"].attributes["CitedMedium"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Volume"], "17")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Issue"], "1")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Year"], "1990")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Season"], "Spring")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["Title"], "Social justice (San Francisco, Calif.)")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["ArticleTitle"], "The treatment of AIDS behind the walls of correctional facilities.")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Pagination"]["MedlinePgn"], "113-25")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"].attributes["CompleteYN"], 'Y')
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["LastName"], "Olivero")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["ForeName"], "J Michael")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["Initials"], "JM")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Language"], ["eng"])
self.assertEqual(record[0]["MedlineCitation"]["Article"]["PublicationTypeList"], ["Journal Article", "Review"])
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["Country"], "United States")
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["MedlineTA"], "Soc Justice")
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["NlmUniqueID"], "9891830")
self.assertEqual(record[0]["MedlineCitation"]["CitationSubset"], ["E"])
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"], "AIDS Serodiagnosis")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"], "Acquired Immunodeficiency Syndrome")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"], "Civil Rights")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"], "HIV Seropositivity")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"], "Humans")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"], "Jurisprudence")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"], "Law Enforcement")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"], "Mass Screening")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"], "Minority Groups")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"], "Organizational Policy")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"], "Patient Care")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][11]["DescriptorName"], "Prejudice")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][11]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][12]["DescriptorName"], "Prisoners")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][12]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][13]["DescriptorName"], "Public Policy")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][13]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][14]["DescriptorName"], "Quarantine")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][14]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][15]["DescriptorName"], "Social Control, Formal")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][15]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][16]["DescriptorName"], "Statistics as Topic")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][16]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][17]["DescriptorName"], "Stereotyping")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][17]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][18]["DescriptorName"], "United States")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][18]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["NumberOfReferences"], "63")
self.assertEqual(record[0]["MedlineCitation"]["OtherID"][0], "31840")
self.assertEqual(record[0]["MedlineCitation"]["OtherID"][0].attributes["Source"], "KIE")
self.assertEqual(record[0]["MedlineCitation"]["KeywordList"][0].attributes["Owner"], "KIE")
self.assertEqual(record[0]["MedlineCitation"]["KeywordList"][0][0], "Health Care and Public Health")
self.assertEqual(record[0]["MedlineCitation"]["KeywordList"][0][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["KeywordList"][0][1], "Legal Approach")
self.assertEqual(record[0]["MedlineCitation"]["KeywordList"][0][1].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][0], "14 fn.")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][0].attributes["Owner"], "KIE")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][1], "KIE BoB Subject Heading: AIDS")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][1].attributes["Owner"], "KIE")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][2], "63 refs.")
self.assertEqual(record[0]["MedlineCitation"]["GeneralNote"][2].attributes["Owner"], "KIE")
self.assertEqual(record[0]["PubmedData"]["History"][0][0].attributes["PubStatus"], "pubmed")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Year"], "1990")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Month"], "4")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Day"], "1")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Hour"], "0")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Minute"], "0")
self.assertEqual(record[0]["PubmedData"]["History"][0][1].attributes["PubStatus"], "medline")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Year"], "2002")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Month"], "7")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Day"], "16")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Hour"], "10")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Minute"], "1")
self.assertEqual(record[0]["PubmedData"]["PublicationStatus"], "ppublish")
self.assertEqual(len(record[0]["PubmedData"]["ArticleIdList"]), 1)
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][0], "12091962")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][0].attributes["IdType"], "pubmed")
self.assertEqual(record[1]["MedlineCitation"].attributes["Owner"], "NLM")
self.assertEqual(record[1]["MedlineCitation"].attributes["Status"], "MEDLINE")
self.assertEqual(record[1]["MedlineCitation"]["PMID"], "9997")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Year"], "1976")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Month"], "12")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Day"], "30")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Year"], "1976")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Month"], "12")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Day"], "30")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Year"], "2003")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Month"], "11")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Day"], "14")
self.assertEqual(record[1]["MedlineCitation"]["Article"].attributes["PubModel"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISSN"], "0006-3002")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISSN"].attributes["IssnType"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"].attributes["CitedMedium"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Volume"], "446")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Issue"], "1")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Year"], "1976")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Month"], "Sep")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Day"], "28")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["Title"], "Biochimica et biophysica acta")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISOAbbreviation"], "Biochim. Biophys. Acta")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["ArticleTitle"], "Magnetic studies of Chromatium flavocytochrome C552. A mechanism for heme-flavin interaction.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Pagination"]["MedlinePgn"], "179-91")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Abstract"]["AbstractText"], "Electron paramagnetic resonance and magnetic susceptibility studies of Chromatium flavocytochrome C552 and its diheme flavin-free subunit at temperatures below 45 degrees K are reported. The results show that in the intact protein and the subunit the two low-spin (S = 1/2) heme irons are distinguishable, giving rise to separate EPR signals. In the intact protein only, one of the heme irons exists in two different low spin environments in the pH range 5.5 to 10.5, while the other remains in a constant environment. Factors influencing the variable heme iron environment also influence flavin reactivity, indicating the existence of a mechanism for heme-flavin interaction.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"].attributes["CompleteYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["LastName"], "Strekas")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["ForeName"], "T C")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["Initials"], "TC")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Language"], ["eng"])
self.assertEqual(record[1]["MedlineCitation"]["Article"]["PublicationTypeList"], ["Journal Article"])
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["Country"], "NETHERLANDS")
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["MedlineTA"], "Biochim Biophys Acta")
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["NlmUniqueID"], "0217513")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][0]["RegistryNumber"], "0")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][0]["NameOfSubstance"], "Cytochrome c Group")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][1]["RegistryNumber"], "0")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][1]["NameOfSubstance"], "Flavins")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][2]["RegistryNumber"], "14875-96-8")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][2]["NameOfSubstance"], "Heme")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][3]["RegistryNumber"], "7439-89-6")
self.assertEqual(record[1]["MedlineCitation"]["ChemicalList"][3]["NameOfSubstance"], "Iron")
self.assertEqual(record[1]["MedlineCitation"]["CitationSubset"], ["IM"])
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"], "Binding Sites")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"], "Chromatium")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][1]["QualifierName"][0], "enzymology")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][1]["QualifierName"][0].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"], "Cytochrome c Group")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"], "Electron Spin Resonance Spectroscopy")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"], "Flavins")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"], "Heme")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"], "Hydrogen-Ion Concentration")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"], "Iron")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][0], "analysis")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"], "Magnetics")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"], "Oxidation-Reduction")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"], "Protein Binding")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][11]["DescriptorName"], "Protein Conformation")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][11]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][12]["DescriptorName"], "Temperature")
self.assertEqual(record[1]["MedlineCitation"]["MeshHeadingList"][12]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[1]["PubmedData"]["History"][0][0].attributes["PubStatus"], "pubmed")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Year"], "1976")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Month"], "9")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Day"], "28")
self.assertEqual(record[1]["PubmedData"]["History"][0][1].attributes["PubStatus"], "medline")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Year"], "1976")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Month"], "9")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Day"], "28")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Hour"], "0")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Minute"], "1")
self.assertEqual(record[1]["PubmedData"]["PublicationStatus"], "ppublish")
self.assertEqual(len(record[1]["PubmedData"]["ArticleIdList"]), 1)
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][0], "9997")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][0].attributes["IdType"], "pubmed")
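# Hypothetical convenience (illustrative only, not called by the tests):
# extract (DescriptorName, MajorTopicYN) pairs from a MedlineCitation,
# mirroring the MeshHeadingList structure asserted at length above.
def _mesh_descriptors(self, citation):
    '''Return (descriptor, major_topic) pairs for a MedlineCitation.'''
    return [(h["DescriptorName"],
             h["DescriptorName"].attributes["MajorTopicYN"])
            for h in citation["MeshHeadingList"]]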
def test_pubmed2(self):
'''Test parsing XML returned by EFetch, PubMed database (second test)
'''
# In PubMed display PMIDs 11748933 and 11700088 in xml retrieval mode.
# To create the XML file, use
# >>> Bio.Entrez.efetch(db='pubmed', id="11748933,11700088",
# retmode="xml")
input = open('Entrez/pubmed2.xml')
record = Entrez.read(input)
self.assertEqual(record[0]["MedlineCitation"].attributes["Owner"], "NLM")
self.assertEqual(record[0]["MedlineCitation"].attributes["Status"], "MEDLINE")
self.assertEqual(record[0]["MedlineCitation"]["PMID"], "11748933")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Year"], "2001")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Month"], "12")
self.assertEqual(record[0]["MedlineCitation"]["DateCreated"]["Day"], "25")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Year"], "2002")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Month"], "03")
self.assertEqual(record[0]["MedlineCitation"]["DateCompleted"]["Day"], "04")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Year"], "2006")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Month"], "11")
self.assertEqual(record[0]["MedlineCitation"]["DateRevised"]["Day"], "15")
self.assertEqual(record[0]["MedlineCitation"]["Article"].attributes["PubModel"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["ISSN"], "0011-2240")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["ISSN"].attributes["IssnType"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"].attributes["CitedMedium"], "Print")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Volume"], "42")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Issue"], "4")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Year"], "2001")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Month"], "Jun")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["Title"], "Cryobiology")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Journal"]["ISOAbbreviation"], "Cryobiology")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["ArticleTitle"], "Is cryopreservation a homogeneous process? Ultrastructure and motility of untreated, prefreezing, and postthawed spermatozoa of Diplodus puntazzo (Cetti).")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Pagination"]["MedlinePgn"], "244-55")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Abstract"]["AbstractText"], "This study subdivides the cryopreservation procedure for Diplodus puntazzo spermatozoa into three key phases, fresh, prefreezing (samples equilibrated in cryosolutions), and postthawed stages, and examines the ultrastructural anomalies and motility profiles of spermatozoa in each stage, with different cryodiluents. Two simple cryosolutions were evaluated: 0.17 M sodium chloride containing a final concentration of 15% dimethyl sulfoxide (Me(2)SO) (cryosolution A) and 0.1 M sodium citrate containing a final concentration of 10% Me(2)SO (cryosolution B). Ultrastructural anomalies of the plasmatic and nuclear membranes of the sperm head were common and the severity of the cryoinjury differed significantly between the pre- and the postfreezing phases and between the two cryosolutions. In spermatozoa diluted with cryosolution A, during the prefreezing phase, the plasmalemma of 61% of the cells was absent or damaged compared with 24% in the fresh sample (P < 0.001). In spermatozoa diluted with cryosolution B, there was a pronounced increase in the number of cells lacking the head plasmatic membrane from the prefreezing to the postthawed stages (from 32 to 52%, P < 0.01). In both cryosolutions, damages to nuclear membrane were significantly higher after freezing (cryosolution A: 8 to 23%, P < 0.01; cryosolution B: 5 to 38%, P < 0.001). With cryosolution A, the after-activation motility profile confirmed a consistent drop from fresh at the prefreezing stage, whereas freezing and thawing did not affect the motility much further and 50% of the cells were immotile by 60-90 s after activation. With cryosolution B, only the postthawing stage showed a sharp drop of motility profile. This study suggests that the different phases of the cryoprocess should be investigated to better understand the process of sperm damage.")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Abstract"]["CopyrightInformation"], "Copyright 2001 Elsevier Science.")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Affiliation"], u'Dipartimento di Scienze Ambientali, Universit\xe0 degli Studi della Tuscia, 01100 Viterbo, Italy.')
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"].attributes["CompleteYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["LastName"], "Taddei")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["ForeName"], "A R")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][0]["Initials"], "AR")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][1].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][1]["LastName"], "Barbato")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][1]["ForeName"], "F")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][1]["Initials"], "F")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][2].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][2]["LastName"], "Abelli")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][2]["ForeName"], "L")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][2]["Initials"], "L")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][3].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][3]["LastName"], "Canese")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][3]["ForeName"], "S")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][3]["Initials"], "S")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][4].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][4]["LastName"], "Moretti")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][4]["ForeName"], "F")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][4]["Initials"], "F")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][5].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][5]["LastName"], "Rana")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][5]["ForeName"], "K J")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][5]["Initials"], "KJ")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][6].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][6]["LastName"], "Fausto")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][6]["ForeName"], "A M")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][6]["Initials"], "AM")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][7].attributes["ValidYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][7]["LastName"], "Mazzini")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][7]["ForeName"], "M")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["AuthorList"][7]["Initials"], "M")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["Language"], ["eng"])
self.assertEqual(record[0]["MedlineCitation"]["Article"]["PublicationTypeList"][0], "Journal Article")
self.assertEqual(record[0]["MedlineCitation"]["Article"]["PublicationTypeList"][1], "Research Support, Non-U.S. Gov't")
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["Country"], "United States")
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["MedlineTA"], "Cryobiology")
self.assertEqual(record[0]["MedlineCitation"]["MedlineJournalInfo"]["NlmUniqueID"], "0006252")
self.assertEqual(record[0]["MedlineCitation"]["CitationSubset"], ["IM"])
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"], "Animals")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][0]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"], "Cell Membrane")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["QualifierName"][0], "ultrastructure")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][1]["QualifierName"][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"], "Cryopreservation")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["QualifierName"][0], "methods")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][2]["QualifierName"][0].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"], "Male")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][3]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"], "Microscopy, Electron")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][4]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"], "Microscopy, Electron, Scanning")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][5]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"], "Nuclear Envelope")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["QualifierName"][0], "ultrastructure")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][6]["QualifierName"][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"], "Sea Bream")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][0], "anatomy & histology")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][0].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][1], "physiology")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][7]["QualifierName"][1].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"], "Semen Preservation")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["QualifierName"][0], "adverse effects")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["QualifierName"][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["QualifierName"][1], "methods")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][8]["QualifierName"][1].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"], "Sperm Motility")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][9]["DescriptorName"].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"], "Spermatozoa")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["DescriptorName"].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["QualifierName"][0], "physiology")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["QualifierName"][0].attributes["MajorTopicYN"], "N")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["QualifierName"][1], "ultrastructure")
self.assertEqual(record[0]["MedlineCitation"]["MeshHeadingList"][10]["QualifierName"][1].attributes["MajorTopicYN"], "Y")
self.assertEqual(record[0]["PubmedData"]["History"][0][0].attributes["PubStatus"], "pubmed")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Year"], "2001")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Month"], "12")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Day"], "26")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Hour"], "10")
self.assertEqual(record[0]["PubmedData"]["History"][0][0]["Minute"], "0")
self.assertEqual(record[0]["PubmedData"]["History"][0][1].attributes["PubStatus"], "medline")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Year"], "2002")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Month"], "3")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Day"], "5")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Hour"], "10")
self.assertEqual(record[0]["PubmedData"]["History"][0][1]["Minute"], "1")
self.assertEqual(record[0]["PubmedData"]["PublicationStatus"], "ppublish")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][0], "11748933")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][0].attributes["IdType"], "pubmed")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][1], "10.1006/cryo.2001.2328")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][1].attributes["IdType"], "doi")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][2], "S0011-2240(01)92328-4")
self.assertEqual(record[0]["PubmedData"]["ArticleIdList"][2].attributes["IdType"], "pii")
self.assertEqual(record[1]["MedlineCitation"].attributes["Owner"], "NLM")
self.assertEqual(record[1]["MedlineCitation"].attributes["Status"], "PubMed-not-MEDLINE")
self.assertEqual(record[1]["MedlineCitation"]["PMID"], "11700088")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Year"], "2001")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Month"], "11")
self.assertEqual(record[1]["MedlineCitation"]["DateCreated"]["Day"], "08")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Year"], "2001")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Month"], "12")
self.assertEqual(record[1]["MedlineCitation"]["DateCompleted"]["Day"], "20")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Year"], "2003")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Month"], "10")
self.assertEqual(record[1]["MedlineCitation"]["DateRevised"]["Day"], "31")
self.assertEqual(record[1]["MedlineCitation"]["Article"].attributes["PubModel"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISSN"], "1090-7807")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISSN"].attributes["IssnType"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"].attributes["CitedMedium"], "Print")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Volume"], "153")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["Issue"], "1")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Year"], "2001")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["JournalIssue"]["PubDate"]["Month"], "Nov")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["Title"], "Journal of magnetic resonance (San Diego, Calif. : 1997)")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Journal"]["ISOAbbreviation"], "J. Magn. Reson.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["ArticleTitle"], "Proton MRI of (13)C distribution by J and chemical shift editing.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Pagination"]["MedlinePgn"], "117-23")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Abstract"]["AbstractText"], "The sensitivity of (13)C NMR imaging can be considerably favored by detecting the (1)H nuclei bound to (13)C nuclei via scalar J-interaction (X-filter). However, the J-editing approaches have difficulty in discriminating between compounds with similar J-constant as, for example, different glucose metabolites. In such cases, it is almost impossible to get J-edited images of a single-compound distribution, since the various molecules are distinguishable only via their chemical shift. In a recent application of J-editing to high-resolution spectroscopy, it has been shown that a more efficient chemical selectivity could be obtained by utilizing the larger chemical shift range of (13)C. This has been made by introducing frequency-selective (13)C pulses that allow a great capability of indirect chemical separation. Here a double-resonance imaging approach is proposed, based on both J-editing and (13)C chemical shift editing, which achieves a powerful chemical selectivity and is able to produce full maps of specific chemical compounds. Results are presented on a multicompartments sample containing solutions of glucose and lactic and glutamic acid in water.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Abstract"]["CopyrightInformation"], "Copyright 2001 Academic Press.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Affiliation"], "INFM and Department of Physics, University of L'Aquila, I-67100 L'Aquila, Italy.")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"].attributes["CompleteYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["LastName"], "Casieri")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["ForeName"], "C")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][0]["Initials"], "C")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][1].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][1]["LastName"], "Testa")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][1]["ForeName"], "C")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][1]["Initials"], "C")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][2].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][2]["LastName"], "Carpinelli")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][2]["ForeName"], "G")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][2]["Initials"], "G")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][3].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][3]["LastName"], "Canese")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][3]["ForeName"], "R")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][3]["Initials"], "R")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][4].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][4]["LastName"], "Podo")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][4]["ForeName"], "F")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][4]["Initials"], "F")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][5].attributes["ValidYN"], "Y")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][5]["LastName"], "De Luca")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][5]["ForeName"], "F")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["AuthorList"][5]["Initials"], "F")
self.assertEqual(record[1]["MedlineCitation"]["Article"]["Language"], ["eng"])
self.assertEqual(record[1]["MedlineCitation"]["Article"]["PublicationTypeList"][0], "Journal Article")
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["Country"], "United States")
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["MedlineTA"], "J Magn Reson")
self.assertEqual(record[1]["MedlineCitation"]["MedlineJournalInfo"]["NlmUniqueID"], "9707935")
self.assertEqual(record[1]["PubmedData"]["History"][0][0].attributes["PubStatus"], "pubmed")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Year"], "2001")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Month"], "11")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Day"], "9")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Hour"], "10")
self.assertEqual(record[1]["PubmedData"]["History"][0][0]["Minute"], "0")
self.assertEqual(record[1]["PubmedData"]["History"][0][1].attributes["PubStatus"], "medline")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Year"], "2001")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Month"], "11")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Day"], "9")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Hour"], "10")
self.assertEqual(record[1]["PubmedData"]["History"][0][1]["Minute"], "1")
self.assertEqual(record[1]["PubmedData"]["PublicationStatus"], "ppublish")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][0], "11700088")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][0].attributes["IdType"], "pubmed")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][1], "10.1006/jmre.2001.2429")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][1].attributes["IdType"], "doi")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][2], "S1090-7807(01)92429-2")
self.assertEqual(record[1]["PubmedData"]["ArticleIdList"][2].attributes["IdType"], "pii")
def test_journals(self):
'''Test parsing XML returned by EFetch, Journals database
'''
# In Journals display records for journal IDs 22682,21698,1490
# To create the XML file, use
# >>> Bio.Entrez.efetch(db="journals", id=["22682","21698","1490"],
# rettype="full", retmode='xml')
handle = open('Entrez/serialset.xml')
record = Entrez.read(handle)
handle.close()
self.assertEqual(record[0]["NlmUniqueID"], "100971611")
self.assertEqual(record[0]["Title"], "21st century science & technology")
self.assertEqual(record[0]["MedlineTA"], "21st Century Sci Technol")
self.assertEqual(len(record[0]["PublicationInfo"]), 8)
self.assertEqual(record[0]["PublicationInfo"]["Country"], "United States")
self.assertEqual(record[0]["PublicationInfo"]["Place"], ["[Washington, D.C. :"])
self.assertEqual(record[0]["PublicationInfo"]["Publisher"], ["21st Century Science Associates,"])
self.assertEqual(record[0]["PublicationInfo"]["PublicationFirstYear"], "1988")
self.assertEqual(record[0]["PublicationInfo"]["Frequency"], ["Quarterly,"])
self.assertEqual(record[0]["PublicationInfo"]["Imprint"], [])
self.assertEqual(record[0]["PublicationInfo"]["DateIssued"], [])
self.assertEqual(record[0]["PublicationInfo"]["DatesOfSerialPublication"], [])
self.assertEqual(record[0]["ISSN"], ["0895-6820"])
self.assertEqual(record[0]["ISSN"][0].attributes["IssnType"], "Print")
self.assertEqual(record[0]["Language"], ["eng"])
self.assertEqual(record[0]["AcidFreeYN"], "N")
self.assertEqual(record[0]["MinorTitleChangeYN"], "N")
self.assertEqual(record[0]["CurrentlyIndexedYN"], "N")
self.assertEqual(record[0]["IndexOnlineYN"], "N")
self.assertEqual(record[0]["IndexingSubset"], "S")
self.assertEqual(len(record[0]["CrossReferenceList"]), 5)
self.assertEqual(record[0]["CrossReferenceList"][0]["XrTitle"], "21 century")
self.assertEqual(record[0]["CrossReferenceList"][0].attributes["XrType"], "X")
self.assertEqual(record[0]["CrossReferenceList"][1]["XrTitle"], "21st century science & technology.")
self.assertEqual(record[0]["CrossReferenceList"][1].attributes["XrType"], "A")
self.assertEqual(record[0]["CrossReferenceList"][2]["XrTitle"], "21st century science and technology")
self.assertEqual(record[0]["CrossReferenceList"][2].attributes["XrType"], "X")
self.assertEqual(record[0]["CrossReferenceList"][3]["XrTitle"], "Twenty-first century science & technology")
self.assertEqual(record[0]["CrossReferenceList"][3].attributes["XrType"], "X")
self.assertEqual(record[0]["CrossReferenceList"][4]["XrTitle"], "Twenty-first century science and technology")
self.assertEqual(record[0]["CrossReferenceList"][4].attributes["XrType"], "X")
self.assertEqual(record[0]["SortSerialName"], "21ST CENTURY SCIENCE & TECHNOLOGY")
self.assertEqual(record[0]["IlsCreatedTimestamp"]["Year"], "2000")
self.assertEqual(record[0]["IlsCreatedTimestamp"]["Month"], "11")
self.assertEqual(record[0]["IlsCreatedTimestamp"]["Day"], "22")
self.assertEqual(record[0]["IlsUpdatedTimestamp"]["Year"], "2006")
self.assertEqual(record[0]["IlsUpdatedTimestamp"]["Month"], "10")
self.assertEqual(record[0]["IlsUpdatedTimestamp"]["Day"], "21")
self.assertEqual(record[1].attributes["DataCreationMethod"], "P")
self.assertEqual(record[1]["NlmUniqueID"], "100939625")
self.assertEqual(record[1]["Title"], "AIHAJ : a journal for the science of occupational and environmental\nhealth and safety")
self.assertEqual(record[1]["MedlineTA"], "AIHAJ")
self.assertEqual(len(record[1]["PublicationInfo"]), 9)
self.assertEqual(record[1]["PublicationInfo"]["Country"], "United States")
self.assertEqual(record[1]["PublicationInfo"]["Place"], ["Fairfax, VA :"])
self.assertEqual(record[1]["PublicationInfo"]["Publisher"], ["American Industrial Hygiene Association,"])
self.assertEqual(record[1]["PublicationInfo"]["PublicationFirstYear"], "2000")
self.assertEqual(record[1]["PublicationInfo"]["PublicationEndYear"], "2001")
self.assertEqual(record[1]["PublicationInfo"]["Frequency"], ["Bimonthly"])
self.assertEqual(record[1]["PublicationInfo"]["Imprint"], [])
self.assertEqual(record[1]["PublicationInfo"]["DateIssued"], [])
self.assertEqual(record[1]["PublicationInfo"]["DatesOfSerialPublication"], [])
self.assertEqual(record[1]["ISSN"], ["1529-8663"])
self.assertEqual(record[1]["ISSN"][0].attributes["IssnType"], "Print")
self.assertEqual(record[1]["Language"], ["eng"])
self.assertEqual(record[1]["AcidFreeYN"], "N")
self.assertEqual(record[1]["ContinuationNotes"], "Continues: American Industrial Hygiene Association\njournal. Continued by: AIHA journal. ")
self.assertEqual(record[1]["MinorTitleChangeYN"], "N")
self.assertEqual(len(record[1]["IndexingHistoryList"]), 2)
self.assertEqual(record[1]["IndexingHistoryList"][0].attributes["CitationSubset"], "IM")
self.assertEqual(record[1]["IndexingHistoryList"][0].attributes["IndexingTreatment"], "Full")
self.assertEqual(record[1]["IndexingHistoryList"][0].attributes["IndexingStatus"], "Continued-by-another-indexed-title")
self.assertEqual(record[1]["IndexingHistoryList"][0]["DateOfAction"]["Year"], "2002")
self.assertEqual(record[1]["IndexingHistoryList"][0]["DateOfAction"]["Month"], "06")
self.assertEqual(record[1]["IndexingHistoryList"][0]["DateOfAction"]["Day"], "03")
self.assertEqual(record[1]["IndexingHistoryList"][1].attributes["CitationSubset"], "IM")
self.assertEqual(record[1]["IndexingHistoryList"][1].attributes["IndexingTreatment"], "Full")
self.assertEqual(record[1]["IndexingHistoryList"][1].attributes["IndexingStatus"], "Currently-indexed-Title-changed")
self.assertEqual(record[1]["IndexingHistoryList"][1]["DateOfAction"]["Year"], "2000")
self.assertEqual(record[1]["IndexingHistoryList"][1]["DateOfAction"]["Month"], "03")
self.assertEqual(record[1]["IndexingHistoryList"][1]["DateOfAction"]["Day"], "24")
self.assertEqual(record[1]["IndexingHistoryList"][1]["Coverage"], "v61n1,Jan./Feb. 2000-v62n6,Nov./Dec. 2001")
self.assertEqual(record[1]["CurrentlyIndexedYN"], "N")
self.assertEqual(record[1]["IndexOnlineYN"], "N")
self.assertEqual(record[1]["IndexingSubset"], "IM")
self.assertEqual(record[1]["BroadJournalHeadingList"][0], "Occupational Medicine")
self.assertEqual(len(record[1]["CrossReferenceList"]), 2)
self.assertEqual(record[1]["CrossReferenceList"][0]["XrTitle"], "AIHAJ :")
self.assertEqual(record[1]["CrossReferenceList"][0].attributes["XrType"], "A")
self.assertEqual(record[1]["CrossReferenceList"][1]["XrTitle"], "American Industrial Hygiene Association journal")
self.assertEqual(record[1]["CrossReferenceList"][1].attributes["XrType"], "X")
self.assertEqual(record[1]["SortSerialName"], "AIHAJ : A JOURNAL FOR THE SCIENCE OF OCCUPATIONAL AND\nENVIRONMENTAL HEALTH AND SAFETY")
self.assertEqual(record[1]["IlsCreatedTimestamp"]["Year"], "2000")
self.assertEqual(record[1]["IlsCreatedTimestamp"]["Month"], "03")
self.assertEqual(record[1]["IlsCreatedTimestamp"]["Day"], "22")
self.assertEqual(record[1]["IlsUpdatedTimestamp"]["Year"], "2005")
self.assertEqual(record[1]["IlsUpdatedTimestamp"]["Month"], "11")
self.assertEqual(record[1]["IlsUpdatedTimestamp"]["Day"], "20")
self.assertEqual(record[2].attributes["DataCreationMethod"], "P")
self.assertEqual(record[2]["NlmUniqueID"], "8403252")
self.assertEqual(record[2]["Title"], "Acta crystallographica. Section B, Structural science")
self.assertEqual(record[2]["MedlineTA"], "Acta Crystallogr B")
self.assertEqual(len(record[2]["PublicationInfo"]), 8)
self.assertEqual(record[2]["PublicationInfo"]["Country"], "Denmark")
self.assertEqual(record[2]["PublicationInfo"]["Place"], ["Copenhagen"])
self.assertEqual(record[2]["PublicationInfo"]["Publisher"], ["Munksgaard International Publishers For The International\nUnion Of Crystallography"])
self.assertEqual(record[2]["PublicationInfo"]["PublicationFirstYear"], "1983")
self.assertEqual(record[2]["PublicationInfo"]["Frequency"], ["Bimonthly"])
self.assertEqual(record[2]["PublicationInfo"]["Imprint"], [])
self.assertEqual(record[2]["PublicationInfo"]["DateIssued"], [])
self.assertEqual(record[2]["PublicationInfo"]["DatesOfSerialPublication"], [])
self.assertEqual(len(record[2]["ISSN"]), 2)
self.assertEqual(record[2]["ISSN"][0], "0108-7681")
self.assertEqual(record[2]["ISSN"][0].attributes["IssnType"], "Print")
self.assertEqual(record[2]["ISSN"][1], "1600-5740")
self.assertEqual(record[2]["ISSN"][1].attributes["IssnType"], "Electronic")
self.assertEqual(record[2]["ISOAbbreviation"], "Acta Crystallogr., B")
self.assertEqual(record[2]["Language"], ["eng", "fre", "ger"])
self.assertEqual(record[2]["AcidFreeYN"], "N")
self.assertEqual(record[2]["Coden"], "ASBSDK")
self.assertEqual(record[2]["ContinuationNotes"], "Continues: Acta crystallographica. Section B, Structural\ncrystallography and crystal chemistry. ")
self.assertEqual(record[2]["MinorTitleChangeYN"], "N")
self.assertEqual(len(record[2]["IndexingHistoryList"]), 1)
self.assertEqual(record[2]["IndexingHistoryList"][0].attributes["CitationSubset"], "IM")
self.assertEqual(record[2]["IndexingHistoryList"][0].attributes["IndexingTreatment"], "Selective")
self.assertEqual(record[2]["IndexingHistoryList"][0].attributes["IndexingStatus"], "Currently-indexed")
self.assertEqual(record[2]["IndexingHistoryList"][0]["DateOfAction"]["Year"], "1989")
self.assertEqual(record[2]["IndexingHistoryList"][0]["DateOfAction"]["Month"], "11")
self.assertEqual(record[2]["IndexingHistoryList"][0]["DateOfAction"]["Day"], "06")
self.assertEqual(record[2]["IndexingHistoryList"][0]["Coverage"], "v44n1, 1988-")
self.assertEqual(record[2]["CurrentlyIndexedYN"], "Y")
self.assertEqual(record[2]["CurrentlyIndexedForSubset"], [""])
self.assertEqual(record[2]["CurrentlyIndexedForSubset"][0].attributes["CurrentSubset"], "IM")
self.assertEqual(record[2]["CurrentlyIndexedForSubset"][0].attributes["CurrentIndexingTreatment"], "Selective")
self.assertEqual(record[2]["IndexOnlineYN"], "Y")
self.assertEqual(record[2]["IndexingSubset"], "IM")
self.assertEqual(record[2]["BroadJournalHeadingList"][0], "Chemistry, Analytical")
self.assertEqual(len(record[2]["CrossReferenceList"]), 4)
self.assertEqual(record[2]["CrossReferenceList"][0]["XrTitle"], "ACTA CRYSTALLOGR B")
self.assertEqual(record[2]["CrossReferenceList"][0].attributes["XrType"], "A")
self.assertEqual(record[2]["CrossReferenceList"][1]["XrTitle"], "Acta Crystallogr.,Sect.B")
self.assertEqual(record[2]["CrossReferenceList"][1].attributes["XrType"], "A")
self.assertEqual(record[2]["CrossReferenceList"][2]["XrTitle"], "Acta crystallographica. Section B, Structural\nscience.")
self.assertEqual(record[2]["CrossReferenceList"][2].attributes["XrType"], "A")
self.assertEqual(record[2]["CrossReferenceList"][3]["XrTitle"], "Structural science")
self.assertEqual(record[2]["CrossReferenceList"][3].attributes["XrType"], "X")
self.assertEqual(record[2]["SortSerialName"], "ACTA CRYSTALLOGRAPHICA. SECTION B, STRUCTURAL\nSCIENCE")
self.assertEqual(record[2]["IlsCreatedTimestamp"]["Year"], "1998")
self.assertEqual(record[2]["IlsCreatedTimestamp"]["Month"], "11")
self.assertEqual(record[2]["IlsCreatedTimestamp"]["Day"], "05")
self.assertEqual(record[2]["IlsUpdatedTimestamp"]["Year"], "2008")
self.assertEqual(record[2]["IlsUpdatedTimestamp"]["Month"], "10")
self.assertEqual(record[2]["IlsUpdatedTimestamp"]["Day"], "15")
def test_omim(self):
'''Test parsing XML returned by EFetch, OMIM database
'''
# In OMIM show the full record for MIM number 601100 as XML
# To create the XML file, use
# >>> Bio.Entrez.efetch(db="omim", id="601100", retmode='xml',
#                       rettype='full')
handle = open('Entrez/ncbi_mim.xml')
record = Entrez.read(handle)
handle.close()
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["Mim-entry_mimNumber"], "601100")
self.assertEqual(record[0]["Mim-entry_mimType"], "1")
self.assertEqual(record[0]["Mim-entry_mimType"].attributes["value"], "star")
self.assertEqual(record[0]["Mim-entry_title"], "STRESS 70 PROTEIN CHAPERONE, MICROSOME-ASSOCIATED, 60-KD; STCH")
self.assertEqual(record[0]["Mim-entry_copyright"], "Copyright (c) 1966-2008 Johns Hopkins University")
self.assertEqual(record[0]["Mim-entry_symbol"], "STCH")
self.assertEqual(record[0]["Mim-entry_locus"], "21q11.1")
self.assertEqual(len(record[0]["Mim-entry_text"]), 2)
self.assertEqual(record[0]["Mim-entry_text"][0]["Mim-text_label"], "TEXT")
self.assertEqual(record[0]["Mim-entry_text"][0]["Mim-text_text"], "The stress-70 chaperone family consists of proteins that bind to denatured or incorrectly folded polypeptides and play a major role in the processing of cytosolic and secretory proteins. {2:Otterson et al. (1994)} cloned a human cDNA encoding a predicted 471-amino acid protein (60 kD) which they designated STCH. {1:Brodsky et al. (1995)} stated that the protein sequence is very similar to that of HSP70 ({140550}) and BiP ({138120}). As with other members of the family, the STCH protein contains an ATPase domain at the amino terminus whose activity was shown to be independent of peptide stimulation. The protein was found to be microsome-associated and constitutively expressed in all cell types examined.")
self.assertEqual(len(record[0]["Mim-entry_text"][0]["Mim-text_neighbors"]), 1)
self.assertEqual(record[0]["Mim-entry_text"][0]["Mim-text_neighbors"]["Mim-link"]["Mim-link_num"], "30")
self.assertEqual(record[0]["Mim-entry_text"][0]["Mim-text_neighbors"]["Mim-link"]["Mim-link_uids"], "8131751,9358068,10675567,9488737,8757872,11048651,2559088,10982831,2105497,16572726,9083109,17181539,14508011,15028727,10651811,9108392,11599566,2661019,11836248,7594475,12406544,8536694,12389629,10430932,9177027,9837933,8522346,2928112,12834280,8702658")
self.assertEqual(record[0]["Mim-entry_text"][0]["Mim-text_neighbors"]["Mim-link"]["Mim-link_numRelevant"], "0")
self.assertEqual(record[0]["Mim-entry_text"][1]["Mim-text_label"], "TEXT")
self.assertEqual(record[0]["Mim-entry_text"][1]["Mim-text_text"], "{1:Brodsky et al. (1995)} mapped the STCH gene to chromosome 21q11.1 with a high-resolution somatic cell hybrid panel for chromosome 21 and by fluorescence in situ hybridization with a YAC containing the gene. By interspecific backcross analysis, {3:Reeves et al. (1998)} mapped the mouse Stch gene to chromosome 16.")
self.assertEqual(len(record[0]["Mim-entry_text"][1]["Mim-text_neighbors"]), 1)
self.assertEqual(record[0]["Mim-entry_text"][1]["Mim-text_neighbors"]["Mim-link"]["Mim-link_num"], "30")
self.assertEqual(record[0]["Mim-entry_text"][1]["Mim-text_neighbors"]["Mim-link"]["Mim-link_uids"], "1354597,8244375,8597637,8838809,9143508,1427875,7806216,9852683,7835904,11060461,10083745,7789175,7806232,7513297,8020937,12014109,1769649,2045096,9747039,8034329,8088815,1783375,8275716,8020959,7956352,8020952,10198174,7655454,8750197,11272792")
self.assertEqual(record[0]["Mim-entry_text"][1]["Mim-text_neighbors"]["Mim-link"]["Mim-link_numRelevant"], "0")
self.assertEqual(record[0]["Mim-entry_hasSummary"], "")
self.assertEqual(record[0]["Mim-entry_hasSummary"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_hasSynopsis"], "")
self.assertEqual(record[0]["Mim-entry_hasSynopsis"].attributes["value"], "false")
self.assertEqual(len(record[0]["Mim-entry_editHistory"]), 6)
self.assertEqual(record[0]["Mim-entry_editHistory"][0]["Mim-edit-item_author"], "terry")
self.assertEqual(record[0]["Mim-entry_editHistory"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1999")
self.assertEqual(record[0]["Mim-entry_editHistory"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_editHistory"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "9")
self.assertEqual(record[0]["Mim-entry_editHistory"][1]["Mim-edit-item_author"], "carol")
self.assertEqual(record[0]["Mim-entry_editHistory"][1]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1999")
self.assertEqual(record[0]["Mim-entry_editHistory"][1]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_editHistory"][1]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "7")
self.assertEqual(record[0]["Mim-entry_editHistory"][2]["Mim-edit-item_author"], "carol")
self.assertEqual(record[0]["Mim-entry_editHistory"][2]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1998")
self.assertEqual(record[0]["Mim-entry_editHistory"][2]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "7")
self.assertEqual(record[0]["Mim-entry_editHistory"][2]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "8")
self.assertEqual(record[0]["Mim-entry_editHistory"][3]["Mim-edit-item_author"], "terry")
self.assertEqual(record[0]["Mim-entry_editHistory"][3]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1996")
self.assertEqual(record[0]["Mim-entry_editHistory"][3]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "5")
self.assertEqual(record[0]["Mim-entry_editHistory"][3]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "24")
self.assertEqual(record[0]["Mim-entry_editHistory"][4]["Mim-edit-item_author"], "mark")
self.assertEqual(record[0]["Mim-entry_editHistory"][4]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1996")
self.assertEqual(record[0]["Mim-entry_editHistory"][4]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_editHistory"][4]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "1")
self.assertEqual(record[0]["Mim-entry_editHistory"][5]["Mim-edit-item_author"], "mark")
self.assertEqual(record[0]["Mim-entry_editHistory"][5]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1996")
self.assertEqual(record[0]["Mim-entry_editHistory"][5]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_editHistory"][5]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "1")
self.assertEqual(record[0]["Mim-entry_creationDate"]["Mim-edit-item"]["Mim-edit-item_author"], "Alan F. Scott")
self.assertEqual(record[0]["Mim-entry_creationDate"]["Mim-edit-item"]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1996")
self.assertEqual(record[0]["Mim-entry_creationDate"]["Mim-edit-item"]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_creationDate"]["Mim-edit-item"]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "1")
self.assertEqual(len(record[0]["Mim-entry_references"]), 3)
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_number"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_origNumber"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_type"], "")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_type"].attributes["value"], "citation")
self.assertEqual(len(record[0]["Mim-entry_references"][0]["Mim-reference_authors"]), 6)
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][0]["Mim-author_name"], "Brodsky, G.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][0]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][1]["Mim-author_name"], "Otterson, G. A.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][1]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][2]["Mim-author_name"], "Parry, B. B.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][2]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][3]["Mim-author_name"], "Hart, I.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][3]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][4]["Mim-author_name"], "Patterson, D.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][4]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][5]["Mim-author_name"], "Kaye, F. J.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_authors"][5]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_primaryAuthor"], "Brodsky")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_otherAuthors"], "et al.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_citationTitle"], "Localization of STCH to human chromosome 21q11.1.")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_citationType"], "0")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_volume"], "30")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_journal"], "Genomics")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_year"], "1995")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_month"], "0")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_day"], "0")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pages"][0]["Mim-page_from"], "627")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pages"][0]["Mim-page_to"], "628")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_pubmedUID"], "8825657")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_ambiguous"], "")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_ambiguous"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_noLink"], "")
self.assertEqual(record[0]["Mim-entry_references"][0]["Mim-reference_noLink"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_number"], "2")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_origNumber"], "2")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_type"], "")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_type"].attributes["value"], "citation")
self.assertEqual(len(record[0]["Mim-entry_references"][1]["Mim-reference_authors"]), 6)
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][0]["Mim-author_name"], "Otterson, G. A.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][0]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][1]["Mim-author_name"], "Flynn, G. C.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][1]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][2]["Mim-author_name"], "Kratzke, R. A.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][2]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][3]["Mim-author_name"], "Coxon, A.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][3]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][4]["Mim-author_name"], "Johnston, P. G.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][4]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][5]["Mim-author_name"], "Kaye, F. J.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_authors"][5]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_primaryAuthor"], "Otterson")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_otherAuthors"], "et al.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_citationTitle"], "Stch encodes the 'ATPase core' of a microsomal stress70 protein.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_citationType"], "0")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_volume"], "13")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_journal"], "EMBO J.")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_year"], "1994")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_month"], "0")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_day"], "0")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pages"][0]["Mim-page_from"], "1216")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pages"][0]["Mim-page_to"], "1225")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_pubmedUID"], "8131751")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_ambiguous"], "")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_ambiguous"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_noLink"], "")
self.assertEqual(record[0]["Mim-entry_references"][1]["Mim-reference_noLink"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_number"], "3")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_origNumber"], "3")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_type"], "")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_type"].attributes["value"], "citation")
self.assertEqual(len(record[0]["Mim-entry_references"][2]["Mim-reference_authors"]), 4)
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][0]["Mim-author_name"], "Reeves, R. H.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][0]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][1]["Mim-author_name"], "Rue, E.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][1]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][2]["Mim-author_name"], "Yu, J.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][2]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][3]["Mim-author_name"], "Kao, F.-T.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_authors"][3]["Mim-author_index"], "1")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_primaryAuthor"], "Reeves")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_otherAuthors"], "et al.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_citationTitle"], "Stch maps to mouse chromosome 16, extending the conserved synteny with human chromosome 21.")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_citationType"], "0")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_volume"], "49")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_journal"], "Genomics")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_year"], "1998")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_month"], "0")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pubDate"]["Mim-date"]["Mim-date_day"], "0")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pages"][0]["Mim-page_from"], "156")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pages"][0]["Mim-page_to"], "157")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_pubmedUID"], "9570963")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_ambiguous"], "")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_ambiguous"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_noLink"], "")
self.assertEqual(record[0]["Mim-entry_references"][2]["Mim-reference_noLink"].attributes["value"], "false")
self.assertEqual(record[0]["Mim-entry_attribution"][0]["Mim-edit-item_author"], "Carol A. Bocchini - updated")
self.assertEqual(record[0]["Mim-entry_attribution"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_year"], "1999")
self.assertEqual(record[0]["Mim-entry_attribution"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_month"], "3")
self.assertEqual(record[0]["Mim-entry_attribution"][0]["Mim-edit-item_modDate"]["Mim-date"]["Mim-date_day"], "7")
self.assertEqual(record[0]["Mim-entry_numGeneMaps"], "1")
self.assertEqual(len(record[0]["Mim-entry_medlineLinks"]), 1)
self.assertEqual(record[0]["Mim-entry_medlineLinks"]["Mim-link"]["Mim-link_num"], "3")
self.assertEqual(record[0]["Mim-entry_medlineLinks"]["Mim-link"]["Mim-link_uids"], "8825657,8131751,9570963")
self.assertEqual(record[0]["Mim-entry_medlineLinks"]["Mim-link"]["Mim-link_numRelevant"], "0")
self.assertEqual(len(record[0]["Mim-entry_proteinLinks"]), 1)
self.assertEqual(record[0]["Mim-entry_proteinLinks"]["Mim-link"]["Mim-link_num"], "7")
self.assertEqual(record[0]["Mim-entry_proteinLinks"]["Mim-link"]["Mim-link_uids"], "148747550,67461586,48928056,30089677,2352621,1351125,460148")
self.assertEqual(record[0]["Mim-entry_proteinLinks"]["Mim-link"]["Mim-link_numRelevant"], "0")
self.assertEqual(len(record[0]["Mim-entry_nucleotideLinks"]), 1)
self.assertEqual(record[0]["Mim-entry_nucleotideLinks"]["Mim-link"]["Mim-link_num"], "5")
self.assertEqual(record[0]["Mim-entry_nucleotideLinks"]["Mim-link"]["Mim-link_uids"], "148747549,55741785,48928055,2352620,460147")
self.assertEqual(record[0]["Mim-entry_nucleotideLinks"]["Mim-link"]["Mim-link_numRelevant"], "0")
def test_taxonomy(self):
'''Test parsing XML returned by EFetch, Taxonomy database
'''
# Access the Taxonomy database using efetch.
# To create the XML file, use
# >>> Bio.Entrez.efetch(db="taxonomy", id="9685", retmode="xml")
handle = open('Entrez/taxonomy.xml')
record = Entrez.read(handle)
handle.close()
self.assertEqual(len(record), 1)
self.assertEqual(record[0]["TaxId"], "9685")
self.assertEqual(record[0]["ScientificName"], "Felis catus")
self.assertEqual(record[0]["OtherNames"]["GenbankCommonName"], "domestic cat")
self.assertEqual(record[0]["OtherNames"]["Synonym"][0], "Felis silvestris catus")
self.assertEqual(record[0]["OtherNames"]["Synonym"][1], "Felis domesticus")
self.assertEqual(record[0]["OtherNames"]["CommonName"][0], "cat")
self.assertEqual(record[0]["OtherNames"]["CommonName"][1], "cats")
self.assertEqual(record[0]["OtherNames"]["Includes"][0], "Korat cats")
self.assertEqual(record[0]["ParentTaxId"], "9682")
self.assertEqual(record[0]["Rank"], "species")
self.assertEqual(record[0]["Division"], "Mammals")
self.assertEqual(record[0]["GeneticCode"]["GCId"], "1")
self.assertEqual(record[0]["GeneticCode"]["GCName"], "Standard")
self.assertEqual(record[0]["MitoGeneticCode"]["MGCId"], "2")
self.assertEqual(record[0]["MitoGeneticCode"]["MGCName"], "Vertebrate Mitochondrial")
self.assertEqual(record[0]["Lineage"], "cellular organisms; Eukaryota; Fungi/Metazoa group; Metazoa; Eumetazoa; Bilateria; Coelomata; Deuterostomia; Chordata; Craniata; Vertebrata; Gnathostomata; Teleostomi; Euteleostomi; Sarcopterygii; Tetrapoda; Amniota; Mammalia; Theria; Eutheria; Laurasiatheria; Carnivora; Feliformia; Felidae; Felinae; Felis")
self.assertEqual(record[0]["LineageEx"][0]["TaxId"], "131567")
self.assertEqual(record[0]["LineageEx"][0]["ScientificName"], "cellular organisms")
self.assertEqual(record[0]["LineageEx"][0]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][1]["TaxId"], "2759")
self.assertEqual(record[0]["LineageEx"][1]["ScientificName"], "Eukaryota")
self.assertEqual(record[0]["LineageEx"][1]["Rank"], "superkingdom")
self.assertEqual(record[0]["LineageEx"][2]["TaxId"], "33154")
self.assertEqual(record[0]["LineageEx"][2]["ScientificName"], "Fungi/Metazoa group")
self.assertEqual(record[0]["LineageEx"][2]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][3]["TaxId"], "33208")
self.assertEqual(record[0]["LineageEx"][3]["ScientificName"], "Metazoa")
self.assertEqual(record[0]["LineageEx"][3]["Rank"], "kingdom")
self.assertEqual(record[0]["LineageEx"][4]["TaxId"], "6072")
self.assertEqual(record[0]["LineageEx"][4]["ScientificName"], "Eumetazoa")
self.assertEqual(record[0]["LineageEx"][4]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][5]["TaxId"], "33213")
self.assertEqual(record[0]["LineageEx"][5]["ScientificName"], "Bilateria")
self.assertEqual(record[0]["LineageEx"][5]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][6]["TaxId"], "33316")
self.assertEqual(record[0]["LineageEx"][6]["ScientificName"], "Coelomata")
self.assertEqual(record[0]["LineageEx"][6]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][7]["TaxId"], "33511")
self.assertEqual(record[0]["LineageEx"][7]["ScientificName"], "Deuterostomia")
self.assertEqual(record[0]["LineageEx"][7]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][8]["TaxId"], "7711")
self.assertEqual(record[0]["LineageEx"][8]["ScientificName"], "Chordata")
self.assertEqual(record[0]["LineageEx"][8]["Rank"], "phylum")
self.assertEqual(record[0]["LineageEx"][9]["TaxId"], "89593")
self.assertEqual(record[0]["LineageEx"][9]["ScientificName"], "Craniata")
self.assertEqual(record[0]["LineageEx"][9]["Rank"], "subphylum")
self.assertEqual(record[0]["LineageEx"][10]["TaxId"], "7742")
self.assertEqual(record[0]["LineageEx"][10]["ScientificName"], "Vertebrata")
self.assertEqual(record[0]["LineageEx"][10]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][11]["TaxId"], "7776")
self.assertEqual(record[0]["LineageEx"][11]["ScientificName"], "Gnathostomata")
self.assertEqual(record[0]["LineageEx"][11]["Rank"], "superclass")
self.assertEqual(record[0]["LineageEx"][12]["TaxId"], "117570")
self.assertEqual(record[0]["LineageEx"][12]["ScientificName"], "Teleostomi")
self.assertEqual(record[0]["LineageEx"][12]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][13]["TaxId"], "117571")
self.assertEqual(record[0]["LineageEx"][13]["ScientificName"], "Euteleostomi")
self.assertEqual(record[0]["LineageEx"][13]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][14]["TaxId"], "8287")
self.assertEqual(record[0]["LineageEx"][14]["ScientificName"], "Sarcopterygii")
self.assertEqual(record[0]["LineageEx"][14]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][15]["TaxId"], "32523")
self.assertEqual(record[0]["LineageEx"][15]["ScientificName"], "Tetrapoda")
self.assertEqual(record[0]["LineageEx"][15]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][16]["TaxId"], "32524")
self.assertEqual(record[0]["LineageEx"][16]["ScientificName"], "Amniota")
self.assertEqual(record[0]["LineageEx"][16]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][17]["TaxId"], "40674")
self.assertEqual(record[0]["LineageEx"][17]["ScientificName"], "Mammalia")
self.assertEqual(record[0]["LineageEx"][17]["Rank"], "class")
self.assertEqual(record[0]["LineageEx"][18]["TaxId"], "32525")
self.assertEqual(record[0]["LineageEx"][18]["ScientificName"], "Theria")
self.assertEqual(record[0]["LineageEx"][18]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][19]["TaxId"], "9347")
self.assertEqual(record[0]["LineageEx"][19]["ScientificName"], "Eutheria")
self.assertEqual(record[0]["LineageEx"][19]["Rank"], "no rank")
self.assertEqual(record[0]["LineageEx"][20]["TaxId"], "314145")
self.assertEqual(record[0]["LineageEx"][20]["ScientificName"], "Laurasiatheria")
self.assertEqual(record[0]["LineageEx"][20]["Rank"], "superorder")
self.assertEqual(record[0]["LineageEx"][21]["TaxId"], "33554")
self.assertEqual(record[0]["LineageEx"][21]["ScientificName"], "Carnivora")
self.assertEqual(record[0]["LineageEx"][21]["Rank"], "order")
self.assertEqual(record[0]["LineageEx"][22]["TaxId"], "379583")
self.assertEqual(record[0]["LineageEx"][22]["ScientificName"], "Feliformia")
self.assertEqual(record[0]["LineageEx"][22]["Rank"], "suborder")
self.assertEqual(record[0]["LineageEx"][23]["TaxId"], "9681")
self.assertEqual(record[0]["LineageEx"][23]["ScientificName"], "Felidae")
self.assertEqual(record[0]["LineageEx"][23]["Rank"], "family")
self.assertEqual(record[0]["LineageEx"][24]["TaxId"], "338152")
self.assertEqual(record[0]["LineageEx"][24]["ScientificName"], "Felinae")
self.assertEqual(record[0]["LineageEx"][24]["Rank"], "subfamily")
self.assertEqual(record[0]["LineageEx"][25]["TaxId"], "9682")
self.assertEqual(record[0]["LineageEx"][25]["ScientificName"], "Felis")
self.assertEqual(record[0]["LineageEx"][25]["Rank"], "genus")
self.assertEqual(record[0]["CreateDate"], "1995/02/27")
self.assertEqual(record[0]["UpdateDate"], "2007/09/04")
self.assertEqual(record[0]["PubDate"], "1993/07/26")
def test_nucleotide1(self):
'''Test parsing XML returned by EFetch, Nucleotide database (first test)
'''
# Access the nucleotide database using efetch.
# To create the XML file, use
# >>> Bio.Entrez.efetch(db='nucleotide', id=5, retmode='xml')
handle = open('Entrez/nucleotide1.xml')
record = Entrez.read(handle)
handle.close()
self.assertEqual(record[0]["GBSeq_locus"], "X60065")
self.assertEqual(record[0]["GBSeq_length"], "1136")
self.assertEqual(record[0]["GBSeq_strandedness"], "single")
self.assertEqual(record[0]["GBSeq_moltype"], "mRNA")
self.assertEqual(record[0]["GBSeq_topology"], "linear")
self.assertEqual(record[0]["GBSeq_division"], "MAM")
self.assertEqual(record[0]["GBSeq_update-date"], "14-NOV-2006")
self.assertEqual(record[0]["GBSeq_create-date"], "05-MAY-1992")
self.assertEqual(record[0]["GBSeq_definition"], "B.bovis beta-2-gpI mRNA for beta-2-glycoprotein I")
self.assertEqual(record[0]["GBSeq_primary-accession"], "X60065")
self.assertEqual(record[0]["GBSeq_accession-version"], "X60065.1")
self.assertEqual(record[0]["GBSeq_other-seqids"][0], "emb|X60065.1|")
self.assertEqual(record[0]["GBSeq_other-seqids"][1], "gi|5")
self.assertEqual(record[0]["GBSeq_keywords"][0], "beta-2 glycoprotein I")
self.assertEqual(record[0]["GBSeq_source"], "Bos taurus (cattle)")
self.assertEqual(record[0]["GBSeq_organism"], "Bos taurus")
self.assertEqual(record[0]["GBSeq_taxonomy"], "Eukaryota; Metazoa; Chordata; Craniata; Vertebrata; Euteleostomi; Mammalia; Eutheria; Laurasiatheria; Cetartiodactyla; Ruminantia; Pecora; Bovidae; Bovinae; Bos")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_reference"], "1")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][0], "Bendixen,E.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][1], "Halkier,T.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][2], "Magnusson,S.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][3], "Sottrup-Jensen,L.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][4], "Kristensen,T.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_title"], "Complete primary structure of bovine beta 2-glycoprotein I: localization of the disulfide bridges")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_journal"], "Biochemistry 31 (14), 3611-3617 (1992)")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_pubmed"], "1567819")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_reference"], "2")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_position"], "1..1136")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_authors"][0], "Kristensen,T.")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_title"], "Direct Submission")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_journal"], "Submitted (11-JUN-1991) T. Kristensen, Dept of Mol Biology, University of Aarhus, C F Mollers Alle 130, DK-8000 Aarhus C, DENMARK")
self.assertEqual(len(record[0]["GBSeq_feature-table"]), 7)
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_key"], "source")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_location"], "1..1136")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_to"], "1136")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][0]["GBQualifier_name"], "organism")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][0]["GBQualifier_value"], "Bos taurus")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][1]["GBQualifier_name"], "mol_type")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][1]["GBQualifier_value"], "mRNA")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][2]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][2]["GBQualifier_value"], "taxon:9913")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][3]["GBQualifier_name"], "clone")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][3]["GBQualifier_value"], "pBB2I")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][4]["GBQualifier_name"], "tissue_type")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][4]["GBQualifier_value"], "liver")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_key"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_location"], "<1..1136")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_to"], "1136")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_partial5"], "")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_partial5"].attributes["value"], "true")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_key"], "CDS")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_location"], "<1..1029")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_to"], "1029")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_partial5"], "")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_partial5"].attributes["value"], "true")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][1]["GBQualifier_name"], "codon_start")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][1]["GBQualifier_value"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][2]["GBQualifier_name"], "transl_table")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][2]["GBQualifier_value"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][3]["GBQualifier_name"], "product")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][3]["GBQualifier_value"], "beta-2-glycoprotein I")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][4]["GBQualifier_name"], "protein_id")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][4]["GBQualifier_value"], "CAA42669.1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][5]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][5]["GBQualifier_value"], "GI:6")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][6]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][6]["GBQualifier_value"], "GOA:P17690")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][7]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][7]["GBQualifier_value"], "UniProtKB/Swiss-Prot:P17690")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][8]["GBQualifier_name"], "translation")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][8]["GBQualifier_value"], "PALVLLLGFLCHVAIAGRTCPKPDELPFSTVVPLKRTYEPGEQIVFSCQPGYVSRGGIRRFTCPLTGLWPINTLKCMPRVCPFAGILENGTVRYTTFEYPNTISFSCHTGFYLKGASSAKCTEEGKWSPDLPVCAPITCPPPPIPKFASLSVYKPLAGNNSFYGSKAVFKCLPHHAMFGNDTVTCTEHGNWTQLPECREVRCPFPSRPDNGFVNHPANPVLYYKDTATFGCHETYSLDGPEEVECSKFGNWSAQPSCKASCKLSIKRATVIYEGERVAIQNKFKNGMLHGQKVSFFCKHKEKKCSYTEDAQCIDGTIEIPKCFKEHSSLAFWKTDASDVKPC")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_key"], "sig_peptide")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_location"], "<1..48")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_to"], "48")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_partial5"], "")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_partial5"].attributes["value"], "true")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_key"], "mat_peptide")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_location"], "49..1026")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_intervals"][0]["GBInterval_from"], "49")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_intervals"][0]["GBInterval_to"], "1026")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_quals"][1]["GBQualifier_name"], "product")
self.assertEqual(record[0]["GBSeq_feature-table"][4]["GBFeature_quals"][1]["GBQualifier_value"], "beta-2-glycoprotein I")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_key"], "polyA_signal")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_location"], "1101..1106")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_intervals"][0]["GBInterval_from"], "1101")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_intervals"][0]["GBInterval_to"], "1106")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][5]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_key"], "polyA_site")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_location"], "1130")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_intervals"][0]["GBInterval_point"], "1130")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_intervals"][0]["GBInterval_accession"], "X60065.1")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_quals"][0]["GBQualifier_name"], "gene")
self.assertEqual(record[0]["GBSeq_feature-table"][6]["GBFeature_quals"][0]["GBQualifier_value"], "beta-2-gpI")
self.assertEqual(record[0]["GBSeq_sequence"], "ccagcgctcgtcttgctgttggggtttctctgccacgttgctatcgcaggacgaacctgccccaagccagatgagctaccgttttccacggtggttccactgaaacggacctatgagcccggggagcagatagtcttctcctgccagccgggctacgtgtcccggggagggatccggcggtttacatgcccgctcacaggactctggcccatcaacacgctgaaatgcatgcccagagtatgtccttttgctgggatcttagaaaacggaacggtacgctatacaacgtttgagtatcccaacaccatcagcttttcttgccacacggggttttatctgaaaggagctagttctgcaaaatgcactgaggaagggaagtggagcccagaccttcctgtctgtgcccctataacctgccctccaccacccatacccaagtttgcaagtctcagcgtttacaagccgttggctgggaacaactccttctatggcagcaaggcagtctttaagtgcttgccacaccacgcgatgtttggaaatgacaccgttacctgcacggaacatgggaactggacgcagttgccagaatgcagggaagtaagatgcccattcccatcaagaccagacaatgggtttgtgaaccatcctgcaaatccagtgctctactataaggacaccgccacctttggctgccatgaaacgtattccttggatggaccggaagaagtagaatgcagcaaattcggaaactggtctgcacagccaagctgtaaagcatcttgtaagttatctattaaaagagctactgtgatatatgaaggagagagagtagctatccagaacaaatttaagaatggaatgctgcatggccaaaaggtttctttcttctgcaagcataaggaaaagaagtgcagctacacagaagatgctcagtgcatagacggcaccatcgagattcccaaatgcttcaaggagcacagttctttagctttctggaaaacggatgcatctgacgtaaaaccatgctaagctggttttcacactgaaaattaaatgtcatgcttatatgtgtctgtctgagaatctgatggaaacggaaaaataaagagactgaatttaccgtgtcaagaaaaaaa")
def test_nucleotide2(self):
'''Test parsing XML returned by EFetch, Nucleotide database (second test)
'''
# Access the nucleotide database using efetch.
# To create the XML file, use
# >>> handle = Bio.Entrez.efetch(db='nucleotide', id=5,
#                                rettype='fasta', complexity=0,
#                                retmode='xml')
handle = open('Entrez/nucleotide2.xml')
record = Entrez.read(handle)
self.assertEqual(record[0]["TSeq_seqtype"], "")
self.assertEqual(record[0]["TSeq_seqtype"].attributes["value"], "nucleotide")
self.assertEqual(record[0]["TSeq_gi"], "5")
self.assertEqual(record[0]["TSeq_accver"], "X60065.1")
self.assertEqual(record[0]["TSeq_taxid"], "9913")
self.assertEqual(record[0]["TSeq_orgname"], "Bos taurus")
self.assertEqual(record[0]["TSeq_defline"], "B.bovis beta-2-gpI mRNA for beta-2-glycoprotein I")
self.assertEqual(record[0]["TSeq_length"], "1136")
self.assertEqual(record[0]["TSeq_sequence"], "CCAGCGCTCGTCTTGCTGTTGGGGTTTCTCTGCCACGTTGCTATCGCAGGACGAACCTGCCCCAAGCCAGATGAGCTACCGTTTTCCACGGTGGTTCCACTGAAACGGACCTATGAGCCCGGGGAGCAGATAGTCTTCTCCTGCCAGCCGGGCTACGTGTCCCGGGGAGGGATCCGGCGGTTTACATGCCCGCTCACAGGACTCTGGCCCATCAACACGCTGAAATGCATGCCCAGAGTATGTCCTTTTGCTGGGATCTTAGAAAACGGAACGGTACGCTATACAACGTTTGAGTATCCCAACACCATCAGCTTTTCTTGCCACACGGGGTTTTATCTGAAAGGAGCTAGTTCTGCAAAATGCACTGAGGAAGGGAAGTGGAGCCCAGACCTTCCTGTCTGTGCCCCTATAACCTGCCCTCCACCACCCATACCCAAGTTTGCAAGTCTCAGCGTTTACAAGCCGTTGGCTGGGAACAACTCCTTCTATGGCAGCAAGGCAGTCTTTAAGTGCTTGCCACACCACGCGATGTTTGGAAATGACACCGTTACCTGCACGGAACATGGGAACTGGACGCAGTTGCCAGAATGCAGGGAAGTAAGATGCCCATTCCCATCAAGACCAGACAATGGGTTTGTGAACCATCCTGCAAATCCAGTGCTCTACTATAAGGACACCGCCACCTTTGGCTGCCATGAAACGTATTCCTTGGATGGACCGGAAGAAGTAGAATGCAGCAAATTCGGAAACTGGTCTGCACAGCCAAGCTGTAAAGCATCTTGTAAGTTATCTATTAAAAGAGCTACTGTGATATATGAAGGAGAGAGAGTAGCTATCCAGAACAAATTTAAGAATGGAATGCTGCATGGCCAAAAGGTTTCTTTCTTCTGCAAGCATAAGGAAAAGAAGTGCAGCTACACAGAAGATGCTCAGTGCATAGACGGCACCATCGAGATTCCCAAATGCTTCAAGGAGCACAGTTCTTTAGCTTTCTGGAAAACGGATGCATCTGACGTAAAACCATGCTAAGCTGGTTTTCACACTGAAAATTAAATGTCATGCTTATATGTGTCTGTCTGAGAATCTGATGGAAACGGAAAAATAAAGAGACTGAATTTACCGTGTCAAGAAAAAAA")
self.assertEqual(record[1]["TSeq_seqtype"], "")
self.assertEqual(record[1]["TSeq_seqtype"].attributes["value"], "protein")
self.assertEqual(record[1]["TSeq_gi"], "6")
self.assertEqual(record[1]["TSeq_accver"], "CAA42669.1")
self.assertEqual(record[1]["TSeq_taxid"], "9913")
self.assertEqual(record[1]["TSeq_orgname"], "Bos taurus")
self.assertEqual(record[1]["TSeq_defline"], "beta-2-glycoprotein I [Bos taurus]")
self.assertEqual(record[1]["TSeq_length"], "342")
self.assertEqual(record[1]["TSeq_sequence"], "PALVLLLGFLCHVAIAGRTCPKPDELPFSTVVPLKRTYEPGEQIVFSCQPGYVSRGGIRRFTCPLTGLWPINTLKCMPRVCPFAGILENGTVRYTTFEYPNTISFSCHTGFYLKGASSAKCTEEGKWSPDLPVCAPITCPPPPIPKFASLSVYKPLAGNNSFYGSKAVFKCLPHHAMFGNDTVTCTEHGNWTQLPECREVRCPFPSRPDNGFVNHPANPVLYYKDTATFGCHETYSLDGPEEVECSKFGNWSAQPSCKASCKLSIKRATVIYEGERVAIQNKFKNGMLHGQKVSFFCKHKEKKCSYTEDAQCIDGTIEIPKCFKEHSSLAFWKTDASDVKPC")
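# A hedged companion helper (added for illustration; not part of the original
# suite). It shows how the nucleotide2.xml payload above could be fetched live
# with Bio.Entrez. Assumes network access to NCBI; the e-mail address is a
# placeholder required by NCBI's usage policy.
def _fetch_nucleotide2_live(self):
    Entrez.email = "your.name@example.org"  # placeholder, set your own
    handle = Entrez.efetch(db='nucleotide', id=5, rettype='fasta',
                           complexity=0, retmode='xml')
    try:
        return Entrez.read(handle)  # same structure as the bundled file
    finally:
        handle.close()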
def test_protein(self):
'''Test parsing XML returned by EFetch, Protein database
'''
# Access the protein database using efetch.
# To create the XML file, use
# >>> handle = Bio.Entrez.efetch(db='protein', id=8,
#                                rettype='gp', retmode='xml')
handle = open('Entrez/protein.xml')
record = Entrez.read(handle)
self.assertEqual(record[0]["GBSeq_locus"], "CAA35997")
self.assertEqual(record[0]["GBSeq_length"], "100")
self.assertEqual(record[0]["GBSeq_moltype"], "AA")
self.assertEqual(record[0]["GBSeq_topology"], "linear")
self.assertEqual(record[0]["GBSeq_division"], "MAM")
self.assertEqual(record[0]["GBSeq_update-date"], "12-SEP-1993")
self.assertEqual(record[0]["GBSeq_create-date"], "03-APR-1990")
self.assertEqual(record[0]["GBSeq_definition"], "unnamed protein product [Bos taurus]")
self.assertEqual(record[0]["GBSeq_primary-accession"], "CAA35997")
self.assertEqual(record[0]["GBSeq_accession-version"], "CAA35997.1")
self.assertEqual(record[0]["GBSeq_other-seqids"][0], "emb|CAA35997.1|")
self.assertEqual(record[0]["GBSeq_other-seqids"][1], "gi|8")
self.assertEqual(record[0]["GBSeq_source"], "Bos taurus (cattle)")
self.assertEqual(record[0]["GBSeq_organism"], "Bos taurus")
self.assertEqual(record[0]["GBSeq_taxonomy"], "Eukaryota; Metazoa; Chordata; Craniata; Vertebrata; Euteleostomi; Mammalia; Eutheria; Laurasiatheria; Cetartiodactyla; Ruminantia; Pecora; Bovidae; Bovinae; Bos")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_reference"], "1")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_position"], "1..100")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][0], "Kiefer,M.C.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][1], "Saphire,A.C.S.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][2], "Bauer,D.M.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_authors"][3], "Barr,P.J.")
self.assertEqual(record[0]["GBSeq_references"][0]["GBReference_journal"], "Unpublished")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_reference"], "2")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_position"], "1..100")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_authors"][0], "Kiefer,M.C.")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_title"], "Direct Submission")
self.assertEqual(record[0]["GBSeq_references"][1]["GBReference_journal"], "Submitted (30-JAN-1990) Kiefer M.C., Chiron Corporation, 4560 Hortom St, Emeryville CA 94608-2916, U S A")
self.assertEqual(record[0]["GBSeq_comment"], "See <X15699> for Human sequence.~Data kindly reviewed (08-MAY-1990) by Kiefer M.C.")
self.assertEqual(record[0]["GBSeq_source-db"], "embl accession X51700.1")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_key"], "source")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_location"], "1..100")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_to"], "100")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_intervals"][0]["GBInterval_accession"], "CAA35997.1")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][0]["GBQualifier_name"], "organism")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][0]["GBQualifier_value"], "Bos taurus")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][1]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][1]["GBQualifier_value"], "taxon:9913")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][2]["GBQualifier_name"], "clone")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][2]["GBQualifier_value"], "bBGP-3")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][3]["GBQualifier_name"], "tissue_type")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][3]["GBQualifier_value"], "bone matrix")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][4]["GBQualifier_name"], "clone_lib")
self.assertEqual(record[0]["GBSeq_feature-table"][0]["GBFeature_quals"][4]["GBQualifier_value"], "Zap-bb")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_key"], "Protein")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_location"], "1..100")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_to"], "100")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_intervals"][0]["GBInterval_accession"], "CAA35997.1")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_quals"][0]["GBQualifier_name"], "name")
self.assertEqual(record[0]["GBSeq_feature-table"][1]["GBFeature_quals"][0]["GBQualifier_value"], "unnamed protein product")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_key"], "Region")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_location"], "33..97")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_from"], "33")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_to"], "97")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_intervals"][0]["GBInterval_accession"], "CAA35997.1")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][0]["GBQualifier_name"], "region_name")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][0]["GBQualifier_value"], "Gla")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][1]["GBQualifier_name"], "note")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][1]["GBQualifier_value"], "Vitamin K-dependent carboxylation/gamma-carboxyglutamic (GLA) domain. This domain is responsible for the high-affinity binding of calcium ions. This domain contains post-translational modifications of many glutamate residues by Vitamin K-dependent...; cl02449")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][2]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][2]["GBFeature_quals"][2]["GBQualifier_value"], "CDD:92835")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_key"], "CDS")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_location"], "1..100")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_from"], "1")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_to"], "100")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_intervals"][0]["GBInterval_accession"], "CAA35997.1")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][0]["GBQualifier_name"], "coded_by")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][0]["GBQualifier_value"], "X51700.1:28..330")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][1]["GBQualifier_name"], "note")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][1]["GBQualifier_value"], "bone Gla precursor (100 AA)")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][2]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][2]["GBQualifier_value"], "GOA:P02820")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][3]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][3]["GBQualifier_value"], "InterPro:IPR000294")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][4]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][4]["GBQualifier_value"], "InterPro:IPR002384")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][5]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][5]["GBQualifier_value"], "PDB:1Q3M")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][6]["GBQualifier_name"], "db_xref")
self.assertEqual(record[0]["GBSeq_feature-table"][3]["GBFeature_quals"][6]["GBQualifier_value"], "UniProtKB/Swiss-Prot:P02820")
self.assertEqual(record[0]["GBSeq_sequence"], "mrtpmllallalatlclagradakpgdaesgkgaafvskqegsevvkrlrryldhwlgapapypdplepkrevcelnpdcdeladhigfqeayrrfygpv")
if __name__ == '__main__':
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner)
| 81.687333
| 1,931
| 0.635895
| 37,865
| 337,287
| 5.636789
| 0.09864
| 0.25258
| 0.344757
| 0.254492
| 0.812361
| 0.758528
| 0.71177
| 0.675975
| 0.588
| 0.32261
| 0
| 0.091919
| 0.133246
| 337,287
| 4,128
| 1,932
| 81.707122
| 0.638167
| 0.031196
| 0
| 0.116914
| 0
| 0.007688
| 0.369863
| 0.02588
| 0
| 1
| 0
| 0
| 0.954666
| 1
| 0.01087
| false
| 0.00053
| 0.00053
| 0
| 0.013521
| 0.000795
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4da07ff5a8a72a696cea0339cf1734f1958fa8ec
| 97
|
py
|
Python
|
overtime/tests/algorithms/__init__.py
|
overtime3/overtime
|
ed3ae6877894f4d2c9f8473a885698e1622be3bd
|
[
"MIT"
] | 9
|
2020-10-15T13:53:36.000Z
|
2022-03-08T12:08:09.000Z
|
overtime/tests/algorithms/__init__.py
|
overtime3/overtime
|
ed3ae6877894f4d2c9f8473a885698e1622be3bd
|
[
"MIT"
] | 6
|
2021-02-07T15:43:12.000Z
|
2021-04-24T04:03:39.000Z
|
overtime/tests/algorithms/__init__.py
|
overtime3/overtime
|
ed3ae6877894f4d2c9f8473a885698e1622be3bd
|
[
"MIT"
] | 7
|
2020-10-15T13:55:12.000Z
|
2022-03-12T03:54:02.000Z
|
from overtime.tests.algorithms.centrality import *
from overtime.tests.algorithms.paths import *
| 48.5
| 51
| 0.835052
| 12
| 97
| 6.75
| 0.583333
| 0.296296
| 0.419753
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 97
| 2
| 52
| 48.5
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4dae00e9c146c73b2c7e1e4325efd52153f46f67
| 18,791
|
py
|
Python
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/topi/python/topi/intel_graphics/conv2d.py
|
mengkai94/training_results_v0.6
|
43dc3e250f8da47b5f8833197d74cb8cf1004fc9
|
[
"Apache-2.0"
] | 64
|
2021-05-02T14:42:34.000Z
|
2021-05-06T01:35:03.000Z
|
topi/python/topi/intel_graphics/conv2d.py
|
clhne/tvm
|
d59320c764bd09474775e1b292f3c05c27743d24
|
[
"Apache-2.0"
] | 23
|
2019-07-29T05:21:52.000Z
|
2020-08-31T18:51:42.000Z
|
topi/python/topi/intel_graphics/conv2d.py
|
clhne/tvm
|
d59320c764bd09474775e1b292f3c05c27743d24
|
[
"Apache-2.0"
] | 51
|
2019-07-12T05:10:25.000Z
|
2021-07-28T16:19:06.000Z
|
# pylint: disable=invalid-name,unused-variable,unused-argument,no-else-return, too-many-arguments, too-many-locals, too-many-statements, no-member, too-many-branches
"""conv2d schedule on Intel Graphics"""
from __future__ import absolute_import as _abs
import tvm
from .. import generic
from .. import util
from .. import tag
from ..nn import pad
from ..nn.conv2d import conv2d, conv2d_NCHWc, conv2d_alter_layout, _get_workload
from ..nn.util import get_pad_tuple
from ..util import simplify
##### SCHEDULE UTILITIES #####
def tile_and_bind3d(s, tensor, z, y, x, z_factor=2, y_factor=None, x_factor=None):
""" tile and bind 3d """
y_factor = y_factor or z_factor
x_factor = x_factor or y_factor
zo, zi = s[tensor].split(z, z_factor)
yo, yi = s[tensor].split(y, y_factor)
xo, xi = s[tensor].split(x, x_factor)
s[tensor].reorder(zo, yo, xo, zi, yi, xi)
thread_z = tvm.thread_axis((0, z_factor), "threadIdx.z")
thread_y = tvm.thread_axis((0, y_factor), "threadIdx.y")
thread_x = tvm.thread_axis((0, x_factor), "threadIdx.x")
s[tensor].bind(zo, tvm.thread_axis("blockIdx.z"))
s[tensor].bind(zi, thread_z)
s[tensor].bind(yo, tvm.thread_axis("blockIdx.y"))
s[tensor].bind(yi, thread_y)
s[tensor].bind(xo, tvm.thread_axis("blockIdx.x"))
s[tensor].bind(xi, thread_x)
return xi, thread_z, thread_y, thread_x
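# A hedged usage sketch (assumes the TVM 0.x API used throughout this file;
# the shapes and names below are illustrative, not from the original code).
# Guarded so it only runs when the module is executed directly.
if __name__ == '__main__':
    _A = tvm.placeholder((8, 32, 32), name='A')
    _C = tvm.compute((8, 32, 32), lambda z, y, x: _A[z, y, x] * 2.0, name='C')
    _s = tvm.create_schedule(_C.op)
    _z, _y, _x = _s[_C].op.axis
    # outer halves land on blockIdx.{z,y,x}, inner halves on threadIdx.{z,y,x}
    tile_and_bind3d(_s, _C, _z, _y, _x, z_factor=2, y_factor=8, x_factor=8)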
@conv2d_alter_layout.register(["intel_graphics"])
def _alter_conv2d_layout(attrs, inputs, tinfos):
import nnvm.symbol as sym
copy_inputs = [s for s in inputs]
data = tinfos[0]
kernel = tinfos[1]
import ast
padding = ast.literal_eval(attrs['padding'])
stride = ast.literal_eval(attrs['strides'])
wkl = _get_workload(data, kernel, stride, padding, data.dtype)
oc_bn = 1
kernel_shape = util.get_const_tuple(kernel.shape)
for oc_bn in range(16, 1, -1):
if kernel_shape[0] % oc_bn == 0:
break
new_attrs = {k: attrs[k] for k in attrs.keys()}
new_attrs['kernel_layout'] = 'OIHW%do' % (oc_bn)
return sym.contrib.conv2d_NCHWc(*copy_inputs, **new_attrs)
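# Worked example (illustrative numbers, not from the original code): for
# kernel_shape[0] == 220 the loop above scans 16, 15, ..., 2 and stops at the
# first divisor, oc_bn == 11 (220 % 11 == 0), so the new kernel_layout is
# 'OIHW11o'. For a prime channel count such as 17 the loop falls through and
# leaves oc_bn == 2 even though it is not a divisor.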
@conv2d_NCHWc.register(["intel_graphics"])
def _decl_conv2d(data, kernel, num_filter, kernel_size, stride, padding, layout,\
out_layout, out_dtype='float32'):
"""Conv2D operator for Intel Graphics backend.
Parameters
----------
data : tvm.Tensor
4-D with shape [batch, in_channel, in_height, in_width]
kernel : tvm.Tensor
5-D with shape [num_filter, in_channel, filter_height, filter_width, num_filter_vec]
stride : int or a list/tuple of two ints
stride size, or [stride_height, stride_width]
padding : int or a list/tuple of two ints
padding size, or [pad_height, pad_width]
layout : str
layout of data
Returns
-------
output : tvm.Tensor
4-D with shape [batch, out_channel, out_height, out_width]
"""
assert data.shape[0].value == 1, "only batch size 1 convolution is supported on Intel GPU"
assert data.dtype == kernel.dtype, "inputs with different data types are not supported yet"
out_dtype = data.dtype
HPAD, WPAD, _, _ = get_pad_tuple(padding, kernel)
kernel_shape = util.get_const_tuple(kernel.shape)
if isinstance(stride, (tuple, list)):
HSTR, WSTR = stride
else:
HSTR, WSTR = stride, stride
return _decl_cl_spatialpack_NCHWc(data, kernel, stride, padding, out_dtype)
@generic.schedule_conv2d_NCHWc.register(["intel_graphics"])
def schedule_conv2d_NCHWc(num_filter, kernel_size, stride, padding, layout, out_layout, outs):
"""Schedule for conv2d_nchw for Intel Graphics
Parameters
----------
outs: Array of Tensor
The computation graph description of conv2d_nchw
in the format of an array of tensors.
Returns
-------
s: Schedule
The computation schedule for conv2d_nchw.
"""
outs = [outs] if isinstance(outs, tvm.tensor.Tensor) else outs
s = tvm.create_schedule([x.op for x in outs])
scheduled_ops = []
def traverse(op):
"""inline all one-to-one-mapping operators except the last stage (output)"""
if tag.is_broadcast(op.tag):
if op not in s.outputs:
s[op].compute_inline()
for tensor in op.input_tensors:
if tensor.op.input_tensors and tensor.op not in scheduled_ops:
traverse(tensor.op)
if "4_5" in op.tag or "4_4" in op.tag or "2_7" in op.tag or "2_14" in op.tag \
or "1_16" in op.tag:
_schedule_cl_spatialpack_NCHWc(s, op)
scheduled_ops.append(op)
traverse(outs[0].op)
return s
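# For example, a conv2d -> bias-add -> relu chain reaches traverse() with the
# elementwise stages carrying broadcast tags: those are inlined into the conv
# kernel, and the conv stage itself is scheduled via the "4_5"/"2_14"/... tag
# attached in the declaration above.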
def _decl_cl_spatialpack_NCHWc(data, kernel, stride, padding, out_dtype='float16'):
batch, in_channel, in_height, in_width = [util.get_const_int(x) for x in data.shape]
num_filter, channel, kernel_h, kernel_w, nv = [util.get_const_int(x) for x in kernel.shape]
num_filter = num_filter * nv
pad_top, pad_left, pad_down, pad_right = get_pad_tuple(padding, kernel)
if isinstance(stride, (tuple, list)):
stride_h, stride_w = stride
else:
stride_h, stride_w = stride, stride
out_channel = num_filter
out_height = simplify((in_height - kernel_h + pad_top + pad_down) // stride_h + 1)
out_width = simplify((in_width - kernel_w + pad_left + pad_right) // stride_w + 1)
oshape = (batch, out_channel, out_height, out_width)
rc = tvm.reduce_axis((0, in_channel), name='rc')
ry = tvm.reduce_axis((0, kernel_h), name='ry')
rx = tvm.reduce_axis((0, kernel_w), name='rx')
block_w = 0
block_h = 0
if stride_h == 2:
if num_filter + kernel_h == 515:
conv_tag = "4_4"
block_h = 4
block_w = 4
else:
conv_tag = "4_5"
block_h = 4
block_w = 5
elif kernel_h == 3:
if num_filter == 512:
conv_tag = "2_7"
block_h = 2
block_w = 7
else:
conv_tag = "2_14"
block_h = 2
block_w = 14
else:
conv_tag = "1_16"
block_h = 1
block_w = 16
c_h = out_height
c_w = out_width
if not out_height % block_h == 0:
c_h = (out_height // block_h + 1) * block_h
if not out_width % block_w == 0:
c_w = (out_width // block_w + 1) * block_w
pad_before = [0, 0, pad_top, pad_left]
pad_after = [0, 0, pad_down + c_h - block_h, pad_right + c_w - block_w]
temp = pad(data, pad_before, pad_after, name="pad_temp")
cshape = (batch, out_channel // nv, c_h, c_w, nv)
conv = tvm.compute(
cshape,
lambda nn, ff, yy, xx, vc:\
tvm.sum(
temp[nn, rc, yy * stride_h + ry, xx * stride_w + rx].astype(out_dtype) *
kernel[ff, rc, ry, rx, vc].astype(out_dtype),
axis=[rc, ry, rx]), tag=conv_tag, name='conv')
output = tvm.compute(
oshape,
lambda nn, ff, yy, xx:
conv[nn][ff//nv][yy][xx][ff%nv],
name='output_unpack', tag=conv_tag)
return output
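# Shape walk-through (illustrative, assuming stride 1 and padding 1): data
# (1, 256, 14, 14) with packed kernel (32, 256, 3, 3, 16) gives
# num_filter = 32 * 16 = 512 and kernel_h == 3, so conv_tag is "2_7";
# the 14x14 output is already a multiple of (block_h=2, block_w=7), hence
# c_h = 14, c_w = 14 and cshape = (1, 32, 14, 14, 16) before the final
# unpack back to NCHW.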
def _schedule_cl_spatialpack_NCHWc(s, op):
output = op.output(0)
_, _, out_height, out_width = [util.get_const_int(x) for x in output.shape]
conv = op.input_tensors[0]
temp = s[conv].op.input_tensors[0]
kernel = s[conv].op.input_tensors[1]
temp_W = s.cache_read(temp, "warp", [conv])
conv_L = s.cache_write(conv, "local")
kernel_L = s.cache_read(kernel, "local", [conv_L])
_, in_channel, temp_h, temp_w = [util.get_const_int(x) for x in temp.shape]
if "1_16" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 1
OUTPUT_BLOCK_WIDTH = 16
elif "2_14" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 2
OUTPUT_BLOCK_WIDTH = 14
elif "2_7" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 2
OUTPUT_BLOCK_WIDTH = 7
elif "4_5" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 4
OUTPUT_BLOCK_WIDTH = 5
elif "4_4" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 4
OUTPUT_BLOCK_WIDTH = 4
# schedule conv
z_factor = 1
y_factor = 1
x_factor = 16
thread_z = tvm.thread_axis((0, z_factor), "threadIdx.z")
thread_y = tvm.thread_axis((0, y_factor), "threadIdx.y")
thread_x = tvm.thread_axis((0, x_factor), "threadIdx.x")
_, co, oh, ow, vc = s[conv].op.axis
ooh, ioh = s[conv].split(oh, factor=OUTPUT_BLOCK_HEIGHT)
oow, iow = s[conv].split(ow, factor=OUTPUT_BLOCK_WIDTH)
s[conv].reorder(_, co, ooh, oow, vc, ioh, iow)
coo, coi = s[conv].split(co, nparts=1)
ooho, oohi = s[conv].split(ooh, factor=z_factor)
oowo, oowi = s[conv].split(oow, factor=y_factor)
vco, vci = s[conv].split(vc, factor=x_factor)
s[conv].reorder(_, coo, vco, ooho, oowo, coi, oohi, oowi, vci, ioh, iow)
s[conv].bind(oohi, thread_z)
s[conv].bind(oowi, thread_y)
s[conv].bind(vci, thread_x)
s[conv].bind(ooho, tvm.thread_axis("blockIdx.z"))
s[conv].bind(oowo, tvm.thread_axis("blockIdx.y"))
s[conv].bind(coi, tvm.thread_axis("blockIdx.x"))
# schedule conv_L
s[conv_L].compute_at(s[conv], vci)
i, oc, h, w, vc = s[conv_L].op.axis
rc, ry, rx = s[conv_L].op.reduce_axis
s[conv_L].reorder(i, oc, rc, ry, rx, vc, h, w)
s[temp_W].compute_at(s[conv_L], rc)
if kernel.shape[3].value != 7:
s[conv_L].unroll(ry)
s[conv_L].unroll(rx)
# schedule temp
_, ci, h, w = s[temp].op.axis
tile_and_bind3d(s, temp, ci, h, w, 1, 16, 16)
# schedule temp_W
_, ci, h, w = s[temp_W].op.axis
zo, zi = s[temp_W].split(ci, 1)
yo, yi = s[temp_W].split(h, 1)
xo, xi = s[temp_W].split(w, 16)
s[temp_W].reorder(zo, yo, xo, zi, yi, xi)
s[temp_W].bind(zi, thread_z)
s[temp_W].bind(yi, thread_y)
s[temp_W].bind(xi, thread_x)
s[temp_W].storage_align(s[temp_W].op.axis[2], 16, 0)
# schedule kernel
# schedule kernel_L
if "2_14" in s[conv].op.tag:
s[kernel_L].compute_at(s[conv_L], ry)
else:
s[kernel_L].compute_at(s[conv_L], rx)
# schedule output
if output.op in s.outputs:
out = output
else:
s[output].compute_inline()
out = s.outputs[0]
_, co, h, w = s[out].op.axis
tile_and_bind3d(s, out, w, h, co, 4, 8, 8)
@conv2d.register(["intel_graphics"])
def decl_conv2d(data, kernel, stride, padding, layout='NCHW', out_dtype='float32'):
"""Conv2D operator for Intel Graphics backend.
Parameters
----------
data : tvm.Tensor
4-D with shape [batch, in_channel, in_height, in_width]
kernel : tvm.Tensor
4-D with shape [num_filter, in_channel, filter_height, filter_width]
stride : int or a list/tuple of two ints
stride size, or [stride_height, stride_width]
padding : int or a list/tuple of two ints
padding size, or [pad_height, pad_width]
layout : str
layout of data
Returns
-------
output : tvm.Tensor
4-D with shape [batch, out_channel, out_height, out_width]
"""
assert layout == 'NCHW', "only NCHW layout convolution is supported on Intel GPU"
assert data.shape[0].value == 1, "only batch size 1 convolution is supported on Intel GPU"
assert data.dtype == kernel.dtype, "inputs with different data types are not supported yet"
out_dtype = data.dtype
HPAD, WPAD, _, _ = get_pad_tuple(padding, kernel)
kernel_shape = util.get_const_tuple(kernel.shape)
if isinstance(stride, (tuple, list)):
HSTR, WSTR = stride
else:
HSTR, WSTR = stride, stride
return _decl_cl_spatialpack(data, kernel, stride, padding, layout, out_dtype)
@generic.schedule_conv2d_nchw.register(["intel_graphics"])
def schedule_conv2d_nchw(outs):
"""Schedule for conv2d_nchw for Intel Graphics
Parameters
----------
outs: Array of Tensor
The computation graph description of conv2d_nchw
in the format of an array of tensors.
Returns
-------
s: Schedule
The computation schedule for conv2d_nchw.
"""
outs = [outs] if isinstance(outs, tvm.tensor.Tensor) else outs
s = tvm.create_schedule([x.op for x in outs])
scheduled_ops = []
def traverse(op):
"""inline all one-to-one-mapping operators except the last stage (output)"""
if tag.is_broadcast(op.tag):
if op not in s.outputs:
s[op].compute_inline()
for tensor in op.input_tensors:
if tensor.op.input_tensors and tensor.op not in scheduled_ops:
traverse(tensor.op)
if "4_5" in op.tag or "4_4" in op.tag or "2_7" in op.tag or "2_14" in op.tag \
or "1_16" in op.tag:
_schedule_cl_spatialpack(s, op)
scheduled_ops.append(op)
traverse(outs[0].op)
return s
def _decl_cl_spatialpack(data, kernel, stride, padding, layout, out_dtype='float16'):
batch, in_channel, in_height, in_width = [util.get_const_int(x) for x in data.shape]
num_filter, channel, kernel_h, kernel_w = [util.get_const_int(x) for x in kernel.shape]
pad_top, pad_left, pad_down, pad_right = get_pad_tuple(padding, kernel)
if isinstance(stride, (tuple, list)):
stride_h, stride_w = stride
else:
stride_h, stride_w = stride, stride
out_channel = num_filter
out_height = simplify((in_height - kernel_h + pad_top + pad_down) // stride_h + 1)
out_width = simplify((in_width - kernel_w + pad_left + pad_right) // stride_w + 1)
oshape = (batch, out_channel, out_height, out_width)
rc = tvm.reduce_axis((0, in_channel), name='rc')
ry = tvm.reduce_axis((0, kernel_h), name='ry')
rx = tvm.reduce_axis((0, kernel_w), name='rx')
block_w = 0
block_h = 0
if stride_h == 2:
if num_filter + kernel_h == 515:
conv_tag = "4_4"
block_h = 4
block_w = 4
else:
conv_tag = "4_5"
block_h = 4
block_w = 5
elif kernel_h == 3:
if num_filter == 512:
conv_tag = "2_7"
block_h = 2
block_w = 7
else:
conv_tag = "2_14"
block_h = 2
block_w = 14
else:
conv_tag = "1_16"
block_h = 1
block_w = 16
c_h = out_height
c_w = out_width
if not out_width % block_w == 0:
c_w = (out_width // block_w + 1) * block_w
if not out_height % block_h == 0:
c_h = (out_height // block_h + 1) * block_h
pad_before = [0, 0, pad_top, pad_left]
pad_after = [0, 0, pad_down + c_h - block_h, pad_right + c_w - block_w]
temp = pad(data, pad_before, pad_after, name="pad_temp")
nv = 16
if not num_filter % nv == 0:
num_filter = (num_filter // nv + 1) * nv
out_channel = num_filter
cshape = (batch, out_channel // nv, c_h, c_w, nv)
kvshape = (num_filter // nv, channel, kernel_h, kernel_w, nv)
kernel_vec = tvm.compute(
kvshape,
lambda co, ci, kh, kw, vc:
kernel[co*nv + vc][ci][kh][kw], name='kernel_vec')
conv = tvm.compute(
cshape,
lambda nn, ff, yy, xx, vc:\
tvm.sum(
temp[nn, rc, yy * stride_h + ry, xx * stride_w + rx].astype(out_dtype) *
kernel_vec[ff, rc, ry, rx, vc].astype(out_dtype),
axis=[rc, ry, rx]), tag=conv_tag, name='conv')
output = tvm.compute(
oshape,
lambda nn, ff, yy, xx:
conv[nn][ff//nv][yy][xx][ff%nv],
name='output_unpack', tag=conv_tag)
return output
def _schedule_cl_spatialpack(s, op):
output = op.output(0)
_, _, out_height, out_width = [util.get_const_int(x) for x in output.shape]
conv = op.input_tensors[0]
temp = s[conv].op.input_tensors[0]
kernel_vec = s[conv].op.input_tensors[1]
kernel = s[kernel_vec].op.input_tensors[0]
temp_W = s.cache_read(temp, "warp", [conv])
conv_L = s.cache_write(conv, "local")
kernel_L = s.cache_read(kernel_vec, "local", [conv_L])
_, in_channel, temp_h, temp_w = [util.get_const_int(x) for x in temp.shape]
if "1_16" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 1
OUTPUT_BLOCK_WIDTH = 16
elif "2_14" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 2
OUTPUT_BLOCK_WIDTH = 14
elif "2_7" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 2
OUTPUT_BLOCK_WIDTH = 7
elif "4_5" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 4
OUTPUT_BLOCK_WIDTH = 5
elif "4_4" in s[conv].op.tag:
OUTPUT_BLOCK_HEIGHT = 4
OUTPUT_BLOCK_WIDTH = 4
# schedule conv
z_factor = 1
y_factor = 1
x_factor = 16
thread_z = tvm.thread_axis((0, z_factor), "threadIdx.z")
thread_y = tvm.thread_axis((0, y_factor), "threadIdx.y")
thread_x = tvm.thread_axis((0, x_factor), "threadIdx.x")
_, co, oh, ow, vc = s[conv].op.axis
ooh, ioh = s[conv].split(oh, factor=OUTPUT_BLOCK_HEIGHT)
oow, iow = s[conv].split(ow, factor=OUTPUT_BLOCK_WIDTH)
s[conv].reorder(_, co, ooh, oow, vc, ioh, iow)
coo, coi = s[conv].split(co, nparts=1)
ooho, oohi = s[conv].split(ooh, factor=z_factor)
oowo, oowi = s[conv].split(oow, factor=y_factor)
vco, vci = s[conv].split(vc, factor=x_factor)
s[conv].reorder(_, coo, vco, ooho, oowo, coi, oohi, oowi, vci, ioh, iow)
s[conv].bind(oohi, thread_z)
s[conv].bind(oowi, thread_y)
s[conv].bind(vci, thread_x)
s[conv].bind(ooho, tvm.thread_axis("blockIdx.z"))
s[conv].bind(oowo, tvm.thread_axis("blockIdx.y"))
s[conv].bind(coi, tvm.thread_axis("blockIdx.x"))
# schedule conv_L
s[conv_L].compute_at(s[conv], vci)
i, oc, h, w, vc = s[conv_L].op.axis
rc, ry, rx = s[conv_L].op.reduce_axis
s[conv_L].reorder(i, oc, rc, ry, rx, vc, h, w)
s[temp_W].compute_at(s[conv_L], rc)
if kernel.shape[3].value != 7:
s[conv_L].unroll(ry)
s[conv_L].unroll(rx)
# schedule temp
_, ci, h, w = s[temp].op.axis
tile_and_bind3d(s, temp, ci, h, w, 1, 16, 16)
# schedule temp_W
_, ci, h, w = s[temp_W].op.axis
zo, zi = s[temp_W].split(ci, 1)
yo, yi = s[temp_W].split(h, 1)
xo, xi = s[temp_W].split(w, 16)
s[temp_W].reorder(zo, yo, xo, zi, yi, xi)
s[temp_W].bind(zi, thread_z)
s[temp_W].bind(yi, thread_y)
s[temp_W].bind(xi, thread_x)
s[temp_W].storage_align(s[temp_W].op.axis[2], 16, 0)
s[kernel_vec].compute_inline()
# schedule kernel_L
if "2_14" in s[conv].op.tag:
s[kernel_L].compute_at(s[conv_L], ry)
else:
s[kernel_L].compute_at(s[conv_L], rx)
# schedule output
if output.op in s.outputs:
out = output
else:
s[output].compute_inline()
out = s.outputs[0]
_, co, h, w = s[out].op.axis
tile_and_bind3d(s, out, w, h, co, 4, 8, 8)
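# A hedged end-to-end sketch (assumes the TVM 0.x API this file targets; the
# shapes are illustrative and decl_conv2d/schedule_conv2d_nchw are the
# functions defined above). Guarded so it only runs when executed directly.
if __name__ == '__main__':
    data = tvm.placeholder((1, 256, 14, 14), name='data')
    kernel = tvm.placeholder((512, 256, 3, 3), name='kernel')
    conv = decl_conv2d(data, kernel, stride=1, padding=1, layout='NCHW')
    sch = schedule_conv2d_nchw([conv])
    # inspect the lowered IR of the "2_7" spatial-pack schedule
    print(tvm.lower(sch, [data, kernel, conv], simple_mode=True))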
| 33.435943
| 165
| 0.616093
| 3,007
| 18,791
| 3.624875
| 0.081809
| 0.030275
| 0.01211
| 0.009908
| 0.875321
| 0.860642
| 0.838532
| 0.836789
| 0.825138
| 0.816514
| 0
| 0.021625
| 0.251876
| 18,791
| 561
| 166
| 33.495544
| 0.753735
| 0.117822
| 0
| 0.804569
| 0
| 0
| 0.048183
| 0
| 0
| 0
| 0
| 0
| 0.01269
| 1
| 0.030457
| false
| 0
| 0.027919
| 0
| 0.07868
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4de5188918f29f59fce02f7f1bf4c3caa3e6d7db
| 34,117
|
py
|
Python
|
python3/lib/python3.6/site-packages/tensorflow/core/profiler/tfprof_output_pb2.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 3
|
2020-10-12T15:47:01.000Z
|
2022-01-14T19:51:26.000Z
|
python3/lib/python3.6/site-packages/tensorflow/core/profiler/tfprof_output_pb2.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | null | null | null |
python3/lib/python3.6/site-packages/tensorflow/core/profiler/tfprof_output_pb2.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 2
|
2020-08-03T13:02:06.000Z
|
2020-11-04T03:15:44.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/profiler/tfprof_output.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/profiler/tfprof_output.proto',
package='tensorflow.tfprof',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n,tensorflow/core/profiler/tfprof_output.proto\x12\x11tensorflow.tfprof\x1a,tensorflow/core/framework/tensor_shape.proto\x1a%tensorflow/core/framework/types.proto\"v\n\x11TFProfTensorProto\x12#\n\x05\x64type\x18\x01 \x01(\x0e\x32\x14.tensorflow.DataType\x12\x14\n\x0cvalue_double\x18\x02 \x03(\x01\x12\x13\n\x0bvalue_int64\x18\x03 \x03(\x03\x12\x11\n\tvalue_str\x18\x04 \x03(\t\"\x8e\x07\n\x0eGraphNodeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12:\n\x0ctensor_value\x18\x0f \x01(\x0b\x32$.tensorflow.tfprof.TFProfTensorProto\x12\x11\n\trun_count\x18\x15 \x01(\x03\x12\x13\n\x0b\x65xec_micros\x18\x02 \x01(\x03\x12\x1f\n\x17\x61\x63\x63\x65lerator_exec_micros\x18\x11 \x01(\x03\x12\x17\n\x0f\x63pu_exec_micros\x18\x12 \x01(\x03\x12\x17\n\x0frequested_bytes\x18\x03 \x01(\x03\x12\x12\n\npeak_bytes\x18\x18 \x01(\x03\x12\x16\n\x0eresidual_bytes\x18\x19 \x01(\x03\x12\x14\n\x0coutput_bytes\x18\x1a \x01(\x03\x12\x12\n\nparameters\x18\x04 \x01(\x03\x12\x11\n\tfloat_ops\x18\r \x01(\x03\x12\x0f\n\x07\x64\x65vices\x18\n \x03(\t\x12\x1e\n\x16total_definition_count\x18\x17 \x01(\x03\x12\x17\n\x0ftotal_run_count\x18\x16 \x01(\x03\x12\x19\n\x11total_exec_micros\x18\x06 \x01(\x03\x12%\n\x1dtotal_accelerator_exec_micros\x18\x13 \x01(\x03\x12\x1d\n\x15total_cpu_exec_micros\x18\x14 \x01(\x03\x12\x1d\n\x15total_requested_bytes\x18\x07 \x01(\x03\x12\x18\n\x10total_peak_bytes\x18\x1b \x01(\x03\x12\x1c\n\x14total_residual_bytes\x18\x1c \x01(\x03\x12\x1a\n\x12total_output_bytes\x18\x1d \x01(\x03\x12\x18\n\x10total_parameters\x18\x08 \x01(\x03\x12\x17\n\x0ftotal_float_ops\x18\x0e \x01(\x03\x12,\n\x06shapes\x18\x0b \x03(\x0b\x32\x1c.tensorflow.TensorShapeProto\x12H\n\x0cinput_shapes\x18\x10 \x03(\x0b\x32\x32.tensorflow.tfprof.GraphNodeProto.InputShapesEntry\x12\x33\n\x08\x63hildren\x18\x0c \x03(\x0b\x32!.tensorflow.tfprof.GraphNodeProto\x1aP\n\x10InputShapesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.tensorflow.TensorShapeProto:\x02\x38\x01\"\xed\x04\n\x13MultiGraphNodeProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x65xec_micros\x18\x02 \x01(\x03\x12\x1f\n\x17\x61\x63\x63\x65lerator_exec_micros\x18\x0c \x01(\x03\x12\x17\n\x0f\x63pu_exec_micros\x18\r \x01(\x03\x12\x17\n\x0frequested_bytes\x18\x03 \x01(\x03\x12\x12\n\npeak_bytes\x18\x10 \x01(\x03\x12\x16\n\x0eresidual_bytes\x18\x11 \x01(\x03\x12\x14\n\x0coutput_bytes\x18\x12 \x01(\x03\x12\x12\n\nparameters\x18\x04 \x01(\x03\x12\x11\n\tfloat_ops\x18\x05 \x01(\x03\x12\x19\n\x11total_exec_micros\x18\x06 \x01(\x03\x12%\n\x1dtotal_accelerator_exec_micros\x18\x0e \x01(\x03\x12\x1d\n\x15total_cpu_exec_micros\x18\x0f \x01(\x03\x12\x1d\n\x15total_requested_bytes\x18\x07 \x01(\x03\x12\x18\n\x10total_peak_bytes\x18\x13 \x01(\x03\x12\x1c\n\x14total_residual_bytes\x18\x14 \x01(\x03\x12\x1a\n\x12total_output_bytes\x18\x15 \x01(\x03\x12\x18\n\x10total_parameters\x18\x08 \x01(\x03\x12\x17\n\x0ftotal_float_ops\x18\t \x01(\x03\x12\x36\n\x0bgraph_nodes\x18\n \x03(\x0b\x32!.tensorflow.tfprof.GraphNodeProto\x12\x38\n\x08\x63hildren\x18\x0b \x03(\x0b\x32&.tensorflow.tfprof.MultiGraphNodeProto\"\xc2\x01\n\x0b\x41\x64viceProto\x12>\n\x08\x63heckers\x18\x01 \x03(\x0b\x32,.tensorflow.tfprof.AdviceProto.CheckersEntry\x1aW\n\rCheckersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.tensorflow.tfprof.AdviceProto.Checker:\x02\x38\x01\x1a\x1a\n\x07\x43hecker\x12\x0f\n\x07reports\x18\x02 \x03(\tb\x06proto3')
,
dependencies=[tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,])
_TFPROFTENSORPROTO = _descriptor.Descriptor(
name='TFProfTensorProto',
full_name='tensorflow.tfprof.TFProfTensorProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dtype', full_name='tensorflow.tfprof.TFProfTensorProto.dtype', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_double', full_name='tensorflow.tfprof.TFProfTensorProto.value_double', index=1,
number=2, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_int64', full_name='tensorflow.tfprof.TFProfTensorProto.value_int64', index=2,
number=3, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_str', full_name='tensorflow.tfprof.TFProfTensorProto.value_str', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=152,
serialized_end=270,
)
_GRAPHNODEPROTO_INPUTSHAPESENTRY = _descriptor.Descriptor(
name='InputShapesEntry',
full_name='tensorflow.tfprof.GraphNodeProto.InputShapesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='tensorflow.tfprof.GraphNodeProto.InputShapesEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='tensorflow.tfprof.GraphNodeProto.InputShapesEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1103,
serialized_end=1183,
)
_GRAPHNODEPROTO = _descriptor.Descriptor(
name='GraphNodeProto',
full_name='tensorflow.tfprof.GraphNodeProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='tensorflow.tfprof.GraphNodeProto.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tensor_value', full_name='tensorflow.tfprof.GraphNodeProto.tensor_value', index=1,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='run_count', full_name='tensorflow.tfprof.GraphNodeProto.run_count', index=2,
number=21, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.exec_micros', index=3,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='accelerator_exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.accelerator_exec_micros', index=4,
number=17, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu_exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.cpu_exec_micros', index=5,
number=18, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='requested_bytes', full_name='tensorflow.tfprof.GraphNodeProto.requested_bytes', index=6,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='peak_bytes', full_name='tensorflow.tfprof.GraphNodeProto.peak_bytes', index=7,
number=24, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='residual_bytes', full_name='tensorflow.tfprof.GraphNodeProto.residual_bytes', index=8,
number=25, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_bytes', full_name='tensorflow.tfprof.GraphNodeProto.output_bytes', index=9,
number=26, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='parameters', full_name='tensorflow.tfprof.GraphNodeProto.parameters', index=10,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='float_ops', full_name='tensorflow.tfprof.GraphNodeProto.float_ops', index=11,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='devices', full_name='tensorflow.tfprof.GraphNodeProto.devices', index=12,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_definition_count', full_name='tensorflow.tfprof.GraphNodeProto.total_definition_count', index=13,
number=23, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_run_count', full_name='tensorflow.tfprof.GraphNodeProto.total_run_count', index=14,
number=22, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.total_exec_micros', index=15,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_accelerator_exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.total_accelerator_exec_micros', index=16,
number=19, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cpu_exec_micros', full_name='tensorflow.tfprof.GraphNodeProto.total_cpu_exec_micros', index=17,
number=20, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_requested_bytes', full_name='tensorflow.tfprof.GraphNodeProto.total_requested_bytes', index=18,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_peak_bytes', full_name='tensorflow.tfprof.GraphNodeProto.total_peak_bytes', index=19,
number=27, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_residual_bytes', full_name='tensorflow.tfprof.GraphNodeProto.total_residual_bytes', index=20,
number=28, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_output_bytes', full_name='tensorflow.tfprof.GraphNodeProto.total_output_bytes', index=21,
number=29, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_parameters', full_name='tensorflow.tfprof.GraphNodeProto.total_parameters', index=22,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_float_ops', full_name='tensorflow.tfprof.GraphNodeProto.total_float_ops', index=23,
number=14, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='shapes', full_name='tensorflow.tfprof.GraphNodeProto.shapes', index=24,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_shapes', full_name='tensorflow.tfprof.GraphNodeProto.input_shapes', index=25,
number=16, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='children', full_name='tensorflow.tfprof.GraphNodeProto.children', index=26,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GRAPHNODEPROTO_INPUTSHAPESENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=273,
serialized_end=1183,
)
_MULTIGRAPHNODEPROTO = _descriptor.Descriptor(
name='MultiGraphNodeProto',
full_name='tensorflow.tfprof.MultiGraphNodeProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='tensorflow.tfprof.MultiGraphNodeProto.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.exec_micros', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='accelerator_exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.accelerator_exec_micros', index=2,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu_exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.cpu_exec_micros', index=3,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='requested_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.requested_bytes', index=4,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='peak_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.peak_bytes', index=5,
number=16, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='residual_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.residual_bytes', index=6,
number=17, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.output_bytes', index=7,
number=18, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='parameters', full_name='tensorflow.tfprof.MultiGraphNodeProto.parameters', index=8,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='float_ops', full_name='tensorflow.tfprof.MultiGraphNodeProto.float_ops', index=9,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_exec_micros', index=10,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_accelerator_exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_accelerator_exec_micros', index=11,
number=14, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_cpu_exec_micros', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_cpu_exec_micros', index=12,
number=15, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_requested_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_requested_bytes', index=13,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_peak_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_peak_bytes', index=14,
number=19, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_residual_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_residual_bytes', index=15,
number=20, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_output_bytes', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_output_bytes', index=16,
number=21, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_parameters', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_parameters', index=17,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_float_ops', full_name='tensorflow.tfprof.MultiGraphNodeProto.total_float_ops', index=18,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='graph_nodes', full_name='tensorflow.tfprof.MultiGraphNodeProto.graph_nodes', index=19,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='children', full_name='tensorflow.tfprof.MultiGraphNodeProto.children', index=20,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1186,
serialized_end=1807,
)
_ADVICEPROTO_CHECKERSENTRY = _descriptor.Descriptor(
name='CheckersEntry',
full_name='tensorflow.tfprof.AdviceProto.CheckersEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='tensorflow.tfprof.AdviceProto.CheckersEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='tensorflow.tfprof.AdviceProto.CheckersEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1889,
serialized_end=1976,
)
_ADVICEPROTO_CHECKER = _descriptor.Descriptor(
name='Checker',
full_name='tensorflow.tfprof.AdviceProto.Checker',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='reports', full_name='tensorflow.tfprof.AdviceProto.Checker.reports', index=0,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1978,
serialized_end=2004,
)
_ADVICEPROTO = _descriptor.Descriptor(
name='AdviceProto',
full_name='tensorflow.tfprof.AdviceProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='checkers', full_name='tensorflow.tfprof.AdviceProto.checkers', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ADVICEPROTO_CHECKERSENTRY, _ADVICEPROTO_CHECKER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1810,
serialized_end=2004,
)
_TFPROFTENSORPROTO.fields_by_name['dtype'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE
_GRAPHNODEPROTO_INPUTSHAPESENTRY.fields_by_name['value'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO
_GRAPHNODEPROTO_INPUTSHAPESENTRY.containing_type = _GRAPHNODEPROTO
_GRAPHNODEPROTO.fields_by_name['tensor_value'].message_type = _TFPROFTENSORPROTO
_GRAPHNODEPROTO.fields_by_name['shapes'].message_type = tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2._TENSORSHAPEPROTO
_GRAPHNODEPROTO.fields_by_name['input_shapes'].message_type = _GRAPHNODEPROTO_INPUTSHAPESENTRY
_GRAPHNODEPROTO.fields_by_name['children'].message_type = _GRAPHNODEPROTO
_MULTIGRAPHNODEPROTO.fields_by_name['graph_nodes'].message_type = _GRAPHNODEPROTO
_MULTIGRAPHNODEPROTO.fields_by_name['children'].message_type = _MULTIGRAPHNODEPROTO
_ADVICEPROTO_CHECKERSENTRY.fields_by_name['value'].message_type = _ADVICEPROTO_CHECKER
_ADVICEPROTO_CHECKERSENTRY.containing_type = _ADVICEPROTO
_ADVICEPROTO_CHECKER.containing_type = _ADVICEPROTO
_ADVICEPROTO.fields_by_name['checkers'].message_type = _ADVICEPROTO_CHECKERSENTRY
DESCRIPTOR.message_types_by_name['TFProfTensorProto'] = _TFPROFTENSORPROTO
DESCRIPTOR.message_types_by_name['GraphNodeProto'] = _GRAPHNODEPROTO
DESCRIPTOR.message_types_by_name['MultiGraphNodeProto'] = _MULTIGRAPHNODEPROTO
DESCRIPTOR.message_types_by_name['AdviceProto'] = _ADVICEPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TFProfTensorProto = _reflection.GeneratedProtocolMessageType('TFProfTensorProto', (_message.Message,), dict(
DESCRIPTOR = _TFPROFTENSORPROTO,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.TFProfTensorProto)
))
_sym_db.RegisterMessage(TFProfTensorProto)
GraphNodeProto = _reflection.GeneratedProtocolMessageType('GraphNodeProto', (_message.Message,), dict(
InputShapesEntry = _reflection.GeneratedProtocolMessageType('InputShapesEntry', (_message.Message,), dict(
DESCRIPTOR = _GRAPHNODEPROTO_INPUTSHAPESENTRY,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.GraphNodeProto.InputShapesEntry)
))
,
DESCRIPTOR = _GRAPHNODEPROTO,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.GraphNodeProto)
))
_sym_db.RegisterMessage(GraphNodeProto)
_sym_db.RegisterMessage(GraphNodeProto.InputShapesEntry)
MultiGraphNodeProto = _reflection.GeneratedProtocolMessageType('MultiGraphNodeProto', (_message.Message,), dict(
DESCRIPTOR = _MULTIGRAPHNODEPROTO,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.MultiGraphNodeProto)
))
_sym_db.RegisterMessage(MultiGraphNodeProto)
AdviceProto = _reflection.GeneratedProtocolMessageType('AdviceProto', (_message.Message,), dict(
CheckersEntry = _reflection.GeneratedProtocolMessageType('CheckersEntry', (_message.Message,), dict(
DESCRIPTOR = _ADVICEPROTO_CHECKERSENTRY,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.AdviceProto.CheckersEntry)
))
,
Checker = _reflection.GeneratedProtocolMessageType('Checker', (_message.Message,), dict(
DESCRIPTOR = _ADVICEPROTO_CHECKER,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.AdviceProto.Checker)
))
,
DESCRIPTOR = _ADVICEPROTO,
__module__ = 'tensorflow.core.profiler.tfprof_output_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.tfprof.AdviceProto)
))
_sym_db.RegisterMessage(AdviceProto)
_sym_db.RegisterMessage(AdviceProto.CheckersEntry)
_sym_db.RegisterMessage(AdviceProto.Checker)
_GRAPHNODEPROTO_INPUTSHAPESENTRY._options = None
_ADVICEPROTO_CHECKERSENTRY._options = None
# @@protoc_insertion_point(module_scope)
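# ---------------------------------------------------------------------------
# A minimal usage sketch (not part of the generated module), assuming this file
# is importable as tensorflow.core.profiler.tfprof_output_pb2. The classes
# registered above follow the standard protobuf Python API; the field values
# below are purely illustrative.
if __name__ == '__main__':
    node = GraphNodeProto()
    node.name = 'conv1'
    node.total_parameters = 9408          # int64 scalar field
    child = node.children.add()           # 'children' is a repeated GraphNodeProto
    child.name = 'conv1/weights'
    wire = node.SerializeToString()       # round-trip through the wire format
    restored = GraphNodeProto()
    restored.ParseFromString(wire)
    assert restored.children[0].name == 'conv1/weights'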
# ---------------------------------------------------------------------------
# cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_rsi_cfg.py
# repo: tkamata-test/ydk-py @ b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
# blob: 129758836690e6cf83c058208d8e2242c72374b3 (128,517 bytes, Python)
# licenses: ECL-2.0, Apache-2.0
# ---------------------------------------------------------------------------
""" Cisco_IOS_XR_infra_rsi_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-rsi package configuration.
This module contains definitions
for the following management objects\:
vrfs\: VRF configuration
global\-af\: global af
srlg\: srlg
vrf\-groups\: vrf groups
selective\-vrf\-download\: selective vrf download
This YANG module augments the
Cisco\-IOS\-XR\-ifmgr\-cfg,
Cisco\-IOS\-XR\-snmp\-agent\-cfg
modules with configuration data.
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class SrlgPriorityEnum(Enum):
"""
SrlgPriorityEnum
Srlg priority
.. data:: critical = 0
Critical
.. data:: high = 1
High
.. data:: default = 2
Default
.. data:: low = 3
Low
.. data:: very_low = 4
Very low
"""
critical = 0
high = 1
default = 2
low = 3
very_low = 4
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['SrlgPriorityEnum']
class VrfAddressFamilyEnum(Enum):
"""
VrfAddressFamilyEnum
Vrf address family
.. data:: ipv4 = 1
IPv4
.. data:: ipv6 = 2
IPv6
"""
ipv4 = 1
ipv6 = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfAddressFamilyEnum']
class VrfSubAddressFamilyEnum(Enum):
"""
VrfSubAddressFamilyEnum
Vrf sub address family
.. data:: unicast = 1
Unicast
.. data:: multicast = 2
Multicast
.. data:: flow_spec = 133
Flow spec
"""
unicast = 1
multicast = 2
flow_spec = 133
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfSubAddressFamilyEnum']
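# A quick sanity check, as a minimal sketch: the member values of these enums
# are the encoded YANG identity values listed in the docstrings above.
if __name__ == '__main__':
    assert VrfAddressFamilyEnum.ipv4.value == 1
    assert VrfSubAddressFamilyEnum.flow_spec.value == 133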
class Vrfs(object):
"""
VRF configuration
.. attribute:: vrf
VRF configuration
**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.vrf = YList()
self.vrf.parent = self
self.vrf.name = 'vrf'
class Vrf(object):
"""
VRF configuration
.. attribute:: vrf_name <key>
VRF name
**type**\: str
**length:** 0..32
.. attribute:: afs
VRF address family configuration
**type**\: :py:class:`Afs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs>`
.. attribute:: create
VRF global configuration
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: description
A textual description of the VRF
**type**\: str
**length:** 0..244
.. attribute:: fallback_vrf
Fallback VRF
**type**\: str
**length:** 0..32
.. attribute:: mode_big
Configuration enable of big VRF
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: multicast_host
Multicast host stack configuration
**type**\: :py:class:`MulticastHost <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.MulticastHost>`
.. attribute:: remote_route_filter_disable
For disabling remote route filtering for this VRF on core\-facing card
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpn_id
VPN\-ID for the VRF
**type**\: :py:class:`VpnId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.VpnId>`
**presence node**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vrf_name = None
self.afs = Vrfs.Vrf.Afs()
self.afs.parent = self
self.create = None
self.description = None
self.fallback_vrf = None
self.mode_big = None
self.multicast_host = Vrfs.Vrf.MulticastHost()
self.multicast_host.parent = self
self.remote_route_filter_disable = None
self.vpn_id = None
class VpnId(object):
"""
VPN\-ID for the VRF
.. attribute:: vpn_index
Index portion of the VPN ID
**type**\: int
**range:** 0..16777215
**mandatory**\: True
.. attribute:: vpn_oui
OUI portion of the VPN ID
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self._is_presence = True
self.vpn_index = None
self.vpn_oui = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:vpn-id'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.vpn_index is not None:
return True
if self.vpn_oui is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.VpnId']['meta_info']
class Afs(object):
"""
VRF address family configuration
.. attribute:: af
VRF address family configuration
**type**\: list of :py:class:`Af <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.af = YList()
self.af.parent = self
self.af.name = 'af'
class Af(object):
"""
VRF address family configuration
.. attribute:: af_name <key>
Address family
**type**\: :py:class:`VrfAddressFamilyEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfAddressFamilyEnum>`
.. attribute:: saf_name <key>
Sub\-Address family
**type**\: :py:class:`VrfSubAddressFamilyEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfSubAddressFamilyEnum>`
.. attribute:: topology_name <key>
Topology name
**type**\: str
**length:** 0..244
.. attribute:: bgp
BGP AF VRF config
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp>`
.. attribute:: create
VRF configuration for a particular address family
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: maximum_prefix
Set maximum prefix limits
**type**\: :py:class:`MaximumPrefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.MaximumPrefix>`
**presence node**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.af_name = None
self.saf_name = None
self.topology_name = None
self.bgp = Vrfs.Vrf.Afs.Af.Bgp()
self.bgp.parent = self
self.create = None
self.maximum_prefix = None
class MaximumPrefix(object):
"""
Set maximum prefix limits
.. attribute:: mid_threshold
Mid\-threshold (% of maximum)
**type**\: int
**range:** 1..100
.. attribute:: prefix_limit
Set table's maximum prefix limit
**type**\: int
**range:** 32..5000000
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ip-rib-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.mid_threshold = None
self.prefix_limit = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-rib-cfg:maximum-prefix'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.mid_threshold is not None:
return True
if self.prefix_limit is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.MaximumPrefix']['meta_info']
class Bgp(object):
"""
BGP AF VRF config
.. attribute:: export_route_policy
Route policy for export filtering
**type**\: str
.. attribute:: export_route_targets
Export Route targets
**type**\: :py:class:`ExportRouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets>`
.. attribute:: export_vrf_options
Export VRF options
**type**\: :py:class:`ExportVrfOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportVrfOptions>`
.. attribute:: global_to_vrf_import_route_policy
Route policy for global to vrf import filtering
**type**\: :py:class:`GlobalToVrfImportRoutePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.GlobalToVrfImportRoutePolicy>`
**presence node**\: True
.. attribute:: import_route_policy
Route policy for import filtering
**type**\: str
.. attribute:: import_route_targets
Import Route targets
**type**\: :py:class:`ImportRouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets>`
.. attribute:: vrf_to_global_export_route_policy
Route policy for vrf to global export filtering
**type**\: :py:class:`VrfToGlobalExportRoutePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.VrfToGlobalExportRoutePolicy>`
**presence node**\: True
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.export_route_policy = None
self.export_route_targets = Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets()
self.export_route_targets.parent = self
self.export_vrf_options = Vrfs.Vrf.Afs.Af.Bgp.ExportVrfOptions()
self.export_vrf_options.parent = self
self.global_to_vrf_import_route_policy = None
self.import_route_policy = None
self.import_route_targets = Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets()
self.import_route_targets.parent = self
self.vrf_to_global_export_route_policy = None
class ImportRouteTargets(object):
"""
Import Route targets
.. attribute:: route_targets
Route target table
**type**\: :py:class:`RouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.route_targets = Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets()
self.route_targets.parent = self
class RouteTargets(object):
"""
Route target table
.. attribute:: route_target
Route target
**type**\: list of :py:class:`RouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.route_target = YList()
self.route_target.parent = self
self.route_target.name = 'route_target'
class RouteTarget(object):
"""
Route target
.. attribute:: type <key>
Type of RT
**type**\: :py:class:`BgpVrfRouteTargetEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_cfg.BgpVrfRouteTargetEnum>`
.. attribute:: as_or_four_byte_as
as or four byte as
**type**\: list of :py:class:`AsOrFourByteAs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget.AsOrFourByteAs>`
.. attribute:: ipv4_address
ipv4 address
**type**\: list of :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget.Ipv4Address>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.type = None
self.as_or_four_byte_as = YList()
self.as_or_four_byte_as.parent = self
self.as_or_four_byte_as.name = 'as_or_four_byte_as'
self.ipv4_address = YList()
self.ipv4_address.parent = self
self.ipv4_address.name = 'ipv4_address'
class AsOrFourByteAs(object):
"""
as or four byte as
.. attribute:: as_ <key>
AS number
**type**\: int
**range:** 1..4294967295
.. attribute:: as_index <key>
AS number Index
**type**\: int
**range:** 0..4294967295
.. attribute:: as_xx <key>
AS number
**type**\: int
**range:** 0..4294967295
.. attribute:: stitching_rt <key>
Stitching RT
**type**\: int
**range:** 0..1
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.as_ = None
self.as_index = None
self.as_xx = None
self.stitching_rt = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.as_ is None:
raise YPYModelError('Key property as_ is None')
if self.as_index is None:
raise YPYModelError('Key property as_index is None')
if self.as_xx is None:
raise YPYModelError('Key property as_xx is None')
if self.stitching_rt is None:
raise YPYModelError('Key property stitching_rt is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:as-or-four-byte-as[Cisco-IOS-XR-ipv4-bgp-cfg:as = ' + str(self.as_) + '][Cisco-IOS-XR-ipv4-bgp-cfg:as-index = ' + str(self.as_index) + '][Cisco-IOS-XR-ipv4-bgp-cfg:as-xx = ' + str(self.as_xx) + '][Cisco-IOS-XR-ipv4-bgp-cfg:stitching-rt = ' + str(self.stitching_rt) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.as_ is not None:
return True
if self.as_index is not None:
return True
if self.as_xx is not None:
return True
if self.stitching_rt is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget.AsOrFourByteAs']['meta_info']
class Ipv4Address(object):
"""
ipv4 address
.. attribute:: address <key>
IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: address_index <key>
IP address Index
**type**\: int
**range:** 0..65535
.. attribute:: stitching_rt <key>
Stitching RT
**type**\: int
**range:** 0..1
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.address = None
self.address_index = None
self.stitching_rt = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.address is None:
raise YPYModelError('Key property address is None')
if self.address_index is None:
raise YPYModelError('Key property address_index is None')
if self.stitching_rt is None:
raise YPYModelError('Key property stitching_rt is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:ipv4-address[Cisco-IOS-XR-ipv4-bgp-cfg:address = ' + str(self.address) + '][Cisco-IOS-XR-ipv4-bgp-cfg:address-index = ' + str(self.address_index) + '][Cisco-IOS-XR-ipv4-bgp-cfg:stitching-rt = ' + str(self.stitching_rt) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.address is not None:
return True
if self.address_index is not None:
return True
if self.stitching_rt is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget.Ipv4Address']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.type is None:
raise YPYModelError('Key property type is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:route-target[Cisco-IOS-XR-ipv4-bgp-cfg:type = ' + str(self.type) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.type is not None:
return True
if self.as_or_four_byte_as is not None:
for child_ref in self.as_or_four_byte_as:
if child_ref._has_data():
return True
if self.ipv4_address is not None:
for child_ref in self.ipv4_address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets.RouteTarget']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:route-targets'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.route_target is not None:
for child_ref in self.route_target:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets.RouteTargets']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:import-route-targets'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.route_targets is not None and self.route_targets._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ImportRouteTargets']['meta_info']
class ExportRouteTargets(object):
"""
Export Route targets
.. attribute:: route_targets
Route target table
**type**\: :py:class:`RouteTargets <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.route_targets = Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets()
self.route_targets.parent = self
class RouteTargets(object):
"""
Route target table
.. attribute:: route_target
Route target
**type**\: list of :py:class:`RouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.route_target = YList()
self.route_target.parent = self
self.route_target.name = 'route_target'
class RouteTarget(object):
"""
Route target
.. attribute:: type <key>
Type of RT
**type**\: :py:class:`BgpVrfRouteTargetEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_cfg.BgpVrfRouteTargetEnum>`
.. attribute:: as_or_four_byte_as
as or four byte as
**type**\: list of :py:class:`AsOrFourByteAs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget.AsOrFourByteAs>`
.. attribute:: ipv4_address
ipv4 address
**type**\: list of :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget.Ipv4Address>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.type = None
self.as_or_four_byte_as = YList()
self.as_or_four_byte_as.parent = self
self.as_or_four_byte_as.name = 'as_or_four_byte_as'
self.ipv4_address = YList()
self.ipv4_address.parent = self
self.ipv4_address.name = 'ipv4_address'
class AsOrFourByteAs(object):
"""
as or four byte as
.. attribute:: as_ <key>
AS number
**type**\: int
**range:** 1..4294967295
.. attribute:: as_index <key>
AS number Index
**type**\: int
**range:** 0..4294967295
.. attribute:: as_xx <key>
AS number
**type**\: int
**range:** 0..4294967295
.. attribute:: stitching_rt <key>
Stitching RT
**type**\: int
**range:** 0..1
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.as_ = None
self.as_index = None
self.as_xx = None
self.stitching_rt = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.as_ is None:
raise YPYModelError('Key property as_ is None')
if self.as_index is None:
raise YPYModelError('Key property as_index is None')
if self.as_xx is None:
raise YPYModelError('Key property as_xx is None')
if self.stitching_rt is None:
raise YPYModelError('Key property stitching_rt is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:as-or-four-byte-as[Cisco-IOS-XR-ipv4-bgp-cfg:as = ' + str(self.as_) + '][Cisco-IOS-XR-ipv4-bgp-cfg:as-index = ' + str(self.as_index) + '][Cisco-IOS-XR-ipv4-bgp-cfg:as-xx = ' + str(self.as_xx) + '][Cisco-IOS-XR-ipv4-bgp-cfg:stitching-rt = ' + str(self.stitching_rt) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.as_ is not None:
return True
if self.as_index is not None:
return True
if self.as_xx is not None:
return True
if self.stitching_rt is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget.AsOrFourByteAs']['meta_info']
class Ipv4Address(object):
"""
ipv4 address
.. attribute:: address <key>
IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: address_index <key>
IP address Index
**type**\: int
**range:** 0..65535
.. attribute:: stitching_rt <key>
Stitching RT
**type**\: int
**range:** 0..1
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.address = None
self.address_index = None
self.stitching_rt = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.address is None:
raise YPYModelError('Key property address is None')
if self.address_index is None:
raise YPYModelError('Key property address_index is None')
if self.stitching_rt is None:
raise YPYModelError('Key property stitching_rt is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:ipv4-address[Cisco-IOS-XR-ipv4-bgp-cfg:address = ' + str(self.address) + '][Cisco-IOS-XR-ipv4-bgp-cfg:address-index = ' + str(self.address_index) + '][Cisco-IOS-XR-ipv4-bgp-cfg:stitching-rt = ' + str(self.stitching_rt) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.address is not None:
return True
if self.address_index is not None:
return True
if self.stitching_rt is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget.Ipv4Address']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.type is None:
raise YPYModelError('Key property type is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:route-target[Cisco-IOS-XR-ipv4-bgp-cfg:type = ' + str(self.type) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.type is not None:
return True
if self.as_or_four_byte_as is not None:
for child_ref in self.as_or_four_byte_as:
if child_ref._has_data():
return True
if self.ipv4_address is not None:
for child_ref in self.ipv4_address:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets.RouteTarget']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:route-targets'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.route_target is not None:
for child_ref in self.route_target:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets.RouteTargets']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:export-route-targets'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.route_targets is not None and self.route_targets._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportRouteTargets']['meta_info']
class VrfToGlobalExportRoutePolicy(object):
"""
Route policy for vrf to global export filtering
.. attribute:: allow_imported_vpn
TRUE Enable imported VPN paths to be exported to Default VRF. FALSE Disable imported VPN paths to be exported to Default VRF
**type**\: bool
.. attribute:: route_policy_name
Vrf to global export route policy
**type**\: str
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self._is_presence = True
self.allow_imported_vpn = None
self.route_policy_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:vrf-to-global-export-route-policy'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.allow_imported_vpn is not None:
return True
if self.route_policy_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.VrfToGlobalExportRoutePolicy']['meta_info']
class ExportVrfOptions(object):
"""
Export VRF options
.. attribute:: allow_imported_vpn
TRUE Enable imported VPN paths to be exported to non\-default VRF. FALSE Disable imported VPN paths to be exported to non\-default VRF
**type**\: bool
.. attribute:: import_stitching_rt
TRUE Use stitching RTs to import extranet paths. FALSE Use regular RTs to import extranet paths
**type**\: bool
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self.allow_imported_vpn = None
self.import_stitching_rt = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:export-vrf-options'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.allow_imported_vpn is not None:
return True
if self.import_stitching_rt is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.ExportVrfOptions']['meta_info']
class GlobalToVrfImportRoutePolicy(object):
"""
Route policy for global to vrf import filtering
.. attribute:: advertise_as_vpn
TRUE Enable advertising imported paths to PEs. FALSE Disable advertising imported paths to PEs
**type**\: bool
.. attribute:: route_policy_name
Global to vrf import route policy
**type**\: str
**mandatory**\: True
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ipv4-bgp-cfg'
_revision = '2015-08-27'
def __init__(self):
self.parent = None
self._is_presence = True
self.advertise_as_vpn = None
self.route_policy_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:global-to-vrf-import-route-policy'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.advertise_as_vpn is not None:
return True
if self.route_policy_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp.GlobalToVrfImportRoutePolicy']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-bgp-cfg:bgp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.export_route_policy is not None:
return True
if self.export_route_targets is not None and self.export_route_targets._has_data():
return True
if self.export_vrf_options is not None and self.export_vrf_options._has_data():
return True
if self.global_to_vrf_import_route_policy is not None and self.global_to_vrf_import_route_policy._has_data():
return True
if self.import_route_policy is not None:
return True
if self.import_route_targets is not None and self.import_route_targets._has_data():
return True
if self.vrf_to_global_export_route_policy is not None and self.vrf_to_global_export_route_policy._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af.Bgp']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.af_name is None:
raise YPYModelError('Key property af_name is None')
if self.saf_name is None:
raise YPYModelError('Key property saf_name is None')
if self.topology_name is None:
raise YPYModelError('Key property topology_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:af[Cisco-IOS-XR-infra-rsi-cfg:af-name = ' + str(self.af_name) + '][Cisco-IOS-XR-infra-rsi-cfg:saf-name = ' + str(self.saf_name) + '][Cisco-IOS-XR-infra-rsi-cfg:topology-name = ' + str(self.topology_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.af_name is not None:
return True
if self.saf_name is not None:
return True
if self.topology_name is not None:
return True
if self.bgp is not None and self.bgp._has_data():
return True
if self.create is not None:
return True
if self.maximum_prefix is not None and self.maximum_prefix._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs.Af']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:afs'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.af is not None:
for child_ref in self.af:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.Afs']['meta_info']
class MulticastHost(object):
"""
Multicast host stack configuration
.. attribute:: ipv4
IPv4 configuration
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.MulticastHost.Ipv4>`
.. attribute:: ipv6
IPv6 configuration
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Vrfs.Vrf.MulticastHost.Ipv6>`
"""
_prefix = 'ip-iarm-vrf-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.ipv4 = Vrfs.Vrf.MulticastHost.Ipv4()
self.ipv4.parent = self
self.ipv6 = Vrfs.Vrf.MulticastHost.Ipv6()
self.ipv6.parent = self
class Ipv4(object):
"""
IPv4 configuration
.. attribute:: interface
Default multicast host interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'ip-iarm-vrf-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-iarm-vrf-cfg:ipv4'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.MulticastHost.Ipv4']['meta_info']
class Ipv6(object):
"""
IPv6 configuration
.. attribute:: interface
Default multicast host interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'ip-iarm-vrf-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-iarm-vrf-cfg:ipv6'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.MulticastHost.Ipv6']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ip-iarm-vrf-cfg:multicast-host'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.ipv4 is not None and self.ipv4._has_data():
return True
if self.ipv6 is not None and self.ipv6._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf.MulticastHost']['meta_info']
@property
def _common_path(self):
if self.vrf_name is None:
raise YPYModelError('Key property vrf_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:vrfs/Cisco-IOS-XR-infra-rsi-cfg:vrf[Cisco-IOS-XR-infra-rsi-cfg:vrf-name = ' + str(self.vrf_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf_name is not None:
return True
if self.afs is not None and self.afs._has_data():
return True
if self.create is not None:
return True
if self.description is not None:
return True
if self.fallback_vrf is not None:
return True
if self.mode_big is not None:
return True
if self.multicast_host is not None and self.multicast_host._has_data():
return True
if self.remote_route_filter_disable is not None:
return True
if self.vpn_id is not None and self.vpn_id._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs.Vrf']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:vrfs'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf is not None:
for child_ref in self.vrf:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Vrfs']['meta_info']
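# A minimal construction sketch with illustrative values (the VRF name, the
# description, and the prefix-limit numbers are made up): one VRF carrying an
# IPv4 unicast address family. In a real script the populated Vrfs object would
# be pushed to a device through YDK's CRUD services, which are not shown here.
if __name__ == '__main__':
    vrfs = Vrfs()
    vrf = Vrfs.Vrf()
    vrf.vrf_name = 'BLUE'                     # list key
    vrf.create = Empty()                      # presence leaf: create the VRF
    vrf.description = 'example VRF'
    af = Vrfs.Vrf.Afs.Af()
    af.af_name = VrfAddressFamilyEnum.ipv4    # list keys: AF, SAF, topology
    af.saf_name = VrfSubAddressFamilyEnum.unicast
    af.topology_name = 'default'
    af.create = Empty()
    af.maximum_prefix = Vrfs.Vrf.Afs.Af.MaximumPrefix()   # presence container
    af.maximum_prefix.prefix_limit = 100000               # range 32..5000000
    af.maximum_prefix.mid_threshold = 75                  # % of maximum
    vrf.afs.af.append(af)
    vrfs.vrf.append(vrf)
    assert vrfs._has_data()
    print(vrf._common_path)  # XPath-style path used when applying the config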
class GlobalAf(object):
"""
global af
.. attribute:: afs
VRF address family configuration
**type**\: :py:class:`Afs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.GlobalAf.Afs>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.afs = GlobalAf.Afs()
self.afs.parent = self
class Afs(object):
"""
VRF address family configuration
.. attribute:: af
VRF address family configuration
**type**\: list of :py:class:`Af <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.GlobalAf.Afs.Af>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.af = YList()
self.af.parent = self
self.af.name = 'af'
class Af(object):
"""
VRF address family configuration
.. attribute:: af_name <key>
Address family
**type**\: :py:class:`VrfAddressFamilyEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfAddressFamilyEnum>`
.. attribute:: saf_name <key>
Sub\-Address family
**type**\: :py:class:`VrfSubAddressFamilyEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfSubAddressFamilyEnum>`
.. attribute:: topology_name <key>
Topology name
**type**\: str
**length:** 0..244
.. attribute:: create
VRF configuration for a particular address family
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.af_name = None
self.saf_name = None
self.topology_name = None
self.create = None
@property
def _common_path(self):
if self.af_name is None:
raise YPYModelError('Key property af_name is None')
if self.saf_name is None:
raise YPYModelError('Key property saf_name is None')
if self.topology_name is None:
raise YPYModelError('Key property topology_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:global-af/Cisco-IOS-XR-infra-rsi-cfg:afs/Cisco-IOS-XR-infra-rsi-cfg:af[Cisco-IOS-XR-infra-rsi-cfg:af-name = ' + str(self.af_name) + '][Cisco-IOS-XR-infra-rsi-cfg:saf-name = ' + str(self.saf_name) + '][Cisco-IOS-XR-infra-rsi-cfg:topology-name = ' + str(self.topology_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.af_name is not None:
return True
if self.saf_name is not None:
return True
if self.topology_name is not None:
return True
if self.create is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['GlobalAf.Afs.Af']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:global-af/Cisco-IOS-XR-infra-rsi-cfg:afs'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.af is not None:
for child_ref in self.af:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['GlobalAf.Afs']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:global-af'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.afs is not None and self.afs._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['GlobalAf']['meta_info']
class Srlg(object):
"""
srlg
.. attribute:: enable
Enable SRLG
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: groups
Set of groups configured with SRLG
**type**\: :py:class:`Groups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Groups>`
.. attribute:: inherit_nodes
Set of inherit nodes configured with SRLG
**type**\: :py:class:`InheritNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.InheritNodes>`
.. attribute:: interfaces
Set of interfaces configured with SRLG
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces>`
.. attribute:: srlg_names
Set of SRLG name configuration
**type**\: :py:class:`SrlgNames <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.SrlgNames>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.enable = None
self.groups = Srlg.Groups()
self.groups.parent = self
self.inherit_nodes = Srlg.InheritNodes()
self.inherit_nodes.parent = self
self.interfaces = Srlg.Interfaces()
self.interfaces.parent = self
self.srlg_names = Srlg.SrlgNames()
self.srlg_names.parent = self
class Interfaces(object):
"""
Set of interfaces configured with SRLG
.. attribute:: interface
Interface configurations
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
class Interface(object):
"""
Interface configurations
.. attribute:: interface_name <key>
Interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: enable
Enable SRLG interface
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: include_optical
Include optical configuration for an interface
**type**\: :py:class:`IncludeOptical <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.IncludeOptical>`
.. attribute:: interface_group
Group configuration for an interface
**type**\: :py:class:`InterfaceGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.InterfaceGroup>`
.. attribute:: interface_srlg_names
SRLG Name configuration for an interface
**type**\: :py:class:`InterfaceSrlgNames <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.InterfaceSrlgNames>`
.. attribute:: values
SRLG Value configuration for an interface
**type**\: :py:class:`Values <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.Values>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.interface_name = None
self.enable = None
self.include_optical = Srlg.Interfaces.Interface.IncludeOptical()
self.include_optical.parent = self
self.interface_group = Srlg.Interfaces.Interface.InterfaceGroup()
self.interface_group.parent = self
self.interface_srlg_names = Srlg.Interfaces.Interface.InterfaceSrlgNames()
self.interface_srlg_names.parent = self
self.values = Srlg.Interfaces.Interface.Values()
self.values.parent = self
class IncludeOptical(object):
"""
Include optical configuration for an interface
.. attribute:: enable
Enable SRLG interface include optical
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: priority
Priority for optical domain values
**type**\: :py:class:`SrlgPriorityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.SrlgPriorityEnum>`
**default value**\: default
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.enable = None
self.priority = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:include-optical'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.enable is not None:
return True
if self.priority is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.IncludeOptical']['meta_info']
class InterfaceGroup(object):
"""
Group configuration for an interface
.. attribute:: enable
Enable SRLG interface group submode
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: group_names
Set of group name under an interface
**type**\: :py:class:`GroupNames <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.InterfaceGroup.GroupNames>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.enable = None
self.group_names = Srlg.Interfaces.Interface.InterfaceGroup.GroupNames()
self.group_names.parent = self
class GroupNames(object):
"""
Set of group name under an interface
.. attribute:: group_name
Group name included under interface
**type**\: list of :py:class:`GroupName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.InterfaceGroup.GroupNames.GroupName>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group_name = YList()
self.group_name.parent = self
self.group_name.name = 'group_name'
class GroupName(object):
"""
Group name included under interface
.. attribute:: group_name_index <key>
Group name index
**type**\: int
**range:** 0..4294967295
.. attribute:: group_name
Group name
**type**\: str
**mandatory**\: True
.. attribute:: srlg_priority
SRLG priority
**type**\: :py:class:`SrlgPriorityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.SrlgPriorityEnum>`
**default value**\: default
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group_name_index = None
self.group_name = None
self.srlg_priority = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.group_name_index is None:
raise YPYModelError('Key property group_name_index is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:group-name[Cisco-IOS-XR-infra-rsi-cfg:group-name-index = ' + str(self.group_name_index) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group_name_index is not None:
return True
if self.group_name is not None:
return True
if self.srlg_priority is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.InterfaceGroup.GroupNames.GroupName']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:group-names'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group_name is not None:
for child_ref in self.group_name:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.InterfaceGroup.GroupNames']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:interface-group'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.enable is not None:
return True
if self.group_names is not None and self.group_names._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.InterfaceGroup']['meta_info']
class Values(object):
"""
SRLG Value configuration for an interface
.. attribute:: value
SRLG value data
**type**\: list of :py:class:`Value <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.Values.Value>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.value = YList()
self.value.parent = self
self.value.name = 'value'
class Value(object):
"""
SRLG value data
.. attribute:: srlg_index <key>
SRLG index
**type**\: int
**range:** 1..65535
.. attribute:: srlg_priority
SRLG priority
**type**\: :py:class:`SrlgPriorityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.SrlgPriorityEnum>`
**default value**\: default
.. attribute:: srlg_value
SRLG value
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_index = None
self.srlg_priority = None
self.srlg_value = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.srlg_index is None:
raise YPYModelError('Key property srlg_index is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:value[Cisco-IOS-XR-infra-rsi-cfg:srlg-index = ' + str(self.srlg_index) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_index is not None:
return True
if self.srlg_priority is not None:
return True
if self.srlg_value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.Values.Value']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:values'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.value is not None:
for child_ref in self.value:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.Values']['meta_info']
class InterfaceSrlgNames(object):
"""
SRLG Name configuration for an interface
.. attribute:: interface_srlg_name
SRLG name data
**type**\: list of :py:class:`InterfaceSrlgName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Interfaces.Interface.InterfaceSrlgNames.InterfaceSrlgName>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.interface_srlg_name = YList()
self.interface_srlg_name.parent = self
self.interface_srlg_name.name = 'interface_srlg_name'
class InterfaceSrlgName(object):
"""
SRLG name data
.. attribute:: srlg_name <key>
SRLG name
**type**\: str
**length:** 0..64
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.srlg_name is None:
raise YPYModelError('Key property srlg_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:interface-srlg-name[Cisco-IOS-XR-infra-rsi-cfg:srlg-name = ' + str(self.srlg_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.InterfaceSrlgNames.InterfaceSrlgName']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:interface-srlg-names'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_srlg_name is not None:
for child_ref in self.interface_srlg_name:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface.InterfaceSrlgNames']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:interfaces/Cisco-IOS-XR-infra-rsi-cfg:interface[Cisco-IOS-XR-infra-rsi-cfg:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.enable is not None:
return True
if self.include_optical is not None and self.include_optical._has_data():
return True
if self.interface_group is not None and self.interface_group._has_data():
return True
if self.interface_srlg_names is not None and self.interface_srlg_names._has_data():
return True
if self.values is not None and self.values._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:interfaces'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Interfaces']['meta_info']
class SrlgNames(object):
"""
Set of SRLG name configuration
.. attribute:: srlg_name
SRLG name configuration
**type**\: list of :py:class:`SrlgName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.SrlgNames.SrlgName>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_name = YList()
self.srlg_name.parent = self
self.srlg_name.name = 'srlg_name'
class SrlgName(object):
"""
SRLG name configuration
.. attribute:: srlg_name <key>
SRLG name
**type**\: str
**length:** 0..64
.. attribute:: srlg_value
SRLG value
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_name = None
self.srlg_value = None
@property
def _common_path(self):
if self.srlg_name is None:
raise YPYModelError('Key property srlg_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:srlg-names/Cisco-IOS-XR-infra-rsi-cfg:srlg-name[Cisco-IOS-XR-infra-rsi-cfg:srlg-name = ' + str(self.srlg_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_name is not None:
return True
if self.srlg_value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.SrlgNames.SrlgName']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:srlg-names'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_name is not None:
for child_ref in self.srlg_name:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.SrlgNames']['meta_info']
class Groups(object):
"""
Set of groups configured with SRLG
.. attribute:: group
Group configurations
**type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Groups.Group>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group = YList()
self.group.parent = self
self.group.name = 'group'
class Group(object):
"""
Group configurations
.. attribute:: group_name <key>
Group name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: enable
Enable SRLG group
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: group_values
Set of SRLG values configured under a group
**type**\: :py:class:`GroupValues <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Groups.Group.GroupValues>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group_name = None
self.enable = None
self.group_values = Srlg.Groups.Group.GroupValues()
self.group_values.parent = self
class GroupValues(object):
"""
Set of SRLG values configured under a group
.. attribute:: group_value
Group SRLG values with attribute
**type**\: list of :py:class:`GroupValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.Groups.Group.GroupValues.GroupValue>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group_value = YList()
self.group_value.parent = self
self.group_value.name = 'group_value'
class GroupValue(object):
"""
Group SRLG values with attribute
.. attribute:: srlg_index <key>
SRLG index
**type**\: int
**range:** 1..65535
.. attribute:: srlg_priority
SRLG priority
**type**\: :py:class:`SrlgPriorityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.SrlgPriorityEnum>`
**default value**\: default
.. attribute:: srlg_value
SRLG value
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_index = None
self.srlg_priority = None
self.srlg_value = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.srlg_index is None:
raise YPYModelError('Key property srlg_index is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:group-value[Cisco-IOS-XR-infra-rsi-cfg:srlg-index = ' + str(self.srlg_index) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_index is not None:
return True
if self.srlg_priority is not None:
return True
if self.srlg_value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Groups.Group.GroupValues.GroupValue']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:group-values'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group_value is not None:
for child_ref in self.group_value:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Groups.Group.GroupValues']['meta_info']
@property
def _common_path(self):
if self.group_name is None:
raise YPYModelError('Key property group_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:groups/Cisco-IOS-XR-infra-rsi-cfg:group[Cisco-IOS-XR-infra-rsi-cfg:group-name = ' + str(self.group_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group_name is not None:
return True
if self.enable is not None:
return True
if self.group_values is not None and self.group_values._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Groups.Group']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:groups'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group is not None:
for child_ref in self.group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.Groups']['meta_info']
class InheritNodes(object):
"""
Set of inherit nodes configured with SRLG
.. attribute:: inherit_node
Inherit node configurations
**type**\: list of :py:class:`InheritNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.InheritNodes.InheritNode>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.inherit_node = YList()
self.inherit_node.parent = self
self.inherit_node.name = 'inherit_node'
class InheritNode(object):
"""
Inherit node configurations
.. attribute:: inherit_node_name <key>
The inherit node name
**type**\: str
**pattern:** ((([a\-zA\-Z0\-9\_]\*\\d+)\|(\\\*))/){2}(([a\-zA\-Z0\-9\_]\*\\d+)\|(\\\*))
.. attribute:: enable
Enable SRLG inherit node
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: inherit_node_values
Set of SRLG values configured under an inherit node
**type**\: :py:class:`InheritNodeValues <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.InheritNodes.InheritNode.InheritNodeValues>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.inherit_node_name = None
self.enable = None
self.inherit_node_values = Srlg.InheritNodes.InheritNode.InheritNodeValues()
self.inherit_node_values.parent = self
class InheritNodeValues(object):
"""
Set of SRLG values configured under an inherit
node
.. attribute:: inherit_node_value
Inherit node SRLG value with attributes
**type**\: list of :py:class:`InheritNodeValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.Srlg.InheritNodes.InheritNode.InheritNodeValues.InheritNodeValue>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.inherit_node_value = YList()
self.inherit_node_value.parent = self
self.inherit_node_value.name = 'inherit_node_value'
class InheritNodeValue(object):
"""
Inherit node SRLG value with attributes
.. attribute:: srlg_index <key>
SRLG index
**type**\: int
**range:** 1..65535
.. attribute:: srlg_priority
SRLG priority
**type**\: :py:class:`SrlgPriorityEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.SrlgPriorityEnum>`
**default value**\: default
.. attribute:: srlg_value
SRLG value
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.srlg_index = None
self.srlg_priority = None
self.srlg_value = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.srlg_index is None:
raise YPYModelError('Key property srlg_index is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:inherit-node-value[Cisco-IOS-XR-infra-rsi-cfg:srlg-index = ' + str(self.srlg_index) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg_index is not None:
return True
if self.srlg_priority is not None:
return True
if self.srlg_value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.InheritNodes.InheritNode.InheritNodeValues.InheritNodeValue']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:inherit-node-values'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.inherit_node_value is not None:
for child_ref in self.inherit_node_value:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.InheritNodes.InheritNode.InheritNodeValues']['meta_info']
@property
def _common_path(self):
if self.inherit_node_name is None:
raise YPYModelError('Key property inherit_node_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:inherit-nodes/Cisco-IOS-XR-infra-rsi-cfg:inherit-node[Cisco-IOS-XR-infra-rsi-cfg:inherit-node-name = ' + str(self.inherit_node_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.inherit_node_name is not None:
return True
if self.enable is not None:
return True
if self.inherit_node_values is not None and self.inherit_node_values._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.InheritNodes.InheritNode']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg/Cisco-IOS-XR-infra-rsi-cfg:inherit-nodes'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.inherit_node is not None:
for child_ref in self.inherit_node:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg.InheritNodes']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:srlg'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.enable is not None:
return True
if self.groups is not None and self.groups._has_data():
return True
if self.inherit_nodes is not None and self.inherit_nodes._has_data():
return True
if self.interfaces is not None and self.interfaces._has_data():
return True
if self.srlg_names is not None and self.srlg_names._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['Srlg']['meta_info']
class VrfGroups(object):
"""
vrf groups
.. attribute:: vrf_group
VRF group configuration
**type**\: list of :py:class:`VrfGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfGroups.VrfGroup>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.vrf_group = YList()
self.vrf_group.parent = self
self.vrf_group.name = 'vrf_group'
class VrfGroup(object):
"""
VRF group configuration
.. attribute:: vrf_group_name <key>
VRF group name
**type**\: str
**length:** 0..32
.. attribute:: enable
Enable VRF group
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vrfs
Set of VRFs configured under a VRF group
**type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfGroups.VrfGroup.Vrfs>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vrf_group_name = None
self.enable = None
self.vrfs = VrfGroups.VrfGroup.Vrfs()
self.vrfs.parent = self
class Vrfs(object):
"""
Set of VRFs configured under a VRF group
.. attribute:: vrf
VRF configuration
**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rsi_cfg.VrfGroups.VrfGroup.Vrfs.Vrf>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vrf = YList()
self.vrf.parent = self
self.vrf.name = 'vrf'
class Vrf(object):
"""
VRF configuration
.. attribute:: vrf_name <key>
VRF name
**type**\: str
**length:** 0..32
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.vrf_name is None:
raise YPYModelError('Key property vrf_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:vrf[Cisco-IOS-XR-infra-rsi-cfg:vrf-name = ' + str(self.vrf_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfGroups.VrfGroup.Vrfs.Vrf']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-infra-rsi-cfg:vrfs'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf is not None:
for child_ref in self.vrf:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfGroups.VrfGroup.Vrfs']['meta_info']
@property
def _common_path(self):
if self.vrf_group_name is None:
raise YPYModelError('Key property vrf_group_name is None')
return '/Cisco-IOS-XR-infra-rsi-cfg:vrf-groups/Cisco-IOS-XR-infra-rsi-cfg:vrf-group[Cisco-IOS-XR-infra-rsi-cfg:vrf-group-name = ' + str(self.vrf_group_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf_group_name is not None:
return True
if self.enable is not None:
return True
if self.vrfs is not None and self.vrfs._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfGroups.VrfGroup']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:vrf-groups'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrf_group is not None:
for child_ref in self.vrf_group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['VrfGroups']['meta_info']
class SelectiveVrfDownload(object):
"""
selective vrf download
.. attribute:: disable
Disable selective VRF download
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rsi-cfg'
_revision = '2015-07-30'
def __init__(self):
self.disable = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-rsi-cfg:selective-vrf-download'
def is_config(self):
''' Returns True if this instance represents config data, else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.disable is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rsi_cfg as meta
return meta._meta_table['SelectiveVrfDownload']['meta_info']
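# Hedged usage sketch (editorial addition, not part of the generated model):
# every container above follows the same YDK 0.x pattern -- key leaves must be
# set before `_common_path` can be derived, and `_has_data()` walks children.
# The YList append semantics below are the usual YDK 0.x behaviour and are
# assumed here, not shown elsewhere in this file.
from ydk.types import Empty

def _example_srlg_config():
    srlg = Srlg()
    srlg.enable = Empty()                      # presence leaf: enable SRLG
    name = Srlg.SrlgNames.SrlgName()
    name.srlg_name = 'backbone-fiber'          # key, length 0..64 (illustrative value)
    name.srlg_value = 100                      # mandatory SRLG value
    srlg.srlg_names.srlg_name.append(name)
    assert srlg._has_data()                    # True once any child holds data
    # srlg._common_path == '/Cisco-IOS-XR-infra-rsi-cfg:srlg'
    return srlg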
| 37.966617
| 371
| 0.444393
| 11,974
| 128,517
| 4.548271
| 0.023718
| 0.048916
| 0.061145
| 0.050128
| 0.877197
| 0.85474
| 0.823121
| 0.802225
| 0.779181
| 0.763574
| 0
| 0.01389
| 0.47845
| 128,517
| 3,384
| 372
| 37.977837
| 0.798621
| 0.208292
| 0
| 0.799238
| 0
| 0.017154
| 0.118715
| 0.064007
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16709
| false
| 0
| 0.053367
| 0.006353
| 0.513977
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12c47e702ad012acb9a419fdb316d00879b4f724
| 1,083
|
py
|
Python
|
__main__.py
|
Fxcilities/one-line-dpy
|
469a616ed4ab31c1b3adf526a60149346fdac433
|
[
"MIT"
] | 2
|
2020-12-12T14:57:45.000Z
|
2021-08-24T03:54:45.000Z
|
__main__.py
|
Fxcilities/one-line-dpy
|
469a616ed4ab31c1b3adf526a60149346fdac433
|
[
"MIT"
] | null | null | null |
__main__.py
|
Fxcilities/one-line-dpy
|
469a616ed4ab31c1b3adf526a60149346fdac433
|
[
"MIT"
] | null | null | null |
import os; from dotenv import load_dotenv; load_dotenv(); import asyncio; import discord; from discord.ext import commands; bot = commands.Bot(command_prefix=">"); bot.remove_command('help'); exec('@bot.event\nasync def on_ready():\n print(f"Bot Ready as {bot.user} ({bot.user.id}) - Created by fxcilities#4728")'); exec ('@bot.command()\nasync def rocket(ctx):\n embed = discord.Embed(title="Rocket", description="This bot is made in one line. React to start something fun!", color=discord.Color.blue())\n msg = await ctx.send(embed=embed); await msg.add_reaction("🚀")\n def check(reaction, user):\n return user == ctx.author and str(reaction.emoji) == "🚀"\n reaction1, user1 = await bot.wait_for("reaction_add", check=check)\n await msg.edit(embed=discord.Embed(title="WEEEEEE 🔥🔥🚀", color=discord.Color.blurple(), description="Wow! You made it! Check out some other amazing bots such as [Essentials Bot](https://essentialsbot.xyz), [Bois Bot](https://bois-bot.tech), and [World Bot](https://top.gg/bot/700292147311542282)"))'); bot.run(os.getenv("token"))
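# Editorial note (hedged): the single line above stays "one line" by chaining
# statements with semicolons and smuggling the `async def` bodies -- which
# cannot follow a semicolon -- into exec() calls whose string arguments embed
# '\n'-separated, indented code. Unfolded, the first exec is equivalent to:
#
#   @bot.event
#   async def on_ready():
#       print(f"Bot Ready as {bot.user} ({bot.user.id}) - ...")
#
# bot.run() blocks until the bot stops, so it must be the final statement.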
| 541.5
| 1,082
| 0.713758
| 168
| 1,083
| 4.583333
| 0.535714
| 0.031169
| 0.044156
| 0.057143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.113573
| 1,083
| 1
| 1,083
| 1,083
| 0.771875
| 0
| 0
| 0
| 0
| 2
| 0.786704
| 0.207756
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
420bad47675c7d8c8695896d762524fe7209159b
| 164
|
py
|
Python
|
tsfel/classification/__init__.py
|
TSFDlib/TSFEL
|
a4c30acc93dd3717bf93b19e59c3dc927903caf2
|
[
"MIT"
] | 1
|
2020-08-02T03:26:32.000Z
|
2020-08-02T03:26:32.000Z
|
tsfel/classification/__init__.py
|
TSFDlib/TSFEL
|
a4c30acc93dd3717bf93b19e59c3dc927903caf2
|
[
"MIT"
] | null | null | null |
tsfel/classification/__init__.py
|
TSFDlib/TSFEL
|
a4c30acc93dd3717bf93b19e59c3dc927903caf2
|
[
"MIT"
] | null | null | null |
from TSFEL.tsfel.classification.supervised_learning import find_best_slclassifier
from TSFEL.tsfel.classification.hyperparameters_tunning import hyperparam_tunning
| 54.666667
| 81
| 0.914634
| 19
| 164
| 7.631579
| 0.631579
| 0.124138
| 0.193103
| 0.386207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 164
| 2
| 82
| 82
| 0.929487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
426c0589709796fbb313d1826d2d931f2356a1ce
| 14,142
|
py
|
Python
|
enCount/integration/experiments_mock.py
|
mstrazar/enCount
|
dcff565ce96afe37aa8a41995637d00cce02360d
|
[
"MIT"
] | null | null | null |
enCount/integration/experiments_mock.py
|
mstrazar/enCount
|
dcff565ce96afe37aa8a41995637d00cce02360d
|
[
"MIT"
] | null | null | null |
enCount/integration/experiments_mock.py
|
mstrazar/enCount
|
dcff565ce96afe37aa8a41995637d00cce02360d
|
[
"MIT"
] | null | null | null |
# Mock return of URL fetch for ID
def get_online_list(sample_name):
return {sample_name: [{'Controlled by': '/files/ENCFF667EXM/', 'Experiment target': 'TARDBP-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR134JRE', 'Size': '2097429588', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF513YBI', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '2', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': 'fa266ef0de969f7a65371dcecee97718', 'File accession': 'ENCFF239VZF', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF239VZF/@@download/ENCFF239VZF.fastq.gz', 'Paired end': '2', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '', 'Experiment target': 'Non-specific target control-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR129RWD', 'Size': '2457367686', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF891EGO', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '2', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': 'd88f780a73d5d893cc605747593f3d45', 'File accession': 'ENCFF667EXM', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF667EXM/@@download/ENCFF667EXM.fastq.gz', 'Paired end': '2', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': 
'/files/ENCFF569YVH/', 'Experiment target': 'TARDBP-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR134JRE', 'Size': '2022894136', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF624OCC', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '1', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': '2cbc7a72d60147a5d3aa196a1b4fdfd6', 'File accession': 'ENCFF604UQO', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF604UQO/@@download/ENCFF604UQO.fastq.gz', 'Paired end': '2', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '', 'Experiment target': 'Non-specific target control-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR129RWD', 'Size': '2365716737', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF726LTF', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '1', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': 'f8b52d8de46ec0c27c9036b1863ce246', 'File accession': 'ENCFF569YVH', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF569YVH/@@download/ENCFF569YVH.fastq.gz', 'Paired end': '2', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '/files/ENCFF891EGO/', 'Experiment 
target': 'TARDBP-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR134JRE', 'Size': '2012751063', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF239VZF', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '2', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': 'd6e5a7a5295432e95198cf4c474a5122', 'File accession': 'ENCFF513YBI', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF513YBI/@@download/ENCFF513YBI.fastq.gz', 'Paired end': '1', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '', 'Experiment target': 'Non-specific target control-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR129RWD', 'Size': '2393221685', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF667EXM', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '2', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': '0ab5a31c32292d4ef03935837401e4d1', 'File accession': 'ENCFF891EGO', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF891EGO/@@download/ENCFF891EGO.fastq.gz', 'Paired end': '1', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '/files/ENCFF726LTF/', 'Experiment target': 'TARDBP-human', 'Library size 
range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR134JRE', 'Size': '1951450497', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF604UQO', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '1', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': '5d97fc02c972e5fdb9486625d6567523', 'File accession': 'ENCFF624OCC', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF624OCC/@@download/ENCFF624OCC.fastq.gz', 'Paired end': '1', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}, {'Controlled by': '', 'Experiment target': 'Non-specific target control-human', 'Library size range': '>200', 'Library made from': 'polyadenylated mRNA', 'Biosample life stage': 'adult', 'Audit INTERNAL_ACTION': 'inconsistent assay_term_id, NTR assay', 'Platform': 'HiSeq 2000', 'Biosample term id': 'EFO:0002067', 'Experiment accession': 'ENCSR129RWD', 'Size': '2299772019', 'Assay': 'shRNA knockdown followed by RNA-seq', 'Library lysis method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library fragmentation method': 'chemical (Illumina TruSeq)', 'Library crosslinking method': '', 'Audit ERROR': '', 'Paired with': 'ENCFF569YVH', 'Run type': 'paired-ended', 'Biosample treatments': '', 'Project': 'ENCODE', 'Technical replicate': '1', 'Audit NOT_COMPLIANT': '', 'Antibody accession': '', 'Biosample sex': 'female', 'Biosample synchronization stage': '', 'Library extraction method': 'Maxwell 16 LEV simpleRNA Cells Kit (Promega cat#: AS1270)', 'Library depleted in': '', 'Derived from': '', 'Mapped read length': '', 'Biosample organism': 'Homo sapiens', 'RBNS protein concentration': '', 'Experiment date released': '2014-10-16', 'Audit WARNING': 'missing flowcell_details', 'Biological replicate(s)': '1', 'Lab': 'Brenton Graveley, UConn', 'Output type': 'reads', 'Biosample term name': 'K562', 'md5sum': 'bfebdf38bc093f701f09e0ba083a8ef0', 'File accession': 'ENCFF726LTF', 'Biosample Age': '53 year', 'Read length': '100', 'Biosample type': 'immortalized cell line', 'File Status': 'released', 'Biosample subcellular fraction term name': '', 'File download URL': 'https://www.encodeproject.org/files/ENCFF726LTF/@@download/ENCFF726LTF.fastq.gz', 'Paired end': '1', 'File format': 'fastq', 'Assembly': '', 'Biosample phase': ''}]}
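# Hedged usage sketch (editorial addition): get_online_list returns
# {sample_name: [<file metadata dict>, ...]}, mirroring the columns of an
# ENCODE file report. For example, collecting the fastq download URLs of
# read-1 files only:
def _read1_urls(sample_name):
    files = get_online_list(sample_name)[sample_name]
    return [f['File download URL'] for f in files if f['Paired end'] == '1']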
| 4,714
| 14,074
| 0.706477
| 1,604
| 14,142
| 6.201372
| 0.104115
| 0.009651
| 0.024128
| 0.028953
| 0.901076
| 0.901076
| 0.901076
| 0.901076
| 0.901076
| 0.900573
| 0
| 0.058936
| 0.108542
| 14,142
| 3
| 14,074
| 4,714
| 0.730071
| 0.002192
| 0
| 0
| 0
| 0
| 0.776754
| 0.018143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
42b0f11116931d50a7ec6c70ea5951ccc01c0a48
| 52
|
py
|
Python
|
tests/test_canvenom.py
|
robotpy/robotpy-playingwithfusion
|
1afa00bc88e50f2c3c25ac1fc92ce40e70d41305
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:16:52.000Z
|
2021-03-21T11:16:52.000Z
|
tests/test_canvenom.py
|
robotpy/robotpy-playingwithfusion
|
1afa00bc88e50f2c3c25ac1fc92ce40e70d41305
|
[
"Apache-2.0"
] | 4
|
2021-03-20T23:29:49.000Z
|
2022-03-04T21:28:31.000Z
|
tests/test_canvenom.py
|
robotpy/robotpy-playingwithfusion
|
1afa00bc88e50f2c3c25ac1fc92ce40e70d41305
|
[
"Apache-2.0"
] | null | null | null |
import playingwithfusion
def test_pwf():
pass
| 8.666667
| 24
| 0.730769
| 6
| 52
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211538
| 52
| 5
| 25
| 10.4
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
c4176a387685389413ae4cecf5901b7594e1c2bb
| 2,072
|
py
|
Python
|
akdigitalpy/TwosComplement.py
|
AbhijeetJK/akdigitalpy
|
94b0739cff1b37c838b13857b8d08a5a7c5243f9
|
[
"MIT"
] | null | null | null |
akdigitalpy/TwosComplement.py
|
AbhijeetJK/akdigitalpy
|
94b0739cff1b37c838b13857b8d08a5a7c5243f9
|
[
"MIT"
] | null | null | null |
akdigitalpy/TwosComplement.py
|
AbhijeetJK/akdigitalpy
|
94b0739cff1b37c838b13857b8d08a5a7c5243f9
|
[
"MIT"
] | null | null | null |
def TwosComplementOf(n):
    # Accept an int or a decimal string; the two branches of the original
    # performed identical work, so normalise to int once up front.
    binintnum = int(n)
    # Render as 8-bit binary, then round-trip through int(); note this
    # drops leading zeros (e.g. 2 -> '00000010' -> 10 -> '10').
    strnum = str(int("{0:08b}".format(binintnum)))
    # Two's complement shortcut: scan from the right for the first '1'.
    idx = len(strnum) - 1
    while idx >= 0:
        if strnum[idx] == '1':
            break
        idx = idx - 1
    if idx == -1:
        # All zeros: prepend a carry bit, as the original did.
        return '1' + strnum
    # Keep everything from idx rightwards, invert everything to its left.
    bits = list(strnum)
    for position in range(idx - 1, -1, -1):
        bits[position] = '0' if bits[position] == '1' else '1'
    return ''.join(bits)
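# Hedged usage sketch (editorial addition, not in the original file); note
# that a string input is parsed as a *decimal* number via int(), just like
# an int input.
if __name__ == '__main__':
    print(TwosComplementOf(12))   # '0100' : 1100 -> invert bits left of the last '1'
    print(TwosComplementOf('5'))  # '011'  : 101  -> 011
    print(TwosComplementOf(0))    # '10'   : all-zero input prepends a carry bit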
| 35.724138
| 58
| 0.29971
| 144
| 2,072
| 4.3125
| 0.173611
| 0.225443
| 0.057971
| 0.135266
| 0.89211
| 0.830918
| 0.830918
| 0.830918
| 0.830918
| 0.830918
| 0
| 0.037688
| 0.61583
| 2,072
| 57
| 59
| 36.350877
| 0.742462
| 0
| 0
| 0.895833
| 0
| 0
| 0.011952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c4309663ebb58238547f6314f533c8e102b28a13
| 138
|
py
|
Python
|
src/glip/__init__.py
|
anibali/glip
|
50359cdab0064ce233f368039439f4ac1e39e0f9
|
[
"Apache-2.0"
] | null | null | null |
src/glip/__init__.py
|
anibali/glip
|
50359cdab0064ce233f368039439f4ac1e39e0f9
|
[
"Apache-2.0"
] | null | null | null |
src/glip/__init__.py
|
anibali/glip
|
50359cdab0064ce233f368039439f4ac1e39e0f9
|
[
"Apache-2.0"
] | null | null | null |
from glip.config import *
from glip.gl.context import *
from glip.gl.input import *
from glip.gl.objects import *
from glip.math import *
| 23
| 29
| 0.76087
| 23
| 138
| 4.565217
| 0.391304
| 0.380952
| 0.533333
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144928
| 138
| 5
| 30
| 27.6
| 0.889831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
671768bd470e73b0ee10cd4ef69156114c10a7b9
| 162
|
py
|
Python
|
hulusubs_dl/__init__.py
|
HacDan/Hulusubs_dl
|
e8b5faeb8197aaf897c06b35da34942155a94c16
|
[
"MIT"
] | 42
|
2015-09-29T06:07:17.000Z
|
2020-12-27T19:34:47.000Z
|
hulusubs_dl/__init__.py
|
HacDan/Hulusubs_dl
|
e8b5faeb8197aaf897c06b35da34942155a94c16
|
[
"MIT"
] | 19
|
2016-07-10T18:14:37.000Z
|
2020-12-29T07:33:12.000Z
|
hulusubs_dl/__init__.py
|
HacDan/Hulusubs_dl
|
e8b5faeb8197aaf897c06b35da34942155a94c16
|
[
"MIT"
] | 14
|
2016-07-06T02:56:16.000Z
|
2020-12-09T00:50:57.000Z
|
# #!/usr/bin/env python
# # -*- coding: utf-8 -*-
#
# from hulusubs_dl import cust_utils
# from hulusubs_dl import __version__
# from hulusubs_dl import hulu_api
| 23.142857
| 37
| 0.722222
| 24
| 162
| 4.5
| 0.666667
| 0.333333
| 0.388889
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0.154321
| 162
| 6
| 38
| 27
| 0.781022
| 0.907407
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67481025eda9a1c51f39055430b7b416b714a56e
| 183
|
py
|
Python
|
sc2/constants.py
|
drakonnan1st/JackBot
|
345df784098cb9eb055b3901fe7455807c58a4e1
|
[
"MIT"
] | null | null | null |
sc2/constants.py
|
drakonnan1st/JackBot
|
345df784098cb9eb055b3901fe7455807c58a4e1
|
[
"MIT"
] | null | null | null |
sc2/constants.py
|
drakonnan1st/JackBot
|
345df784098cb9eb055b3901fe7455807c58a4e1
|
[
"MIT"
] | null | null | null |
"""Groups the constants from sc2"""
from .ids.ability_id import *
from .ids.buff_id import *
from .ids.effect_id import *
from .ids.unit_typeid import *
from .ids.upgrade_id import *
| 26.142857
| 35
| 0.748634
| 30
| 183
| 4.4
| 0.466667
| 0.265152
| 0.393939
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.136612
| 183
| 6
| 36
| 30.5
| 0.829114
| 0.15847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
674a98c7857812beebf20317b2a821bd0a835b08
| 2,134
|
py
|
Python
|
tests/types/test_string_annotations.py
|
nyejon/strawberry
|
664fde292fe2186c3e33e8cae79964b866fa5822
|
[
"MIT"
] | 1
|
2021-05-26T18:31:11.000Z
|
2021-05-26T18:31:11.000Z
|
tests/types/test_string_annotations.py
|
nyejon/strawberry
|
664fde292fe2186c3e33e8cae79964b866fa5822
|
[
"MIT"
] | 43
|
2021-07-05T22:51:03.000Z
|
2022-03-29T10:44:58.000Z
|
tests/types/test_string_annotations.py
|
nyejon/strawberry
|
664fde292fe2186c3e33e8cae79964b866fa5822
|
[
"MIT"
] | null | null | null |
# type: ignore
from typing import List, Optional
import strawberry
def test_basic_types():
@strawberry.type
class Query:
name: "str"
age: "int"
definition = Query._type_definition
assert definition.name == "Query"
assert len(definition.fields) == 2
assert definition.fields[0].graphql_name == "name"
assert definition.fields[0].type == str
assert definition.fields[1].graphql_name == "age"
assert definition.fields[1].type == int
def test_optional():
@strawberry.type
class Query:
name: "Optional[str]"
age: "Optional[int]"
definition = Query._type_definition
assert definition.name == "Query"
assert len(definition.fields) == 2
assert definition.fields[0].graphql_name == "name"
assert definition.fields[0].type == str
assert definition.fields[0].is_optional
assert definition.fields[1].graphql_name == "age"
assert definition.fields[1].type == int
assert definition.fields[1].is_optional
def test_basic_list():
@strawberry.type
class Query:
names: "List[str]"
definition = Query._type_definition
assert definition.name == "Query"
assert len(definition.fields) == 1
assert definition.fields[0].graphql_name == "names"
assert definition.fields[0].is_list
assert definition.fields[0].type is None
assert definition.fields[0].is_optional is False
assert definition.fields[0].child.type == str
assert definition.fields[0].child.is_optional is False
def test_list_of_types():
global User
@strawberry.type
class User:
name: str
@strawberry.type
class Query:
users: "List[User]"
definition = Query._type_definition
assert definition.name == "Query"
assert len(definition.fields) == 1
assert definition.fields[0].graphql_name == "users"
assert definition.fields[0].is_list
assert definition.fields[0].type is None
assert definition.fields[0].is_optional is False
assert definition.fields[0].child.type == User
assert definition.fields[0].child.is_optional is False
del User
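These tests rely on string (forward-reference) annotations being evaluated lazily. A minimal stdlib illustration of the same resolution step, independent of strawberry's internals (hypothetical snippet, not from the test suite):

from typing import List, Optional, get_type_hints

class Example:
    name: "Optional[str]"
    sizes: "List[int]"

# get_type_hints() evaluates the annotation strings in the defining
# module's namespace, which is roughly what strawberry does on demand.
hints = get_type_hints(Example)
assert hints["name"] == Optional[str]
assert hints["sizes"] == List[int]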
| 24.25
| 58
| 0.683693
| 271
| 2,134
| 5.273063
| 0.132841
| 0.291113
| 0.338698
| 0.273618
| 0.779566
| 0.740378
| 0.730581
| 0.730581
| 0.730581
| 0.6676
| 0
| 0.015366
| 0.207123
| 2,134
| 87
| 59
| 24.528736
| 0.829196
| 0.005623
| 0
| 0.627119
| 0
| 0
| 0.044811
| 0
| 0
| 0
| 0
| 0
| 0.508475
| 1
| 0.067797
| false
| 0
| 0.033898
| 0
| 0.305085
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
67636e67b5e0674a32f8d82c33f6a254559aada6
| 27,278
|
py
|
Python
|
python/deprecated/analysis/plt_times.py
|
JimmyZhang12/predict-T
|
8ae818b0791104de20633ce91e6d633cda7445b3
|
[
"MIT"
] | 1
|
2020-05-25T00:33:59.000Z
|
2020-05-25T00:33:59.000Z
|
python/deprecated/analysis/plt_times.py
|
JimmyZhang12/predict-T
|
8ae818b0791104de20633ce91e6d633cda7445b3
|
[
"MIT"
] | 13
|
2020-01-30T22:34:13.000Z
|
2020-06-25T13:54:57.000Z
|
python/deprecated/analysis/plt_times.py
|
JimmyZhang12/predict-T
|
8ae818b0791104de20633ce91e6d633cda7445b3
|
[
"MIT"
] | 1
|
2020-10-13T05:58:58.000Z
|
2020-10-13T05:58:58.000Z
|
import pandas as pd
import glob
import numpy as np
import math
import sys
import matplotlib.pyplot as plt
import argparse
#------------------------------------------------------------------------------------------------
# Base Systems for Characterization:
# 4 Mechanism, [decor, sensor, uarch, signature]
# 7 Benchmarks, [dijkstra, fft, ffti, qsort, sha, toast, untoast]
# 3 PDN/CPU, [mobile, laptop, desktop]
#------------------------------------------------------------------------------------------------
mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [31090,63448,59109,67372,20112,36405,52475],
"IdealSensor" : [33944,65493,62643,66770,20452,34114,53154],
"uArchEvent" : [31942,67464,63322,68689,25699,42591,56181],
"Signature" : [30337,65927,62660,67223,21057,37316,52310],
"T.a.S." : [1,1,1,1,1,1,1],
"InstPending" : [1,1,1,1,1,1,1]
}
laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [72642,69036,68759,58794,55161,78257,100496],
"IdealSensor" : [68085,56412,53436,63161,29677,53859,45715],
"uArchEvent" : [57103,53999,51468,64172,28010,49502,50900],
"Signature" : [53732,60792,57012,58778,41080,47613,61685],
"T.a.S." : [1,1,1,1,1,1,1],
"InstPending" : [1,1,1,1,1,1,1]
}
desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [57711,64126,62402,68575,51750,71038,97944],
"IdealSensor" : [61854,68579,64148,76647,56137,80525,110416],
"uArchEvent" : [61218,66537,63043,70047,56806,76683,105944],
"Signature" : [57071,65799,62865,68764,53216,74062,103314],
"T.a.S." : [1,1,1,1,1,1,1],
"InstPending" : [1,1,1,1,1,1,1]
}
speedup_mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(mobile["DecorOnly"],mobile["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(mobile["IdealSensor"],mobile["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(mobile["uArchEvent"],mobile["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(mobile["Signature"],mobile["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(mobile["T.a.S."],mobile["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(mobile["InstPending"],mobile["DecorOnly"])]
}
speedup_laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(laptop["DecorOnly"],laptop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(laptop["IdealSensor"],laptop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(laptop["uArchEvent"],laptop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(laptop["Signature"],laptop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(laptop["T.a.S."],laptop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(laptop["InstPending"],laptop["DecorOnly"])]
}
speedup_desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(desktop["DecorOnly"],desktop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(desktop["IdealSensor"],desktop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(desktop["uArchEvent"],desktop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(desktop["Signature"],desktop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(desktop["T.a.S."],desktop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(desktop["InstPending"],desktop["DecorOnly"])]
}
data = [speedup_mobile, speedup_laptop, speedup_desktop]
name = ["Mobile", "Laptop", "Desktop"]
tick_labels = ["DecorOnly", "IdealSensor", "uArchEvent", "Signature"]
benchmarks = ["dijkstra","fft","ffti","qsort","sha","toast","untoast"]
fname = ["speedup_mobile.png", "speedup_laptop.png", "speedup_desktop.png"]
bounds = [[0.0,2.5,0.1],[0.0,2.5,0.1],[0.0,2.5,0.1]]
for k in range(len(data)):
df=[data[k]["DecorOnly"],data[k]["IdealSensor"],data[k]["uArchEvent"],data[k]["Signature"]]
pos = list(range(len(df)))
width = 0.125
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="dijkstra", color="w", hatch="/"*1, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="fft", color="w", hatch="o"*2, fill=True, linewidth=1, edgecolor="k")
i+=1
    plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="ffti", color="w", hatch="X"*4, fill=False, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="qsort", color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="sha", color="w", hatch="-"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="toast", color="w", hatch='\\'*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="untoast", color="w", hatch="."*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[k])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[k][0],bounds[k][1],bounds[k][2]))
ax.set_ylim(bounds[k][0],bounds[k][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[k])
plt.show()
it = \
{
"names" : ["Mobile","Laptop","Desktop"],
"IdealSensor" : [sum(speedup_mobile["IdealSensor"])/len(speedup_mobile["IdealSensor"]), \
sum(speedup_laptop["IdealSensor"])/len(speedup_laptop["IdealSensor"]), \
sum(speedup_desktop["IdealSensor"])/len(speedup_desktop["IdealSensor"])],
"uArchEvent" : [sum(speedup_mobile["uArchEvent"])/len(speedup_mobile["uArchEvent"]), \
sum(speedup_laptop["uArchEvent"])/len(speedup_laptop["uArchEvent"]), \
sum(speedup_desktop["uArchEvent"])/len(speedup_desktop["uArchEvent"])],
"Signature" : [sum(speedup_mobile["Signature"])/len(speedup_mobile["Signature"]), \
sum(speedup_laptop["Signature"])/len(speedup_laptop["Signature"]), \
sum(speedup_desktop["Signature"])/len(speedup_desktop["Signature"])],
"T.a.S." : [sum(speedup_mobile["T.a.S."])/len(speedup_mobile["T.a.S."]), \
sum(speedup_laptop["T.a.S."])/len(speedup_laptop["T.a.S."]), \
sum(speedup_desktop["T.a.S."])/len(speedup_desktop["T.a.S."])],
"InstPending" : [sum(speedup_mobile["InstPending"])/len(speedup_mobile["InstPending"]), \
sum(speedup_laptop["InstPending"])/len(speedup_laptop["InstPending"]), \
sum(speedup_desktop["InstPending"])/len(speedup_desktop["InstPending"])],
}
name = ["Average Speedup Across Systems"]
tick_labels = ["IdealSensor", "uArchEvent", "Signature"]
benchmarks = ["Mobile","Latop","Desktop"]
fname = ["speedup_original.png"]
bounds = [[0,2,0.1,0]]
df=[it["IdealSensor"],it["uArchEvent"],it["Signature"]]
pos = list(range(len(df)))
width = 0.2
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="\\"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="X"*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[0])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[0][0],bounds[0][1],bounds[0][2]))
ax.set_ylim(bounds[0][0],bounds[0][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
b = ax.get_ygridlines()
b[bounds[0][3]].set_color('k')
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[0])
plt.show()
#------------------------------------------------------------------------------------------------
# System w/throttle on restore from DeCoR:
# 6 Mechanism, [decor, sensor, uarch, signature, T.a.S., InstPending]
# 7 Benchmarks, [dijkstra, fft, ffti, qsort, sha, toast, untoast]
# 3 PDN/CPU, [mobile, laptop, desktop]
#------------------------------------------------------------------------------------------------
mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [28098,67612,62139,67636,20724,40895,52236],
"IdealSensor" : [28514,66632,63939,68171,19947,42651,54294],
"uArchEvent" : [32041,68891,65767,70439,23166,43036,56343],
"Signature" : [29793,66790,64205,67635,19823,41566,52882],
"T.a.S." : [40470,67396,63857,66647,25576,44067,63489],
"InstPending" : [32003,65364,60882,68886,22917,45615,54567]
}
laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [70445,65184,62909,62385,54590,62252,82334],
"IdealSensor" : [44243,56213,52974,63043,27914,49397,41887],
"uArchEvent" : [40433,53999,51468,64540,29108,50093,47674],
"Signature" : [63819,60527,57595,62340,43048,53177,61285],
"T.a.S." : [59462,53868,50729,63739,29195,46762,43347],
"InstPending" : [67375,60215,63580,64120,40961,53401,51883]
}
desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [60137,61940,63114,70465,54424,77832,105528],
"IdealSensor" : [61576,68604,64093,77183,56344,80881,111091],
"uArchEvent" : [61624,63859,63581,70383,54852,75369,107261],
"Signature" : [60594,67262,62996,72555,55553,77343,106184],
"T.a.S." : [60265,64140,63860,69893,54406,79301,108210],
"InstPending" : [60049,62074,63189,71477,55950,76971,108127]
}
speedup_mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(mobile["DecorOnly"],mobile["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(mobile["IdealSensor"],mobile["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(mobile["uArchEvent"],mobile["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(mobile["Signature"],mobile["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(mobile["T.a.S."],mobile["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(mobile["InstPending"],mobile["DecorOnly"])]
}
speedup_laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(laptop["DecorOnly"],laptop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(laptop["IdealSensor"],laptop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(laptop["uArchEvent"],laptop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(laptop["Signature"],laptop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(laptop["T.a.S."],laptop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(laptop["InstPending"],laptop["DecorOnly"])]
}
speedup_desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(desktop["DecorOnly"],desktop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(desktop["IdealSensor"],desktop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(desktop["uArchEvent"],desktop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(desktop["Signature"],desktop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(desktop["T.a.S."],desktop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(desktop["InstPending"],desktop["DecorOnly"])]
}
data = [speedup_mobile, speedup_laptop, speedup_desktop]
name = ["Mobile Throttle on Restore", "Laptop Throttle on Restore", "Desktop Throttle on Restore"]
tick_labels = ["DecorOnly", "IdealSensor", "uArchEvent", "Signature", "T.a.S", "InstPending"]
benchmarks = ["dijkstra","fft","ffti","qsort","sha","toast","untoast"]
fname = ["speedup_mobile_tor.png", "speedup_laptop_tor.png", "speedup_desktop_tor.png"]
bounds = [[0.0,2.0,0.1],[0.0,2.0,0.1],[0.0,2.0,0.1]]
for k in range(len(data)):
df=[data[k]["DecorOnly"],data[k]["IdealSensor"],data[k]["uArchEvent"],data[k]["Signature"],data[k]["T.a.S."],data[k]["InstPending"]]
pos = list(range(len(df)))
width = 0.125
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="dijkstra", color="w", hatch="/"*1, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="fft", color="w", hatch="o"*2, fill=True, linewidth=1, edgecolor="k")
i+=1
    plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="ffti", color="w", hatch="X"*4, fill=False, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="qsort", color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="sha", color="w", hatch="-"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="toast", color="w", hatch='\\'*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="untoast", color="w", hatch="."*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[k])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[k][0],bounds[k][1],bounds[k][2]))
ax.set_ylim(bounds[k][0],bounds[k][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[k])
plt.show()
it = \
{
"names" : ["Mobile","Laptop","Desktop"],
"IdealSensor" : [sum(speedup_mobile["IdealSensor"])/len(speedup_mobile["IdealSensor"]), \
sum(speedup_laptop["IdealSensor"])/len(speedup_laptop["IdealSensor"]), \
sum(speedup_desktop["IdealSensor"])/len(speedup_desktop["IdealSensor"])],
"uArchEvent" : [sum(speedup_mobile["uArchEvent"])/len(speedup_mobile["uArchEvent"]), \
sum(speedup_laptop["uArchEvent"])/len(speedup_laptop["uArchEvent"]), \
sum(speedup_desktop["uArchEvent"])/len(speedup_desktop["uArchEvent"])],
"Signature" : [sum(speedup_mobile["Signature"])/len(speedup_mobile["Signature"]), \
sum(speedup_laptop["Signature"])/len(speedup_laptop["Signature"]), \
sum(speedup_desktop["Signature"])/len(speedup_desktop["Signature"])],
"T.a.S." : [sum(speedup_mobile["T.a.S."])/len(speedup_mobile["T.a.S."]), \
sum(speedup_laptop["T.a.S."])/len(speedup_laptop["T.a.S."]), \
sum(speedup_desktop["T.a.S."])/len(speedup_desktop["T.a.S."])],
"InstPending" : [sum(speedup_mobile["InstPending"])/len(speedup_mobile["InstPending"]), \
sum(speedup_laptop["InstPending"])/len(speedup_laptop["InstPending"]), \
sum(speedup_desktop["InstPending"])/len(speedup_desktop["InstPending"])],
}
name = ["Average Speedup Across Systems"]
tick_labels = ["IdealSensor", "uArchEvent", "Signature", "T.a.S.", "InstPending"]
benchmarks = ["Mobile","Latop","Desktop"]
fname = ["speedup_constrained.png"]
bounds = [[0,2,0.1,0]]
df=[it["IdealSensor"],it["uArchEvent"],it["Signature"],it["T.a.S."],it["InstPending"]]
pos = list(range(len(df)))
width = 0.2
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="\\"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="X"*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[0])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[0][0],bounds[0][1],bounds[0][2]))
ax.set_ylim(bounds[0][0],bounds[0][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
b = ax.get_ygridlines()
b[bounds[0][3]].set_color('k')
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[0])
plt.show()
#------------------------------------------------------------------------------------------------
# System w/throttle on restore from DeCoR and Harvard PDNs for each
# 6 Mechanism, [decor, sensor, uarch, signature, T.a.S., InstPending]
# 7 Benchmarks, [dijkstra, fft, ffti, qsort, sha, toast, untoast]
# 3 PDN/CPU, [mobile, laptop, desktop]
#------------------------------------------------------------------------------------------------
mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [32846,58489,56254,68327,19005,33934,50720],
"IdealSensor" : [42124,58621,56386,69668,19580,35955,55040],
"uArchEvent" : [38324,59028,56121,68284,19163,37111,51255],
"Signature" : [34422,59126,55847,68771,19229,35038,50343],
"T.a.S." : [37677,58643,56266,70464,25292,41389,61009],
"InstPending" : [33424,58625,56390,70242,23759,40431,52858]
}
laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [14453,53604,50600,61527,17487,28794,36208],
"IdealSensor" : [31336,55554,53482,62805,25856,40182,43012],
"uArchEvent" : [23599,53906,50600,63586,18579,33920,36303],
"Signature" : [21452,53593,49958,61492,17532,28485,37635],
"T.a.S." : [19093,53746,50851,64126,20117,33115,42520],
"InstPending" : [18413,53608,50727,59645,24249,37304,42428]
}
desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [15520,57650,55070,61485,21185,34999,37827],
"IdealSensor" : [22163,57918,55622,62625,22357,36333,41556],
"uArchEvent" : [21050,56276,53668,67881,18891,34582,38341],
"Signature" : [27095,58104,54685,61782,21687,34974,36409],
"T.a.S." : [14870,56012,53404,68220,21883,37333,42417],
"InstPending" : [13947,57122,53535,63290,23935,37471,42282]
}
speedup_mobile = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(mobile["DecorOnly"],mobile["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(mobile["IdealSensor"],mobile["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(mobile["uArchEvent"],mobile["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(mobile["Signature"],mobile["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(mobile["T.a.S."],mobile["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(mobile["InstPending"],mobile["DecorOnly"])]
}
speedup_laptop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(laptop["DecorOnly"],laptop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(laptop["IdealSensor"],laptop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(laptop["uArchEvent"],laptop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(laptop["Signature"],laptop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(laptop["T.a.S."],laptop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(laptop["InstPending"],laptop["DecorOnly"])]
}
speedup_desktop = \
{
"names" : ["dijkstra","fft","ffti","qsort","sha","toast","untoast"],
"DecorOnly" : [baseline/test for test,baseline in zip(desktop["DecorOnly"],desktop["DecorOnly"])],
"IdealSensor" : [baseline/test for test,baseline in zip(desktop["IdealSensor"],desktop["DecorOnly"])],
"uArchEvent" : [baseline/test for test,baseline in zip(desktop["uArchEvent"],desktop["DecorOnly"])],
"Signature" : [baseline/test for test,baseline in zip(desktop["Signature"],desktop["DecorOnly"])],
"T.a.S." : [baseline/test for test,baseline in zip(desktop["T.a.S."],desktop["DecorOnly"])],
"InstPending" : [baseline/test for test,baseline in zip(desktop["InstPending"],desktop["DecorOnly"])]
}
data = [speedup_mobile, speedup_laptop, speedup_desktop]
name = ["Mobile Throttle on Restore and Harvard PDN", "Laptop Throttle on Restore and Harvard PDN", "Desktop Throttle on Restore and Harvard PDN"]
tick_labels = ["DecorOnly", "IdealSensor", "uArchEvent", "Signature", "T.a.S", "InstPending"]
benchmarks = ["dijkstra","fft","ffti","qsort","sha","toast","untoast"]
fname = ["speedup_mobile_harvard_tor.png", "speedup_laptop_harvard_tor.png", "speedup_desktop_harvard_tor.png"]
bounds = [[0.0,1.5,0.1],[0.0,1.5,0.1],[0.0,1.5,0.1]]
for k in range(len(data)):
df=[data[k]["DecorOnly"],data[k]["IdealSensor"],data[k]["uArchEvent"],data[k]["Signature"],data[k]["T.a.S."],data[k]["InstPending"]]
pos = list(range(len(df)))
width = 0.125
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="dijkstra", color="w", hatch="/"*1, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="fft", color="w", hatch="o"*2, fill=True, linewidth=1, edgecolor="k")
i+=1
    plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="ffti", color="w", hatch="X"*4, fill=False, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="qsort", color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="sha", color="w", hatch="-"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="toast", color="w", hatch='\\'*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label="untoast", color="w", hatch="."*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[k])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[k][0],bounds[k][1],bounds[k][2]))
ax.set_ylim(bounds[k][0],bounds[k][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[k])
plt.show()
it = \
{
"names" : ["Mobile","Laptop","Desktop"],
"IdealSensor" : [sum(speedup_mobile["IdealSensor"])/len(speedup_mobile["IdealSensor"]), \
sum(speedup_laptop["IdealSensor"])/len(speedup_laptop["IdealSensor"]), \
sum(speedup_desktop["IdealSensor"])/len(speedup_desktop["IdealSensor"])],
"uArchEvent" : [sum(speedup_mobile["uArchEvent"])/len(speedup_mobile["uArchEvent"]), \
sum(speedup_laptop["uArchEvent"])/len(speedup_laptop["uArchEvent"]), \
sum(speedup_desktop["uArchEvent"])/len(speedup_desktop["uArchEvent"])],
"Signature" : [sum(speedup_mobile["Signature"])/len(speedup_mobile["Signature"]), \
sum(speedup_laptop["Signature"])/len(speedup_laptop["Signature"]), \
sum(speedup_desktop["Signature"])/len(speedup_desktop["Signature"])],
"T.a.S." : [sum(speedup_mobile["T.a.S."])/len(speedup_mobile["T.a.S."]), \
sum(speedup_laptop["T.a.S."])/len(speedup_laptop["T.a.S."]), \
sum(speedup_desktop["T.a.S."])/len(speedup_desktop["T.a.S."])],
"InstPending" : [sum(speedup_mobile["InstPending"])/len(speedup_mobile["InstPending"]), \
sum(speedup_laptop["InstPending"])/len(speedup_laptop["InstPending"]), \
sum(speedup_desktop["InstPending"])/len(speedup_desktop["InstPending"])],
}
name = ["Average Speedup Across Systems"]
tick_labels = ["IdealSensor", "uArchEvent", "Signature", "T.a.S.", "InstPending"]
benchmarks = ["Mobile","Latop","Desktop"]
fname = ["speedup_unconstrained.png"]
bounds = [[0,2,0.1,0]]
df=[it["IdealSensor"],it["uArchEvent"],it["Signature"],it["T.a.S."],it["InstPending"]]
pos = list(range(len(df)))
width = 0.2
fig, ax = plt.subplots(figsize=(10,5))
i=0
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="/"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="\\"*4, fill=True, linewidth=1, edgecolor="k")
i+=1
plt.bar([p + width*i for p in pos], [j[i] for j in df], width, label=benchmarks[i], color="w", hatch="X"*4, fill=True, linewidth=1, edgecolor="k")
ax.set_ylabel('Speedup (X)')
ax.set_title(name[0])
ax.set_xticks([p + 1.5 * width for p in pos])
ax.set_yticks(np.arange(bounds[0][0],bounds[0][1],bounds[0][2]))
ax.set_ylim(bounds[0][0],bounds[0][1])
ax.set_axisbelow(True)
ax.grid(zorder=0, color="#c4c4c4", linestyle="-", linewidth=1, axis="y")
b = ax.get_ygridlines()
b[bounds[0][3]].set_color('k')
ax.set_xticklabels(tick_labels)
#plt.legend(benchmarks, loc='upper left')
plt.legend(benchmarks, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.subplots_adjust(left=0.1, right=0.8, top=0.9, bottom=0.1)
plt.savefig(fname[0])
plt.show()
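The seven plt.bar calls in each chart section above differ only in label and hatch. A hedged sketch of how the grouped bars could be drawn in one loop with the same matplotlib calls (the helper name and hatch table are assumptions, not from the script):

def grouped_bars(ax, series, labels, hatches, width=0.125):
    # series: one list of values per mechanism; draws one hatched bar
    # per benchmark within each group, as the unrolled calls above do.
    pos = range(len(series))
    for i, (label, hatch) in enumerate(zip(labels, hatches)):
        ax.bar([p + width * i for p in pos], [s[i] for s in series], width,
               label=label, color="w", hatch=hatch, fill=True,
               linewidth=1, edgecolor="k")

# Usage mirroring the script (df/benchmarks as defined above):
# fig, ax = plt.subplots(figsize=(10, 5))
# grouped_bars(ax, df, benchmarks,
#              ["/", "o"*2, "X"*4, "/"*4, "-"*4, '\\'*4, "."*4])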
| 55.218623
| 148
| 0.63696
| 3,909
| 27,278
| 4.391916
| 0.121514
| 0.01398
| 0.010135
| 0.059762
| 0.858225
| 0.854322
| 0.844886
| 0.844886
| 0.844886
| 0.844886
| 0
| 0.088925
| 0.126439
| 27,278
| 493
| 149
| 55.330629
| 0.631541
| 0.053376
| 0
| 0.744344
| 0
| 0
| 0.21052
| 0.007985
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015837
| 0
| 0.015837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
677f2c1379f69c3827b0bbb5bff618981bc47106
| 13,584
|
py
|
Python
|
devel/lib/python2.7/dist-packages/polled_camera/srv/_GetPolledImage.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | 2
|
2021-09-22T19:06:19.000Z
|
2021-09-22T20:22:40.000Z
|
devel/lib/python2.7/dist-packages/polled_camera/srv/_GetPolledImage.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | null | null | null |
devel/lib/python2.7/dist-packages/polled_camera/srv/_GetPolledImage.py
|
EveVengerov/Gesture-Controlling-Drone
|
8fe38dbfdc496472e13e76bcdb55b471f51b42ea
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from polled_camera/GetPolledImageRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
import sensor_msgs.msg
class GetPolledImageRequest(genpy.Message):
_md5sum = "c77ed43e530fd48e9e7a2a93845e154c"
_type = "polled_camera/GetPolledImageRequest"
_has_header = False # flag to mark the presence of a Header object
_full_text = """
string response_namespace
duration timeout
uint32 binning_x
uint32 binning_y
sensor_msgs/RegionOfInterest roi
================================================================================
MSG: sensor_msgs/RegionOfInterest
# This message is used to specify a region of interest within an image.
#
# When used to specify the ROI setting of the camera when the image was
# taken, the height and width fields should either match the height and
# width fields for the associated image; or height = width = 0
# indicates that the full resolution image was captured.
uint32 x_offset # Leftmost pixel of the ROI
# (0 if the ROI includes the left edge of the image)
uint32 y_offset # Topmost pixel of the ROI
# (0 if the ROI includes the top edge of the image)
uint32 height # Height of ROI
uint32 width # Width of ROI
# True if a distinct rectified ROI should be calculated from the "raw"
# ROI in this message. Typically this should be False if the full image
# is captured (ROI not used), and True if a subwindow is captured (ROI
# used).
bool do_rectify
"""
__slots__ = ['response_namespace','timeout','binning_x','binning_y','roi']
_slot_types = ['string','duration','uint32','uint32','sensor_msgs/RegionOfInterest']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
response_namespace,timeout,binning_x,binning_y,roi
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPolledImageRequest, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.response_namespace is None:
self.response_namespace = ''
if self.timeout is None:
self.timeout = genpy.Duration()
if self.binning_x is None:
self.binning_x = 0
if self.binning_y is None:
self.binning_y = 0
if self.roi is None:
self.roi = sensor_msgs.msg.RegionOfInterest()
else:
self.response_namespace = ''
self.timeout = genpy.Duration()
self.binning_x = 0
self.binning_y = 0
self.roi = sensor_msgs.msg.RegionOfInterest()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.response_namespace
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2i6IB().pack(_x.timeout.secs, _x.timeout.nsecs, _x.binning_x, _x.binning_y, _x.roi.x_offset, _x.roi.y_offset, _x.roi.height, _x.roi.width, _x.roi.do_rectify))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.timeout is None:
self.timeout = genpy.Duration()
if self.roi is None:
self.roi = sensor_msgs.msg.RegionOfInterest()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.response_namespace = str[start:end].decode('utf-8', 'rosmsg')
else:
self.response_namespace = str[start:end]
_x = self
start = end
end += 33
(_x.timeout.secs, _x.timeout.nsecs, _x.binning_x, _x.binning_y, _x.roi.x_offset, _x.roi.y_offset, _x.roi.height, _x.roi.width, _x.roi.do_rectify,) = _get_struct_2i6IB().unpack(str[start:end])
self.roi.do_rectify = bool(self.roi.do_rectify)
self.timeout.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.response_namespace
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2i6IB().pack(_x.timeout.secs, _x.timeout.nsecs, _x.binning_x, _x.binning_y, _x.roi.x_offset, _x.roi.y_offset, _x.roi.height, _x.roi.width, _x.roi.do_rectify))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.timeout is None:
self.timeout = genpy.Duration()
if self.roi is None:
self.roi = sensor_msgs.msg.RegionOfInterest()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.response_namespace = str[start:end].decode('utf-8', 'rosmsg')
else:
self.response_namespace = str[start:end]
_x = self
start = end
end += 33
(_x.timeout.secs, _x.timeout.nsecs, _x.binning_x, _x.binning_y, _x.roi.x_offset, _x.roi.y_offset, _x.roi.height, _x.roi.width, _x.roi.do_rectify,) = _get_struct_2i6IB().unpack(str[start:end])
self.roi.do_rectify = bool(self.roi.do_rectify)
self.timeout.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_2i6IB = None
def _get_struct_2i6IB():
global _struct_2i6IB
if _struct_2i6IB is None:
_struct_2i6IB = struct.Struct("<2i6IB")
return _struct_2i6IB
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from polled_camera/GetPolledImageResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GetPolledImageResponse(genpy.Message):
_md5sum = "dbf1f851bc511800e6129ccd5a3542ab"
_type = "polled_camera/GetPolledImageResponse"
_has_header = False # flag to mark the presence of a Header object
_full_text = """bool success
string status_message
time stamp
"""
__slots__ = ['success','status_message','stamp']
_slot_types = ['bool','string','time']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
success,status_message,stamp
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPolledImageResponse, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.success is None:
self.success = False
if self.status_message is None:
self.status_message = ''
if self.stamp is None:
self.stamp = genpy.Time()
else:
self.success = False
self.status_message = ''
self.stamp = genpy.Time()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.success
buff.write(_get_struct_B().pack(_x))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.stamp.secs, _x.stamp.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.stamp is None:
self.stamp = genpy.Time()
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
_x = self
start = end
end += 8
(_x.stamp.secs, _x.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
self.stamp.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.success
buff.write(_get_struct_B().pack(_x))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.stamp.secs, _x.stamp.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.stamp is None:
self.stamp = genpy.Time()
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
_x = self
start = end
end += 8
(_x.stamp.secs, _x.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
self.stamp.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GetPolledImage(object):
_type = 'polled_camera/GetPolledImage'
_md5sum = '1f3fb0d09d6e1c72d4a7eeb9822d9030'
_request_class = GetPolledImageRequest
_response_class = GetPolledImageResponse
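A minimal client-side sketch for the generated service classes above (illustrative only, not part of the autogenerated file; the service name is an assumption, as drivers typically advertise <camera>/request_image):

import rospy
from polled_camera.srv import GetPolledImage, GetPolledImageRequest

rospy.init_node("polled_camera_client")
rospy.wait_for_service("camera/request_image")  # assumed service name
request_image = rospy.ServiceProxy("camera/request_image", GetPolledImage)

# Keyword construction works because genpy messages accept field kwargs,
# as the request class's __init__ docstring above notes.
req = GetPolledImageRequest(response_namespace="polled_camera",
                            timeout=rospy.Duration(1.0),
                            binning_x=0, binning_y=0)
resp = request_image(req)
if not resp.success:
    rospy.logwarn("polling failed: %s", resp.status_message)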
| 34.741688
| 197
| 0.656876
| 1,879
| 13,584
| 4.558808
| 0.116019
| 0.029886
| 0.023115
| 0.019846
| 0.782979
| 0.77294
| 0.768737
| 0.768737
| 0.758697
| 0.754611
| 0
| 0.016195
| 0.222688
| 13,584
| 390
| 198
| 34.830769
| 0.795056
| 0.183009
| 0
| 0.753571
| 1
| 0
| 0.171541
| 0.033542
| 0
| 0
| 0.001869
| 0
| 0
| 1
| 0.060714
| false
| 0
| 0.039286
| 0
| 0.207143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67819e29fb60bf9f0a1b2da25e369636f3b7f863
| 31,386
|
py
|
Python
|
capybara/node/matchers.py
|
dwt/capybara.py
|
eafd9ac50d02e8b57ef90d767493c8fa2be0739a
|
[
"MIT"
] | null | null | null |
capybara/node/matchers.py
|
dwt/capybara.py
|
eafd9ac50d02e8b57ef90d767493c8fa2be0739a
|
[
"MIT"
] | null | null | null |
capybara/node/matchers.py
|
dwt/capybara.py
|
eafd9ac50d02e8b57ef90d767493c8fa2be0739a
|
[
"MIT"
] | null | null | null |
from functools import wraps
import capybara
from capybara.compat import Hashable
from capybara.exceptions import ExpectationNotMet
from capybara.helpers import expects_none, matches_count
from capybara.selector import selectors
from capybara.queries.selector_query import SelectorQuery
from capybara.queries.style_query import StyleQuery
from capybara.queries.text_query import TextQuery
def predicate(func):
@wraps(func)
def wrapper(*args, **kwargs):
if "wait" not in kwargs and not capybara.predicates_wait:
kwargs["wait"] = 0
try:
return func(*args, **kwargs)
except ExpectationNotMet:
return False
return wrapper
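# Illustrative note (not part of capybara): the ``predicate`` decorator
# above turns any assertion-style method that raises ExpectationNotMet
# into a boolean check, and forces wait=0 (no retry waiting) when
# capybara.predicates_wait is off. A hypothetical use:
#
#     @predicate
#     def has_widget(page, **kwargs):
#         return page.assert_selector("#widget", **kwargs)
#
# has_widget(page) then returns False on failure instead of raising.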
class MatchersMixin(object):
@predicate
def has_selector(self, *args, **kwargs):
"""
Checks if a given selector is on the page or a descendant of the current node. ::
page.has_selector("p#foo")
page.has_selector("xpath", ".//p[@id='foo']")
By default it will check if the expression occurs at least once, but a different number can
be specified. ::
page.has_selector("p.foo", count=4)
This will check if the expression occurs exactly 4 times.
It also accepts all options that :meth:`find_all` accepts, such as ``text`` and
``visible``. ::
page.has_selector("li", text="Horse", visible=True)
``has_selector`` can also accept XPath expressions generated by the ``xpath-py`` package::
from xpath import dsl as x
page.has_selector("xpath", x.descendant("p"))
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: If the expression exists.
"""
return self.assert_selector(*args, **kwargs)
@predicate
def has_no_selector(self, *args, **kwargs):
"""
Checks if a given selector is not on the page or a descendant of the current node. Usage is
identical to :meth:`has_selector`.
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.assert_no_selector(*args, **kwargs)
@predicate
def has_style(self, styles, **kwargs):
"""
Checks if an element has the specified CSS styles. ::
element.has_style({"color": "rgb(0,0,255)", "font-size": re.compile(r"px")})
Args:
styles (Dict[str, str | RegexObject]): The expected styles.
Returns:
bool: Whether the styles match.
"""
return self.assert_style(styles, **kwargs)
@predicate
def has_all_of_selectors(self, selector, *locators, **kwargs):
"""
        Checks if all of the provided selectors are present on the given page or descendants of the
current node. If options are provided, the assertion will check that each locator is present
with those options as well (other than ``wait``). ::
page.has_all_of_selectors("custom", "Tom", "Joe", visible="all")
page.has_all_of_selectors("css", "#my_dif", "a.not_clicked")
It accepts all options that :meth:`find_all` accepts, such as ``text`` and ``visible``.
The ``wait`` option applies to all of the selectors as a group, so all of the locators must
be present within ``wait`` (defaults to :data:`capybara.default_max_wait_time`) seconds.
If the given selector is not a valid selector, the first argument is assumed to be a locator
and the default selector will be used.
Args:
selector (str, optional): The name of the selector to use. Defaults to
:data:`capybara.default_selector`.
*locators (str): Variable length list of locators.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
"""
return self.assert_all_of_selectors(selector, *locators, **kwargs)
@predicate
def has_none_of_selectors(self, selector, *locators, **kwargs):
"""
Checks if none of the provided selectors are present on the given page or descendants of the
current node. If options are provided, the assertion will check that each locator is present
with those options as well (other than ``wait``). ::
page.has_none_of_selectors("custom", "Tom", "Joe", visible="all")
page.has_none_of_selectors("css", "#my_div", "a.not_clicked")
It accepts all options that :meth:`find_all` accepts, such as ``text`` and ``visible``.
The ``wait`` option applies to all of the selectors as a group, so none of the locators must
be present with ``wait`` (defaults to :data:`capybara.default_max_wait_time`) seconds.
If the given selector is not a valid selector, the first argument is assumed to be a locator
and the default selector will be used.
Args:
selector (str, optional): The name of the selector to use. Defaults to
:data:`capybara.default_selector`.
*locators (str): Variable length list of locators.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
"""
return self.assert_none_of_selectors(selector, *locators, **kwargs)
@predicate
def matches_selector(self, *args, **kwargs):
"""
Checks if the current node matches the given selector.
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it matches.
"""
return self.assert_matches_selector(*args, **kwargs)
@predicate
def not_match_selector(self, *args, **kwargs):
"""
Checks if the current node does not match the given selector. Usage is identical to
:meth:`has_selector`.
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't match.
"""
return self.assert_not_match_selector(*args, **kwargs)
def assert_selector(self, *args, **kwargs):
"""
Asserts that a given selector is on the page or a descendant of the current node. ::
page.assert_selector("p#foo")
By default it will check if the expression occurs at least once, but a different number can
be specified. ::
page.assert_selector("p.foo", count=4)
This will check if the expression occurs exactly 4 times. See :meth:`find_all` for other
available result size options.
If a ``count`` of 0 is specified, it will behave like :meth:`assert_no_selector`; however,
use of that method is preferred over this one.
It also accepts all options that :meth:`find_all` accepts, such as ``text`` and
``visible``. ::
page.assert_selector("li", text="Horse", visible=True)
``assert_selector`` can also accept XPath expressions generated by the ``xpath-py``
package::
from xpath import dsl as x
page.assert_selector("xpath", x.descendant("p"))
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
True
Raises:
ExpectationNotMet: The given selector did not match.
"""
query = SelectorQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_selector():
result = query.resolve_for(self)
if not (result.matches_count and
(len(result) > 0 or expects_none(query.options))):
raise ExpectationNotMet(result.failure_message)
return True
return assert_selector()
def assert_style(self, styles, **kwargs):
"""
Asserts that an element has the specified CSS styles. ::
element.assert_style({"color": "rgb(0,0,255)", "font-size": re.compile(r"px")})
Args:
styles (Dict[str, str | RegexObject]): The expected styles.
Returns:
True
Raises:
ExpectationNotMet: The element doesn't have the specified styles.
"""
query = StyleQuery(styles, **kwargs)
@self.synchronize(wait=query.wait)
def assert_style():
if not query.resolves_for(self):
raise ExpectationNotMet(query.failure_message)
return True
return assert_style()
def assert_all_of_selectors(self, selector, *locators, **kwargs):
"""
Asserts that all of the provided selectors are present on the given page or descendants of
the current node. If options are provided, the assertion will check that each locator is
present with those options as well (other than ``wait``). ::
page.assert_all_of_selectors("custom", "Tom", "Joe", visible="all")
page.assert_all_of_selectors("css", "#my_dif", "a.not_clicked")
It accepts all options that :meth:`find_all` accepts, such as ``text`` and ``visible``.
The ``wait`` option applies to all of the selectors as a group, so all of the locators must
be present within ``wait`` (defaults to :data:`capybara.default_max_wait_time`) seconds.
If the given selector is not a valid selector, the first argument is assumed to be a locator
and the default selector will be used.
Args:
selector (str, optional): The name of the selector to use. Defaults to
:data:`capybara.default_selector`.
*locators (str): Variable length list of locators.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
"""
wait = kwargs['wait'] if 'wait' in kwargs else capybara.default_max_wait_time
if not isinstance(selector, Hashable) or selector not in selectors:
locators = (selector,) + locators
selector = capybara.default_selector
@self.synchronize(wait=wait)
def assert_all_of_selectors():
for locator in locators:
self.assert_selector(selector, locator, **kwargs)
return True
return assert_all_of_selectors()
def assert_none_of_selectors(self, selector, *locators, **kwargs):
"""
Asserts that none of the provided selectors are present on the given page or descendants of
the current node. If options are provided, the assertion will check that each locator is
present with those options as well (other than ``wait``). ::
page.assert_none_of_selectors("custom", "Tom", "Joe", visible="all")
page.assert_none_of_selectors("css", "#my_div", "a.not_clicked")
It accepts all options that :meth:`find_all` accepts, such as ``text`` and ``visible``.
The ``wait`` option applies to all of the selectors as a group, so none of the locators must
be present with ``wait`` (defaults to :data:`capybara.default_max_wait_time`) seconds.
If the given selector is not a valid selector, the first argument is assumed to be a locator
and the default selector will be used.
Args:
selector (str, optional): The name of the selector to use. Defaults to
:data:`capybara.default_selector`.
*locators (str): Variable length list of locators.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
"""
wait = kwargs['wait'] if 'wait' in kwargs else capybara.default_max_wait_time
if not isinstance(selector, Hashable) or selector not in selectors:
locators = (selector,) + locators
selector = capybara.default_selector
@self.synchronize(wait=wait)
def assert_none_of_selectors():
for locator in locators:
self.assert_no_selector(selector, locator, **kwargs)
return True
return assert_none_of_selectors()
def assert_no_selector(self, *args, **kwargs):
"""
Asserts that a given selector is not on the page or a descendant of the current node. Usage
is identical to :meth:`assert_selector`.
Query options such as ``count``, ``minimum``, and ``between`` are considered to be an
integral part of the selector. This will return True, for example, if a page contains 4
anchors but the query expects 5::
page.assert_no_selector("a", minimum=1) # Found, raises ExpectationNotMet
page.assert_no_selector("a", count=4) # Found, raises ExpectationNotMet
page.assert_no_selector("a", count=5) # Not Found, returns True
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
True
Raises:
ExpectationNotMet: The given selector matched.
"""
query = SelectorQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_no_selector():
result = query.resolve_for(self)
if result.matches_count and (
len(result) > 0 or expects_none(query.options)):
raise ExpectationNotMet(result.negative_failure_message)
return True
return assert_no_selector()
refute_selector = assert_no_selector
""" Alias for :meth:`assert_no_selector`. """
def assert_matches_selector(self, *args, **kwargs):
"""
Asserts that the current node matches a given selector. ::
node.assert_matches_selector("p#foo")
node.assert_matches_selector("xpath", "//p[@id='foo']")
It also accepts all options that :meth:`find_all` accepts, such as ``text`` and
``visible``. ::
node.assert_matches_selector("li", text="Horse", visible=True)
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
True
Raises:
ExpectationNotMet: If the selector does not match.
"""
query = SelectorQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_matches_selector():
result = query.resolve_for(self.find_first("xpath", "./parent::*", minimum=0) or self.query_scope)
if self not in result:
raise ExpectationNotMet("Item does not match the provided selector")
return True
return assert_matches_selector()
def assert_not_match_selector(self, *args, **kwargs):
"""
Asserts that the current node does not match a given selector. See
:meth:`assert_matches_selector`.
Args:
*args: Variable length argument list for :class:`SelectorQuery`.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
True
Raises:
ExpectationNotMet: If the selector matches.
"""
query = SelectorQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_not_match_selector():
result = query.resolve_for(self.find_first("xpath", "./parent::*", minimum=0) or self.query_scope)
if self in result:
raise ExpectationNotMet("Item matched the provided selector")
return True
return assert_not_match_selector()
refute_matches_selector = assert_not_match_selector
""" Alias for :meth:`assert_not_match_selector`. """
def matches_xpath(self, xpath, **kwargs):
"""
Checks if the current node matches the given XPath expression.
Args:
xpath (str | xpath.expression.Expression): The XPath expression to match against the
current node.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it matches.
"""
return self.matches_selector("xpath", xpath, **kwargs)
def not_match_xpath(self, xpath, **kwargs):
"""
Checks if the current node does not match the given XPath expression.
Args:
xpath (str | xpath.expression.Expression): The XPath expression to match against the
current node.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't match.
"""
return self.not_match_selector("xpath", xpath, **kwargs)
def matches_css(self, css, **kwargs):
"""
Checks if the current node matches the given CSS selector.
Args:
css (str): The CSS selector to match against the current node.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it matches.
"""
return self.matches_selector("css", css, **kwargs)
def not_match_css(self, css, **kwargs):
"""
Checks if the current node does not match the given CSS selector.
Args:
css (str): The CSS selector to match against the current node.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't match.
"""
return self.not_match_selector("css", css, **kwargs)
def has_xpath(self, query, **kwargs):
"""
Checks if a given XPath expression is on the page or a descendant of the current node. ::
session.has_xpath(".//p[@id='foo']")
``has_xpath`` can also accept XPath expressions generated by the ``xpath-py`` package::
from xpath import dsl as x
session.has_xpath(x.descendant("p"))
Args:
query (str): An XPath expression.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: If the expression exists.
"""
return self.has_selector("xpath", query, **kwargs)
def has_no_xpath(self, path, **kwargs):
"""
Checks if a given XPath expression is not on the page or a descendant of the current node.
Usage is identical to :meth:`has_xpath`.
Args:
path (str): An XPath expression.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: If the expression does not exist.
"""
return self.has_no_selector("xpath", path, **kwargs)
def has_css(self, path, **kwargs):
"""
Checks if a given CSS selector is on the page or a descendant of the current node. ::
page.has_css("p#foo")
By default it will check if the selector occurs at least once, but a different number can
be specified. ::
page.has_css("p#foo", count=4)
This will check if the selector occurs exactly 4 times.
It also accepts all options that :meth:`find_all` accepts, such as ``text`` and
``visible``. ::
page.has_css("li", text="Horse", visible=True)
Args:
path (str): A CSS selector.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: If the selector exists.
"""
return self.has_selector("css", path, **kwargs)
def has_no_css(self, path, **kwargs):
"""
Checks if a given CSS selector is not on the page or a descendant of the current node.
Usage is identical to :meth:`has_css`.
Args:
path (str): A CSS selector.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: If the selector does not exist.
"""
return self.has_no_selector("css", path, **kwargs)
def has_button(self, locator, **kwargs):
"""
Checks if the page or current node has a button with the given text, value, or id.
Args:
locator (str): The text, value, or id of a button to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
return self.has_selector("button", locator, **kwargs)
def has_no_button(self, locator, **kwargs):
"""
Checks if the page or current node has no button with the given text, value, or id.
Args:
locator (str): The text, value, or id of a button to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.has_no_selector("button", locator, **kwargs)
def has_checked_field(self, locator, **kwargs):
"""
Checks if the page or current node has a radio button or checkbox with the given label,
value, or id that is currently checked.
Args:
locator (str): The label, name, or id of a checked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
kwargs["checked"] = True
return self.has_selector("field", locator, **kwargs)
def has_no_checked_field(self, locator, **kwargs):
"""
Checks if the page or current node has no radio button or checkbox with the given label,
value, or id that is currently checked.
Args:
locator (str): The label, name, or id of a checked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
kwargs["checked"] = True
return self.has_no_selector("field", locator, **kwargs)
def has_field(self, locator, **kwargs):
"""
Checks if the page or current node has a form field with the given label, name, or id.
For text fields and other textual fields, such as textareas and HTML5 email/url/etc. fields,
it's possible to specify a ``value`` argument to specify the text the field should contain::
page.has_field("Name", value="Jonas")
Args:
locator (str): The label, name, or id of a field to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
return self.has_selector("field", locator, **kwargs)
def has_no_field(self, locator, **kwargs):
"""
Checks if the page or current node has no form field with the given label, name, or id. See
:meth:`has_field`.
Args:
locator (str): The label, name, or id of a field to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.has_no_selector("field", locator, **kwargs)
def has_link(self, locator, **kwargs):
"""
Checks if the page or current node has a link with the given text or id.
Args:
locator (str): The text or id of a link to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
return self.has_selector("link", locator, **kwargs)
def has_no_link(self, locator, **kwargs):
"""
Checks if the page or current node has no link with the given text or id.
Args:
locator (str): The text or id of a link to check for.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.has_no_selector("link", locator, **kwargs)
def has_select(self, locator, **kwargs):
"""
Checks if the page or current node has a select field with the given label, name, or id.
You can specify which option should currently be selected::
page.has_select("Language", selected="German")
For multiple select boxes, several options may be specified::
page.has_select("Language", selected=["English", "German"])
It's also possible to check if the exact set of options exists for this select box::
page.has_select("Language", options=["English", "German", "Spanish"])
You can also check for a partial set of options::
page.has_select("Language", with_options=["English", "German"])
Args:
locator (str): The label, name, or id of a select box.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
return self.has_selector("select", locator, **kwargs)
def has_no_select(self, locator, **kwargs):
"""
Checks if the page or current node has no select field with the given label, name, or id.
See :meth:`has_select`.
Args:
locator (str): The label, name, or id of a select box.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.has_no_selector("select", locator, **kwargs)
def has_table(self, locator, **kwargs):
"""
Checks if the page or current node has a table with the given id or caption::
page.has_table("People")
Args:
locator (str): The id or caption of a table.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
return self.has_selector("table", locator, **kwargs)
def has_no_table(self, locator, **kwargs):
"""
Checks if the page or current node has no table with the given id or caption. See
:meth:`has_table`.
Args:
locator (str): The id or caption of a table.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.has_no_selector("table", locator, **kwargs)
def has_unchecked_field(self, locator, **kwargs):
"""
Checks if the page or current node has a radio button or checkbox with the given label,
value, or id that is currently unchecked.
Args:
locator (str): The label, name, or id of an unchecked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it exists.
"""
kwargs["checked"] = False
return self.has_selector("field", locator, **kwargs)
def has_no_unchecked_field(self, locator, **kwargs):
"""
Checks if the page or current node has no radio button or checkbox with the given label,
value, or id that is currently unchecked.
Args:
locator (str): The label, name, or id of an unchecked field.
**kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.
Returns:
bool: Whether it doesn't exist.
"""
kwargs["checked"] = False
return self.has_no_selector("field", locator, **kwargs)
def assert_text(self, *args, **kwargs):
"""
Asserts that the page or current node has the given text content, ignoring any HTML tags.
Args:
*args: Variable length argument list for :class:`TextQuery`.
**kwargs: Arbitrary keyword arguments for :class:`TextQuery`.
Returns:
True
Raises:
ExpectationNotMet: If the assertion hasn't succeeded during the wait time.
"""
query = TextQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_text():
count = query.resolve_for(self)
if not (matches_count(count, query.options) and
(count > 0 or expects_none(query.options))):
raise ExpectationNotMet(query.failure_message)
return True
return assert_text()
def assert_no_text(self, *args, **kwargs):
"""
Asserts that the page or current node doesn't have the given text content, ignoring any
HTML tags.
Args:
*args: Variable length argument list for :class:`TextQuery`.
**kwargs: Arbitrary keyword arguments for :class:`TextQuery`.
Returns:
True
Raises:
ExpectationNotMet: If the assertion hasn't succeeded during the wait time.
"""
query = TextQuery(*args, **kwargs)
@self.synchronize(wait=query.wait)
def assert_no_text():
count = query.resolve_for(self)
if matches_count(count, query.options) and (
count > 0 or expects_none(query.options)):
raise ExpectationNotMet(query.negative_failure_message)
return True
return assert_no_text()
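# Sketch of the text assertions (page text hypothetical; ``wait`` is forwarded
# to the underlying TextQuery):
#
#   page.assert_text("Welcome back")       # raises ExpectationNotMet on failure
#   page.assert_no_text("Error", wait=2)   # retries for up to 2 seconds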
@predicate
def has_text(self, *args, **kwargs):
"""
Checks if the page or current node has the given text content, ignoring any HTML tags.
Whitespace is normalized in both the node's text and the passed text parameter. Note that
whitespace isn't normalized in a passed regular expression, since normalizing whitespace
in a regular expression isn't easy and doesn't seem worth the trouble.
By default it will check if the text occurs at least once, but a different number can be
specified. ::
page.has_text("lorem ipsum", between=range(2, 5))
Args:
*args: Variable length argument list for :class:`TextQuery`.
**kwargs: Arbitrary keyword arguments for :class:`TextQuery`.
Returns:
bool: Whether it exists.
"""
return self.assert_text(*args, **kwargs)
has_content = has_text
""" Alias for :meth:`has_text`. """
@predicate
def has_no_text(self, *args, **kwargs):
"""
Checks if the page or current node does not have the given text content, ignoring any HTML
tags and normalizing whitespace.
Args:
*args: Variable length argument list for :class:`TextQuery`.
**kwargs: Arbitrary keyword arguments for :class:`TextQuery`.
Returns:
bool: Whether it doesn't exist.
"""
return self.assert_no_text(*args, **kwargs)
has_no_content = has_no_text
""" Alias for :meth:`has_no_text`. """
| 34.115217 | 110 | 0.612247 | 3,846 | 31,386 | 4.910296 | 0.075663 | 0.021181 | 0.046704 | 0.062378 | 0.857718 | 0.8278 | 0.79947 | 0.774689 | 0.735504 | 0.687742 | 0 | 0.001446 | 0.294749 | 31,386 | 919 | 111 | 34.15234 | 0.851728 | 0.584783 | 0 | 0.326203 | 0 | 0 | 0.028399 | 0 | 0 | 0 | 0 | 0 | 0.213904 | 1 | 0.272727 | false | 0 | 0.048128 | 0 | 0.625668 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 |
0 | 8 | 67f89c358e93a036c5c1a6f1c5ed4be3f114e428 | 3,919 | py | Python | .c9/metadata/workspace/notMyRIOT.py | zb9c/LeargueDataAnalysis | 669e95c5012fa621be9ec5c7fbc56239e174b661 | ["MIT"] | null | null | null | .c9/metadata/workspace/notMyRIOT.py | zb9c/LeargueDataAnalysis | 669e95c5012fa621be9ec5c7fbc56239e174b661 | ["MIT"] | null | null | null | .c9/metadata/workspace/notMyRIOT.py | zb9c/LeargueDataAnalysis | 669e95c5012fa621be9ec5c7fbc56239e174b661 | ["MIT"] | null | null | null |
{"changed":true,"filter":false,"title":"notMyRIOT.py","tooltip":"/notMyRIOT.py","value":"import requests\nimport json\nimport io\ntry:\n to_unicode = unicode\nexcept NameError:\n to_unicode = str\nimport xlsxwriter\nimport time\n\ndef getID(sumName):\n URL = \"https://na1.api.riotgames.com/lol/summoner/v3/summoners/by-name/\" + sumName + \"?api_key=\" + APIKey\n response = requests.get(URL).json()\n ids = response['accountId']\n ids = str(ids)\n return ids\ndef getMatches(id):\n url = \"https://na1.api.riotgames.com/lol/match/v3/matchlists/by-account/\" + id + \"?champion=16\" + \"&api_key=\" + APIKey #Used to get Soraka Games\n #url = \"https://na1.api.riotgames.com/lol/match/v3/matchlists/by-account/\" + id + \"?api_key=\" + APIKey\n response = requests.get(url).json()\n return response\ndef matchInfo(matchID):\n url = \"https://na1.api.riotgames.com/lol/match/v3/matches/\" + matchID + \"?api_key=\" + APIKey\n response = requests.get(url).json()\n #print url\n return response\n\nAPIKey = \"RGAPI-735c8023-0382-41c9-89e5-a608e910533f\"\ngameIds = []\nWins = []\ndragons = []\nwardsPurchased = []\n\nwho = 'CheekyBastard'\nmatches = getMatches(str(getID(who)))\n\n#'clean' out matches array before saving it\nfor x in range(len(matches[\"matches\"])):\n \n gameIds.append(matches[\"matches\"][x][\"gameId\"])\n \n del matches[\"matches\"][x][\"queue\"]\n del matches[\"matches\"][x][\"season\"]\n del matches[\"matches\"][x][\"role\"]\n del matches[\"matches\"][x][\"timestamp\"]\n del matches[\"matches\"][x][\"lane\"]\n del matches[\"matches\"][x][\"platformId\"]\n \n#Save matches as WHO.json using the matches \nwith io.open( who + '.json', 'w', encoding='utf8') as outfile:\n str_ = json.dumps(matches,\n indent=4, sort_keys=True,\n separators=(',', ': '), ensure_ascii=True)\n outfile.write(to_unicode(str_))\n\n#Saves all matches as 0-99.json after cleaning them\nfor x in range(len(matches[\"matches\"])):\n time.sleep(2)\n \n #Pulls the data from Riot API\n match = matchInfo(str(gameIds[x]))\n \n #Clean data pulled from RiotAPI\n del match['platformId']\n del match['gameCreation']\n del match['gameDuration']\n del match['gameType']\n del match['gameVersion']\n del match['seasonId']\n \n \n \n \n #Saves response as json file\n with io.open(str(x) + who + '.json', 'w', encoding='utf8') as outfile:\n str_ = json.dumps(match,\n indent=4, sort_keys=True,\n separators=(',', ': '), ensure_ascii=False)\n outfile.write(to_unicode(str_))\n\n#Using that json we need to create a csv file that saves all that 
data\n","undoManager":{"mark":0,"position":-1,"stack":[[{"start":{"row":27,"column":10},"end":{"row":27,"column":52},"action":"remove","lines":["RGAPI-735c8023-0382-41c9-89e5-a608e910533f"],"id":1}]]},"ace":{"folds":[{"start":{"row":3,"column":4},"end":{"row":4,"column":24},"placeholder":"..."},{"start":{"row":5,"column":17},"end":{"row":6,"column":20},"placeholder":"..."},{"start":{"row":10,"column":19},"end":{"row":15,"column":14},"placeholder":"..."},{"start":{"row":16,"column":19},"end":{"row":20,"column":19},"placeholder":"..."},{"start":{"row":21,"column":23},"end":{"row":25,"column":19},"placeholder":"..."},{"start":{"row":37,"column":40},"end":{"row":46,"column":43},"placeholder":"..."},{"start":{"row":49,"column":62},"end":{"row":53,"column":35},"placeholder":"..."},{"start":{"row":56,"column":40},"end":{"row":78,"column":39},"placeholder":"..."}],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":9,"column":0},"end":{"row":9,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":25,"state":"start","mode":"ace/mode/python"}},"timestamp":1505603879000}
| 3,919 | 3,919 | 0.622097 | 559 | 3,919 | 4.332737 | 0.352415 | 0.009909 | 0.043353 | 0.044591 | 0.341453 | 0.272089 | 0.241536 | 0.241536 | 0.182081 | 0.182081 | 0 | 0.046559 | 0.117632 | 3,919 | 1 | 3,919 | 3,919 | 0.653846 | 0 | 0 | 0 | 0 | 7 | 0.719388 | 0.12551 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 |
0 | 9 | 67ff9fc3f7cbae9c2768c511a1d34bb8b325af72 | 155,288 | py | Python | sdk/lusid/api/portfolio_groups_api.py | mneedham/lusid-sdk-python | edabec16b357ba3fc48a53f3faacb4f94b18843e | ["MIT"] | null | null | null | sdk/lusid/api/portfolio_groups_api.py | mneedham/lusid-sdk-python | edabec16b357ba3fc48a53f3faacb4f94b18843e | ["MIT"] | null | null | null | sdk/lusid/api/portfolio_groups_api.py | mneedham/lusid-sdk-python | edabec16b357ba3fc48a53f3faacb4f94b18843e | ["MIT"] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.2808
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid.api_client import ApiClient
from lusid.exceptions import (
ApiTypeError,
ApiValueError
)
class PortfolioGroupsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_portfolio_to_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Add portfolio to group # noqa: E501
Add a single portfolio to a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_portfolio_to_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to add a portfolio to. (required)
:param str code: The code of the portfolio group to add a portfolio to. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime from which the portfolio will be added to the group.
:param ResourceId resource_id: The resource identifier of the portfolio to add to the portfolio group.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.add_portfolio_to_group_with_http_info(scope, code, **kwargs) # noqa: E501
def add_portfolio_to_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Add portfolio to group # noqa: E501
Add a single portfolio to a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_portfolio_to_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to add a portfolio to. (required)
:param str code: The code of the portfolio group to add a portfolio to. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime from which the portfolio will be added to the group.
:param ResourceId resource_id: The resource identifier of the portfolio to add to the portfolio group.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'resource_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method add_portfolio_to_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `add_portfolio_to_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `add_portfolio_to_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `add_portfolio_to_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `add_portfolio_to_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `add_portfolio_to_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `add_portfolio_to_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'resource_id' in local_var_params:
body_params = local_var_params['resource_id']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/portfolios', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
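# Usage sketch (scope/code values are hypothetical; assumes an authenticated
# ApiClient and that ResourceId is imported from lusid.models):
#
#   api = PortfolioGroupsApi(api_client)
#   group = api.add_portfolio_to_group(
#       "Finbourne-Examples", "UK-Equities",
#       resource_id=ResourceId(scope="Finbourne-Examples", code="UK-Trading"))
#   thread = api.add_portfolio_to_group("Finbourne-Examples", "UK-Equities",
#                                       async_req=True)  # asynchronous variant
#   group = thread.get()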
def add_sub_group_to_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Add sub group to group # noqa: E501
Add a portfolio group to a portfolio group as a sub group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_sub_group_to_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to add a portfolio group to. (required)
:param str code: The code of the portfolio group to add a portfolio group to. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime from which the sub group will be added to the group.
:param ResourceId resource_id: The resource identifier of the portfolio group to add to the portfolio group as a sub group.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.add_sub_group_to_group_with_http_info(scope, code, **kwargs) # noqa: E501
def add_sub_group_to_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Add sub group to group # noqa: E501
Add a portfolio group to a portfolio group as a sub group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_sub_group_to_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to add a portfolio group to. (required)
:param str code: The code of the portfolio group to add a portfolio group to. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime from which the sub group will be added to the group.
:param ResourceId resource_id: The resource identifier of the portfolio group to add to the portfolio group as a sub group.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'resource_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method add_sub_group_to_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `add_sub_group_to_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `add_sub_group_to_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `add_sub_group_to_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `add_sub_group_to_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `add_sub_group_to_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `add_sub_group_to_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'resource_id' in local_var_params:
body_params = local_var_params['resource_id']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/subgroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def build_transactions_for_portfolio_group(self, scope, code, transaction_query_parameters, **kwargs): # noqa: E501
"""[EARLY ACCESS] Build transactions for transaction portfolios in a portfolio group # noqa: E501
Build transactions for transaction portfolios in a portfolio group over a given interval of effective time. When the specified portfolio in a portfolio group is a derived transaction portfolio, the returned set of transactions is the union set of all transactions of the parent (and any grandparents etc.) and the specified derived transaction portfolio itself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.build_transactions_for_portfolio_group(scope, code, transaction_query_parameters, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param TransactionQueryParameters transaction_query_parameters: The query parameters which control how the output transactions are built. (required)
:param datetime as_at: The asAt datetime at which to build the transactions. Defaults to return the latest version of each transaction if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Transaction Type, use \"type eq 'Buy'\" Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Transaction\" domain to decorate onto the transactions. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Transaction/strategy/quantsignal\".
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VersionedResourceListOfOutputTransaction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.build_transactions_for_portfolio_group_with_http_info(scope, code, transaction_query_parameters, **kwargs) # noqa: E501
def build_transactions_for_portfolio_group_with_http_info(self, scope, code, transaction_query_parameters, **kwargs): # noqa: E501
"""[EARLY ACCESS] Build transactions for transaction portfolios in a portfolio group # noqa: E501
Build transactions for transaction portfolios in a portfolio group over a given interval of effective time. When the specified portfolio in a portfolio group is a derived transaction portfolio, the returned set of transactions is the union set of all transactions of the parent (and any grandparents etc.) and the specified derived transaction portfolio itself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.build_transactions_for_portfolio_group_with_http_info(scope, code, transaction_query_parameters, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param TransactionQueryParameters transaction_query_parameters: The query parameters which control how the output transactions are built. (required)
:param datetime as_at: The asAt datetime at which to build the transactions. Defaults to return the latest version of each transaction if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Transaction Type, use \"type eq 'Buy'\" Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Transaction\" domain to decorate onto the transactions. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Transaction/strategy/quantsignal\".
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VersionedResourceListOfOutputTransaction, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'transaction_query_parameters', 'as_at', 'filter', 'property_keys'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method build_transactions_for_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'transaction_query_parameters' is set
if ('transaction_query_parameters' not in local_var_params or
local_var_params['transaction_query_parameters'] is None):
raise ApiValueError("Missing the required parameter `transaction_query_parameters` when calling `build_transactions_for_portfolio_group`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `build_transactions_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `build_transactions_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `build_transactions_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `build_transactions_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `build_transactions_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `build_transactions_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'property_keys' in local_var_params:
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'transaction_query_parameters' in local_var_params:
body_params = local_var_params['transaction_query_parameters']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/transactions/$build', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VersionedResourceListOfOutputTransaction', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
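# Sketch of the filter and property_keys options documented above (all values
# hypothetical; query_params would be a TransactionQueryParameters instance):
#
#   txns = api.build_transactions_for_portfolio_group(
#       "Finbourne-Examples", "Global-Equity", query_params,
#       filter="type eq 'Buy'",
#       property_keys=["Instrument/system/Name"])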
def create_portfolio_group(self, scope, **kwargs): # noqa: E501
"""[EARLY ACCESS] Create portfolio group # noqa: E501
Create a portfolio group in a specific scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_portfolio_group(scope, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope that the portfolio group will be created in. (required)
:param CreatePortfolioGroupRequest create_portfolio_group_request: The definition and details of the portfolio group.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_portfolio_group_with_http_info(scope, **kwargs) # noqa: E501
def create_portfolio_group_with_http_info(self, scope, **kwargs): # noqa: E501
"""[EARLY ACCESS] Create portfolio group # noqa: E501
Create a portfolio group in a specific scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_portfolio_group_with_http_info(scope, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope that the portfolio group will be created in. (required)
:param CreatePortfolioGroupRequest create_portfolio_group_request: The definition and details of the portfolio group.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'create_portfolio_group_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_portfolio_group_request' in local_var_params:
body_params = local_var_params['create_portfolio_group_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
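# Usage sketch (assumes CreatePortfolioGroupRequest is imported from
# lusid.models; all identifiers are hypothetical):
#
#   group = api.create_portfolio_group(
#       "Finbourne-Examples",
#       create_portfolio_group_request=CreatePortfolioGroupRequest(
#           code="UK-Equities", display_name="UK Equities"))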
def delete_group_properties(self, scope, code, request_body, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete group properties # noqa: E501
Delete one or more properties from a single portfolio group. If the properties are time variant then an effective date time from which the properties will be deleted must be specified. If the properties are perpetual then it is invalid to specify an effective date time for deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_group_properties(scope, code, request_body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to delete properties from. (required)
:param str code: The code of the group to delete properties from. Together with the scope this uniquely identifies the group. (required)
:param list[str] request_body: The property keys of the properties to delete. These take the format {domain}/{scope}/{code} e.g. \"PortfolioGroup/Manager/Id\". Each property must be from the \"PortfolioGroup\" domain. (required)
:param str effective_at: The effective datetime or cut label at which to delete the properties.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DeletedEntityResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_group_properties_with_http_info(scope, code, request_body, **kwargs) # noqa: E501
def delete_group_properties_with_http_info(self, scope, code, request_body, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete group properties # noqa: E501
Delete one or more properties from a single portfolio group. If the properties are time variant then an effective date time from which the properties will be deleted must be specified. If the properties are perpetual then it is invalid to specify an effective date time for deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_group_properties_with_http_info(scope, code, request_body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to delete properties from. (required)
:param str code: The code of the group to delete properties from. Together with the scope this uniquely identifies the group. (required)
:param list[str] request_body: The property keys of the properties to delete. These take the format {domain}/{scope}/{code} e.g. \"PortfolioGroup/Manager/Id\". Each property must be from the \"PortfolioGroup\" domain. (required)
:param str effective_at: The effective datetime or cut label at which to delete the properties.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DeletedEntityResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'request_body', 'effective_at'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_group_properties" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'request_body' is set
if ('request_body' not in local_var_params or
local_var_params['request_body'] is None):
raise ApiValueError("Missing the required parameter `request_body` when calling `delete_group_properties`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'request_body' in local_var_params:
body_params = local_var_params['request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/properties/$delete', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeletedEntityResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
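# Usage sketch for deleting properties (property key and datetime hypothetical;
# per the docstring, effective_at is only valid for time-variant properties):
#
#   deleted = api.delete_group_properties(
#       "Finbourne-Examples", "UK-Equities",
#       ["PortfolioGroup/Manager/Id"],
#       effective_at="2020-01-01T00:00:00+00:00")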
def delete_portfolio_from_group(self, scope, code, portfolio_scope, portfolio_code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete portfolio from group # noqa: E501
Remove a single portfolio from a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_portfolio_from_group(scope, code, portfolio_scope, portfolio_code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to remove the portfolio from. (required)
:param str code: The code of the portfolio group to remove the portfolio from. Together with the scope this uniquely identifies the portfolio group. (required)
:param str portfolio_scope: The scope of the portfolio being removed from the portfolio group. (required)
:param str portfolio_code: The code of the portfolio being removed from the portfolio group. Together with the scope this uniquely identifies the portfolio to remove. (required)
:param datetime effective_at: The effective datetime from which the portfolio will be removed from the portfolio group.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_portfolio_from_group_with_http_info(scope, code, portfolio_scope, portfolio_code, **kwargs) # noqa: E501
def delete_portfolio_from_group_with_http_info(self, scope, code, portfolio_scope, portfolio_code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete portfolio from group # noqa: E501
Remove a single portfolio from a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_portfolio_from_group_with_http_info(scope, code, portfolio_scope, portfolio_code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to remove the portfolio from. (required)
:param str code: The code of the portfolio group to remove the portfolio from. Together with the scope this uniquely identifies the portfolio group. (required)
:param str portfolio_scope: The scope of the portfolio being removed from the portfolio group. (required)
:param str portfolio_code: The code of the portfolio being removed from the portfolio group. Together with the scope this uniquely identifies the portfolio to remove. (required)
:param datetime effective_at: The effective datetime from which the portfolio will be removed from the portfolio group.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'portfolio_scope', 'portfolio_code', 'effective_at'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_portfolio_from_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_from_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_from_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_from_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_from_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_from_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_from_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
if 'portfolio_scope' in local_var_params:
path_params['portfolioScope'] = local_var_params['portfolio_scope'] # noqa: E501
if 'portfolio_code' in local_var_params:
path_params['portfolioCode'] = local_var_params['portfolio_code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/portfolios/{portfolioScope}/{portfolioCode}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
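# Usage sketch (illustrative): removing a single portfolio from a group.
# `group_api` is the lusid.api.PortfolioGroupsApi instance from the sketch
# above; identifiers are placeholders, and `portfolios` is assumed to be the
# PortfolioGroup model's list of member resource ids.
#
#     group = group_api.delete_portfolio_from_group(
#         scope='MyScope', code='MyGroup',
#         portfolio_scope='MyScope', portfolio_code='MyPortfolio')
#     print([p.code for p in group.portfolios])  # portfolios left in the group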
def delete_portfolio_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete portfolio group # noqa: E501
Delete a single portfolio group. A portfolio group can be deleted while it still contains portfolios or sub groups. In this case, any portfolios or sub groups contained in this group will not be deleted; however, they will no longer be grouped together by this portfolio group. The deletion will be valid from the portfolio group's creation datetime, i.e. the portfolio group will no longer exist at any effective datetime from the asAt datetime of deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_portfolio_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to delete. (required)
:param str code: The code of the portfolio group to delete. Together with the scope this uniquely identifies the portfolio group to delete. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DeletedEntityResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_portfolio_group_with_http_info(scope, code, **kwargs) # noqa: E501
def delete_portfolio_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete portfolio group # noqa: E501
Delete a single portfolio group. A portfolio group can be deleted while it still contains portfolios or sub groups. In this case, any portfolios or sub groups contained in this group will not be deleted; however, they will no longer be grouped together by this portfolio group. The deletion will be valid from the portfolio group's creation datetime, i.e. the portfolio group will no longer exist at any effective datetime from the asAt datetime of deletion. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_portfolio_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to delete. (required)
:param str code: The code of the portfolio group to delete. Together with the scope this uniquely identifies the portfolio group to delete. (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DeletedEntityResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeletedEntityResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
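# Usage sketch (illustrative): deleting a whole portfolio group. Per the
# docstring above, the contained portfolios and sub groups are not deleted,
# only the grouping itself.
#
#     deleted = group_api.delete_portfolio_group(scope='MyScope', code='MyGroup')
#     # `deleted` is a DeletedEntityResponse; the group no longer exists at any
#     # effective datetime from this asAt onwards.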
def delete_sub_group_from_group(self, scope, code, subgroup_scope, subgroup_code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete sub group from group # noqa: E501
Remove a single portfolio group (sub group) from a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_sub_group_from_group(scope, code, subgroup_scope, subgroup_code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to remove the sub group from. (required)
:param str code: The code of the portfolio group to remove the sub group from. Together with the scope this uniquely identifies the portfolio group. (required)
:param str subgroup_scope: The scope of the sub group to remove from the portfolio group. (required)
:param str subgroup_code: The code of the sub group to remove from the portfolio group. Together with the scope this uniquely identifies the sub group. (required)
:param datetime effective_at: The effective datetime from which the sub group will be removed from the portfolio group.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_sub_group_from_group_with_http_info(scope, code, subgroup_scope, subgroup_code, **kwargs) # noqa: E501
def delete_sub_group_from_group_with_http_info(self, scope, code, subgroup_scope, subgroup_code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Delete sub group from group # noqa: E501
Remove a single portfolio group (sub group) from a portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_sub_group_from_group_with_http_info(scope, code, subgroup_scope, subgroup_code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to remove the sub group from. (required)
:param str code: The code of the portfolio group to remove the sub group from. Together with the scope this uniquely identifies the portfolio group. (required)
:param str subgroup_scope: The scope of the sub group to remove from the portfolio group. (required)
:param str subgroup_code: The code of the sub group to remove from the portfolio group. Together with the scope this uniquely identifies the sub group. (required)
:param datetime effective_at: The effective datetime from which the sub group will be removed from the portfolio group.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'subgroup_scope', 'subgroup_code', 'effective_at'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_sub_group_from_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_sub_group_from_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_sub_group_from_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `delete_sub_group_from_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_sub_group_from_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `delete_sub_group_from_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `delete_sub_group_from_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
if 'subgroup_scope' in local_var_params:
path_params['subgroupScope'] = local_var_params['subgroup_scope'] # noqa: E501
if 'subgroup_code' in local_var_params:
path_params['subgroupCode'] = local_var_params['subgroup_code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/subgroups/{subgroupScope}/{subgroupCode}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
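# Usage sketch (illustrative): removing a sub group mirrors removing a
# portfolio, but with the subgroup_* identifiers. The optional effective_at
# controls when the removal takes effect.
#
#     from datetime import datetime, timezone
#     group = group_api.delete_sub_group_from_group(
#         scope='MyScope', code='MyGroup',
#         subgroup_scope='MyScope', subgroup_code='MySubGroup',
#         effective_at=datetime(2020, 1, 1, tzinfo=timezone.utc))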
def get_group_properties(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get group properties # noqa: E501
List all the properties of a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_group_properties(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to list the properties for. (required)
:param str code: The code of the group to list the properties for. Together with the scope this uniquely identifies the group. (required)
:param str effective_at: The effective datetime or cut label at which to list the group's properties. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to list the group's properties. Defaults to returning the latest version of each property if not specified.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroupProperties
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_group_properties_with_http_info(scope, code, **kwargs) # noqa: E501
def get_group_properties_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get group properties # noqa: E501
List all the properties of a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_group_properties_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to list the properties for. (required)
:param str code: The code of the group to list the properties for. Together with the scope this uniquely identifies the group. (required)
:param str effective_at: The effective datetime or cut label at which to list the group's properties. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to list the group's properties. Defaults to returning the latest version of each property if not specified.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroupProperties, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_group_properties" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroupProperties', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
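# Usage sketch (illustrative): listing a group's properties. The returned
# PortfolioGroupProperties is assumed to expose the decorated properties as a
# dict keyed by property key.
#
#     props = group_api.get_group_properties(scope='MyScope', code='MyGroup')
#     for key, prop in props.properties.items():
#         print(key, prop.value)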
def get_holdings_for_portfolio_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get holdings for transaction portfolios in portfolio group # noqa: E501
Get the holdings of transaction portfolios in the specified portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_holdings_for_portfolio_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str effective_at: The effective datetime or cut label at which to retrieve the holdings of transaction portfolios in the portfolio group. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve the holdings of transaction portfolios in the portfolio group. Defaults to returning the latest version of the holdings if not specified.
:param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Holding\" domain to decorate onto the holdings. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Holding/system/Cost\".
:param bool by_taxlots: Whether or not to expand the holdings to return the underlying tax-lots. Defaults to False.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VersionedResourceListOfPortfolioHolding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_holdings_for_portfolio_group_with_http_info(scope, code, **kwargs) # noqa: E501
def get_holdings_for_portfolio_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get holdings for transaction portfolios in portfolio group # noqa: E501
Get the holdings of transaction portfolios in the specified portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_holdings_for_portfolio_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str effective_at: The effective datetime or cut label at which to retrieve the holdings of transaction portfolios in the portfolio group. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve the holdings of transaction portfolios in the portfolio group. Defaults to returning the latest version of the holdings if not specified.
:param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Holding\" domain to decorate onto the holdings. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Holding/system/Cost\".
:param bool by_taxlots: Whether or not to expand the holdings to return the underlying tax-lots. Defaults to False.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VersionedResourceListOfPortfolioHolding, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at', 'filter', 'property_keys', 'by_taxlots'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_holdings_for_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_holdings_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_holdings_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_holdings_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_holdings_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_holdings_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_holdings_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'property_keys' in local_var_params:
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
if 'by_taxlots' in local_var_params:
query_params.append(('byTaxlots', local_var_params['by_taxlots'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/holdings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VersionedResourceListOfPortfolioHolding', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
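# Usage sketch (illustrative): fetching holdings across the transaction
# portfolios in a group, expanded to tax-lots and decorated with one
# (placeholder) property key. The response rows are assumed to live on
# `values`, as with the other versioned resource lists.
#
#     holdings = group_api.get_holdings_for_portfolio_group(
#         scope='MyScope', code='MyGroup',
#         by_taxlots=True,
#         property_keys=['Instrument/system/Name'])
#     for h in holdings.values:
#         print(h.instrument_uid, h.units)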
def get_portfolio_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group # noqa: E501
Retrieve the definition of a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to retrieve the definition for. (required)
:param str code: The code of the portfolio group to retrieve the definition for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime at which to retrieve the portfolio group definition. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve the portfolio group definition. Defaults to returning the latest version of the portfolio group definition if not specified.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_portfolio_group_with_http_info(scope, code, **kwargs) # noqa: E501
def get_portfolio_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group # noqa: E501
Retrieve the definition of a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to retrieve the definition for. (required)
:param str code: The code of the portfolio group to retrieve the definition for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime at which to retrieve the portfolio group definition. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve the portfolio group definition. Defaults to returning the latest version of the portfolio group definition if not specified.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
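# Usage sketch (illustrative): retrieving a group definition using the
# async_req=True pattern shown in the docstrings above.
#
#     thread = group_api.get_portfolio_group(
#         scope='MyScope', code='MyGroup', async_req=True)
#     group = thread.get()  # blocks until the request thread completes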
def get_portfolio_group_commands(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group commands # noqa: E501
Gets all the commands that modified a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_commands(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to retrieve the commands for. (required)
:param str code: The code of the portfolio group to retrieve the commands for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime from_as_at: The lower bound asAt datetime (inclusive) from which to retrieve commands. There is no lower bound if this is not specified.
:param datetime to_as_at: The upper bound asAt datetime (inclusive) from which to retrieve commands. There is no upper bound if this is not specified.
:param str filter: Expression to filter the result set. For example, to filter on the User ID, use \"userId.id eq 'string'\". Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ResourceListOfProcessedCommand
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_portfolio_group_commands_with_http_info(scope, code, **kwargs) # noqa: E501
def get_portfolio_group_commands_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group commands # noqa: E501
Gets all the commands that modified a single portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_commands_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to retrieve the commands for. (required)
:param str code: The code of the portfolio group to retrieve the commands for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime from_as_at: The lower bound asAt datetime (inclusive) from which to retrieve commands. There is no lower bound if this is not specified.
:param datetime to_as_at: The upper bound asAt datetime (inclusive) from which to retrieve commands. There is no upper bound if this is not specified.
:param str filter: Expression to filter the result set. For example, to filter on the User ID, use \"userId.id eq 'string'\". Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ResourceListOfProcessedCommand, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'from_as_at', 'to_as_at', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_portfolio_group_commands" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'from_as_at' in local_var_params:
query_params.append(('fromAsAt', local_var_params['from_as_at'])) # noqa: E501
if 'to_as_at' in local_var_params:
query_params.append(('toAsAt', local_var_params['to_as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/commands', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceListOfProcessedCommand', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
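# Usage sketch (illustrative): listing the commands that modified a group
# within an asAt window. Field names on the ProcessedCommand rows are
# assumptions about the model.
#
#     from datetime import datetime, timedelta, timezone
#     now = datetime.now(timezone.utc)
#     commands = group_api.get_portfolio_group_commands(
#         scope='MyScope', code='MyGroup',
#         from_as_at=now - timedelta(days=7), to_as_at=now)
#     for c in commands.values:
#         print(c.description)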
def get_portfolio_group_expansion(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group expansion # noqa: E501
List all the portfolios in a group, including all portfolios within sub groups in the group. Each portfolio will be decorated with all of its properties unless a property filter is specified. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_expansion(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to expand. (required)
:param str code: The code of the portfolio group to expand. Together with the scope this uniquely identifies the portfolio group to expand. (required)
:param datetime effective_at: The effective datetime at which to expand the portfolio group. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to expand the portfolio group. Defaults to returning the latest version of each portfolio in the group if not specified.
:param list[str] property_filter: The restricted list of property keys from the \"Portfolio\" domain which will be decorated onto each portfolio. These take the format {domain}/{scope}/{code} e.g. \"Portfolio/Manager/Id\".
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ExpandedGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_portfolio_group_expansion_with_http_info(scope, code, **kwargs) # noqa: E501
def get_portfolio_group_expansion_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get portfolio group expansion # noqa: E501
List all the portfolios in a group, including all portfolios within sub groups in the group. Each portfolio will be decorated with all of its properties unless a property filter is specified. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_expansion_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to expand. (required)
:param str code: The code of the portfolio group to expand. Together with the scope this uniquely identifies the portfolio group to expand. (required)
:param datetime effective_at: The effective datetime at which to expand the portfolio group. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to expand the portfolio group. Defaults to returning the latest version of each portfolio in the group if not specified.
:param list[str] property_filter: The restricted list of property keys from the \"Portfolio\" domain which will be decorated onto each portfolio. These take the format {domain}/{scope}/{code} e.g. \"Portfolio/Manager/Id\".
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ExpandedGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at', 'property_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_portfolio_group_expansion" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_expansion`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_expansion`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_expansion`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_expansion`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_expansion`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_expansion`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'property_filter' in local_var_params:
query_params.append(('propertyFilter', local_var_params['property_filter'])) # noqa: E501
collection_formats['propertyFilter'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/expansion', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ExpandedGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
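# Usage sketch (illustrative): expanding a group into its portfolios,
# restricting decoration to a single (placeholder) property key. The
# `values` attribute is an assumption about the ExpandedGroup model.
#
#     expanded = group_api.get_portfolio_group_expansion(
#         scope='MyScope', code='MyGroup',
#         property_filter=['Portfolio/MyScope/Manager'])
#     print([p.id.code for p in expanded.values])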
def get_portfolio_group_relations(self, scope, code, **kwargs): # noqa: E501
"""[DEPRECATED] Get Relations for Portfolio Group # noqa: E501
Get relations for the specified Portfolio Group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_relations(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str effective_at: The effective datetime or cut label at which to retrieve relations. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve relations. Defaults to returning the latest LUSID asAt time if not specified.
:param str filter: Expression to filter the relations. Users should provide null or empty string for this field until further notice.
:param list[str] identifier_types: Identifier types (as property keys) used for referencing Persons or Legal Entities. These take the format {domain}/{scope}/{code} e.g. \"Person/CompanyDetails/Role\". They must be from the \"Person\" or \"LegalEntity\" domain. Only the identifier types stated will be used to look up relevant entities in relations. If not applicable, provide an empty array.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ResourceListOfRelation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_portfolio_group_relations_with_http_info(scope, code, **kwargs) # noqa: E501
def get_portfolio_group_relations_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[DEPRECATED] Get Relations for Portfolio Group # noqa: E501
Get relations for the specified Portfolio Group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_portfolio_group_relations_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str effective_at: The effective datetime or cut label at which to retrieve relations. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to retrieve relations. Defaults to returning the latest LUSID asAt time if not specified.
:param str filter: Expression to filter the relations. Users should provide null or empty string for this field until further notice.
:param list[str] identifier_types: Identifier types (as property keys) used for referencing Persons or Legal Entities. These take the format {domain}/{scope}/{code} e.g. \"Person/CompanyDetails/Role\". They must be from the \"Person\" or \"LegalEntity\" domain. Only the identifier types stated will be used to look up relevant entities in relations. If not applicable, provide an empty array.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ResourceListOfRelation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'as_at', 'filter', 'identifier_types'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_portfolio_group_relations" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_relations`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_relations`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_portfolio_group_relations`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_relations`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_relations`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_portfolio_group_relations`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'identifier_types' in local_var_params:
query_params.append(('identifierTypes', local_var_params['identifier_types'])) # noqa: E501
collection_formats['identifierTypes'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/relations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceListOfRelation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_transactions_for_portfolio_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get transactions for transaction portfolios in a portfolio group # noqa: E501
Get transactions for transaction portfolios in a portfolio group over a given interval of effective time. When the specified portfolio in a portfolio group is a derived transaction portfolio, the returned set of transactions is the union set of all transactions of the parent (and any grandparents etc.) and the specified derived transaction portfolio itself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transactions_for_portfolio_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str from_transaction_date: The lower bound effective datetime or cut label (inclusive) from which to retrieve the transactions. There is no lower bound if this is not specified.
:param str to_transaction_date: The upper bound effective datetime or cut label (inclusive) up to which to retrieve transactions. There is no upper bound if this is not specified.
:param datetime as_at: The asAt datetime at which to retrieve the transactions. Defaults to returning the latest version of each transaction if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Transaction Type, use \"type eq 'Buy'\". Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Transaction\" domain to decorate onto the transactions. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Transaction/strategy/quantsignal\".
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: VersionedResourceListOfTransaction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_transactions_for_portfolio_group_with_http_info(scope, code, **kwargs) # noqa: E501
def get_transactions_for_portfolio_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Get transactions for transaction portfolios in a portfolio group # noqa: E501
Get transactions for transaction portfolios in a portfolio group over a given interval of effective time. When the specified portfolio in a portfolio group is a derived transaction portfolio, the returned set of transactions is the union set of all transactions of the parent (and any grandparents etc.) and the specified derived transaction portfolio itself. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transactions_for_portfolio_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group. (required)
:param str code: The code of the portfolio group. Together with the scope this uniquely identifies the portfolio group. (required)
:param str from_transaction_date: The lower bound effective datetime or cut label (inclusive) from which to retrieve the transactions. There is no lower bound if this is not specified.
:param str to_transaction_date: The upper bound effective datetime or cut label (inclusive) up to which to retrieve transactions. There is no upper bound if this is not specified.
:param datetime as_at: The asAt datetime at which to retrieve the transactions. Defaults to returning the latest version of each transaction if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Transaction Type, use \"type eq 'Buy'\". Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:param list[str] property_keys: A list of property keys from the \"Instrument\" or \"Transaction\" domain to decorate onto the transactions. These take the format {domain}/{scope}/{code} e.g. \"Instrument/system/Name\" or \"Transaction/strategy/quantsignal\".
:param _return_http_data_only: return the response data without the HTTP
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(VersionedResourceListOfTransaction, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'from_transaction_date', 'to_transaction_date', 'as_at', 'filter', 'property_keys'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_transactions_for_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_transactions_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `get_transactions_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `get_transactions_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `get_transactions_for_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `get_transactions_for_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `get_transactions_for_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'from_transaction_date' in local_var_params:
query_params.append(('fromTransactionDate', local_var_params['from_transaction_date'])) # noqa: E501
if 'to_transaction_date' in local_var_params:
query_params.append(('toTransactionDate', local_var_params['to_transaction_date'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'property_keys' in local_var_params:
query_params.append(('propertyKeys', local_var_params['property_keys'])) # noqa: E501
collection_formats['propertyKeys'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/transactions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VersionedResourceListOfTransaction', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
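# Editor's note: a hedged sketch (not part of the generated file) of querying
# transactions over an effective-time window using the method above. The api
# object and all argument values are assumptions; the property key follows the
# format documented in the docstring.
def _example_get_transactions(api):
    # Restrict the effective window and decorate each transaction with the
    # instrument name property.
    page = api.get_transactions_for_portfolio_group(
        "Finbourne-Examples", "UK-Equities",
        from_transaction_date="2020-01-01",
        to_transaction_date="2020-03-31",
        property_keys=["Instrument/system/Name"])
    # LUSID resource lists expose the items on .values
    return page.values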
def list_portfolio_groups(self, scope, **kwargs): # noqa: E501
"""[EARLY ACCESS] List portfolio groups # noqa: E501
List all the portfolio groups in a single scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_portfolio_groups(scope, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope to list the portfolio groups in. (required)
:param datetime effective_at: The effective datetime at which to list the portfolio groups. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to list the portfolio groups. Defaults to returning the latest version of each portfolio group if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Display Name, use \"displayName eq 'string'\". Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ResourceListOfPortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_portfolio_groups_with_http_info(scope, **kwargs) # noqa: E501
def list_portfolio_groups_with_http_info(self, scope, **kwargs): # noqa: E501
"""[EARLY ACCESS] List portfolio groups # noqa: E501
List all the portfolio groups in a single scope. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_portfolio_groups_with_http_info(scope, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope to list the portfolio groups in. (required)
:param datetime effective_at: The effective datetime at which to list the portfolio groups. Defaults to the current LUSID system datetime if not specified.
:param datetime as_at: The asAt datetime at which to list the portfolio groups. Defaults to returning the latest version of each portfolio group if not specified.
:param str filter: Expression to filter the result set. For example, to filter on the Display Name, use \"displayName eq 'string'\". Read more about filtering results from LUSID here: https://support.lusid.com/filtering-results-from-lusid.
:param _return_http_data_only: return the response data without the HTTP
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ResourceListOfPortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'effective_at', 'as_at', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method list_portfolio_groups" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
if 'as_at' in local_var_params:
query_params.append(('asAt', local_var_params['as_at'])) # noqa: E501
if 'filter' in local_var_params:
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResourceListOfPortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
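# Editor's note: a minimal sketch (not part of the generated file) of listing
# portfolio groups with a filter expression in the documented syntax; the api
# object, scope, and filter value are assumptions.
def _example_list_groups(api):
    # Filter on the display name using the "displayName eq '...'" syntax
    # shown in the docstring above.
    groups = api.list_portfolio_groups(
        "Finbourne-Examples",
        filter="displayName eq 'Global Funds'")
    return [group.id for group in groups.values]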
def update_portfolio_group(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Update portfolio group # noqa: E501
Update the definition of a single portfolio group. Not all elements within a portfolio group definition are modifiable due to the potential implications for data already stored against the portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_portfolio_group(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to update the definition for. (required)
:param str code: The code of the portfolio group to update the definition for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime at which to update the definition.
:param UpdatePortfolioGroupRequest update_portfolio_group_request: The updated portfolio group definition.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_portfolio_group_with_http_info(scope, code, **kwargs) # noqa: E501
def update_portfolio_group_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Update portfolio group # noqa: E501
Update the definition of a single portfolio group. Not all elements within a portfolio group definition are modifiable due to the potential implications for data already stored against the portfolio group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_portfolio_group_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the portfolio group to update the definition for. (required)
:param str code: The code of the portfolio group to update the definition for. Together with the scope this uniquely identifies the portfolio group. (required)
:param datetime effective_at: The effective datetime at which to update the definition.
:param UpdatePortfolioGroupRequest update_portfolio_group_request: The updated portfolio group definition.
:param _return_http_data_only: return the response data without the HTTP
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'effective_at', 'update_portfolio_group_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_portfolio_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `update_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `update_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `update_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `update_portfolio_group`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `update_portfolio_group`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `update_portfolio_group`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
if 'effective_at' in local_var_params:
query_params.append(('effectiveAt', local_var_params['effective_at'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_portfolio_group_request' in local_var_params:
body_params = local_var_params['update_portfolio_group_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
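# Editor's note: a hedged sketch (not part of the generated file) of updating a
# group definition via the method above. The UpdatePortfolioGroupRequest field
# names used here (display_name, description) are assumptions about the model;
# `models` stands in for the SDK's model module.
def _example_update_group(api, models):
    request = models.UpdatePortfolioGroupRequest(
        display_name="UK Equities (renamed)",
        description="Renamed via the update endpoint")
    # PUT /api/portfoliogroups/{scope}/{code}; returns the updated PortfolioGroup.
    return api.update_portfolio_group(
        "Finbourne-Examples", "UK-Equities",
        update_portfolio_group_request=request)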
def upsert_group_properties(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Upsert group properties # noqa: E501
Update or insert one or more properties onto a single group. A property will be updated if it already exists and inserted if it does not. All properties must be of the domain 'PortfolioGroup'. Properties have an effectiveFrom datetime from which the property is valid, and an effectiveUntil datetime until which it is valid. Not supplying an effectiveUntil datetime results in the property being valid indefinitely, or until the next effectiveFrom datetime of the property. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_group_properties(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to update or insert the properties onto. (required)
:param str code: The code of the group to update or insert the properties onto. Together with the scope this uniquely identifies the group. (required)
:param dict(str, ModelProperty) request_body: The properties to be updated or inserted onto the group. Each property in the request must be keyed by its unique property key. This has the format {domain}/{scope}/{code} e.g. \"PortfolioGroup/Manager/Id\".
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PortfolioGroupProperties
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.upsert_group_properties_with_http_info(scope, code, **kwargs) # noqa: E501
def upsert_group_properties_with_http_info(self, scope, code, **kwargs): # noqa: E501
"""[EARLY ACCESS] Upsert group properties # noqa: E501
Update or insert one or more properties onto a single group. A property will be updated if it already exists and inserted if it does not. All properties must be of the domain 'PortfolioGroup'. Properties have an effectiveFrom datetime from which the property is valid, and an effectiveUntil datetime until which it is valid. Not supplying an effectiveUntil datetime results in the property being valid indefinitely, or until the next effectiveFrom datetime of the property. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upsert_group_properties_with_http_info(scope, code, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str scope: The scope of the group to update or insert the properties onto. (required)
:param str code: The code of the group to update or insert the properties onto. Together with the scope this uniquely identifies the group. (required)
:param dict(str, ModelProperty) request_body: The properties to be updated or inserted onto the group. Each property in the request must be keyed by its unique property key. This has the format {domain}/{scope}/{code} e.g. \"PortfolioGroup/Manager/Id\".
:param _return_http_data_only: return the response data without the HTTP
status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If a single
number is provided, it is treated as the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PortfolioGroupProperties, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['scope', 'code', 'request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method upsert_group_properties" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
if ('scope' in local_var_params and
len(local_var_params['scope']) > 64):
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('scope' in local_var_params and
len(local_var_params['scope']) < 1):
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']): # noqa: E501
raise ApiValueError("Invalid value for parameter `scope` when calling `upsert_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) > 64):
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_group_properties`, length must be less than or equal to `64`") # noqa: E501
if ('code' in local_var_params and
len(local_var_params['code']) < 1):
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_group_properties`, length must be greater than or equal to `1`") # noqa: E501
if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']): # noqa: E501
raise ApiValueError("Invalid value for parameter `code` when calling `upsert_group_properties`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'scope' in local_var_params:
path_params['scope'] = local_var_params['scope'] # noqa: E501
if 'code' in local_var_params:
path_params['code'] = local_var_params['code'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'request_body' in local_var_params:
body_params = local_var_params['request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/plain', 'application/json', 'text/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
# set the LUSID header
header_params['X-LUSID-SDK-Language'] = 'Python'
header_params['X-LUSID-SDK-Version'] = '0.11.2808'
return self.api_client.call_api(
'/api/portfoliogroups/{scope}/{code}/properties/$upsert', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortfolioGroupProperties', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
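# Editor's note: a hedged sketch (not part of the generated file) of upserting
# group properties via the method above. Per the docstring, each entry in the
# request body is keyed by its property key; the ModelProperty/PropertyValue
# constructor arguments shown are assumptions about the model classes, and
# `models` stands in for the SDK's model module.
def _example_upsert_group_properties(api, models):
    request_body = {
        "PortfolioGroup/Manager/Id": models.ModelProperty(
            key="PortfolioGroup/Manager/Id",
            value=models.PropertyValue(label_value="FUND_MGR_0042"))
    }
    # POST /api/portfoliogroups/{scope}/{code}/properties/$upsert
    return api.upsert_group_properties(
        "Finbourne-Examples", "UK-Equities", request_body=request_body)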
# ---- hexsha: db090c564c3149498b75a1c0932152d88c3d7046 | size: 182 | ext: py | lang: Python
# ---- file: tests/test_classical/__init__.py | repo: infinitemugen/genrl
# ---- commit: 602587417ce167380c90a726764a3efa4643dc38 | licenses: ["MIT"]
from tests.test_classical.test_common import TestTrainer, TestModels
from tests.test_classical.test_bandits import TestBandit
from tests.test_classical.test_agents import TestAgents
# ---- hexsha: c040f6ee38cd450de39f9b43f73efed874ded3e1 | size: 19,375 | ext: py | lang: Python
# ---- file: pynos/versions/base/yang/brocade_fcoe.py | repo: bdeetz/pynos
# ---- commit: bd8a34e98f322de3fc06750827d8bbc3a0c00380 | licenses: ["Apache-2.0"]
# ---- stars: 12 (2015-09-21 to 2018-03-30) | issues: 10 (2016-09-15 to 2017-07-17) | forks: 6 (2015-08-14 to 2022-02-03)
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_fcoe(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def fcoe_fsb_fcoe_fsb_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_fsb = ET.SubElement(config, "fcoe-fsb", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fsb_enable = ET.SubElement(fcoe_fsb, "fcoe-fsb-enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fabric_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name.text = kwargs.pop('fcoe_fabric_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fabric_map_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fabric_map_vlan = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-vlan")
fcoe_fabric_map_vlan.text = kwargs.pop('fcoe_fabric_map_vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fabric_map_priority(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fabric_map_priority = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-priority")
fcoe_fabric_map_priority.text = kwargs.pop('fcoe_fabric_map_priority')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fabric_map_virtual_fabric(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fabric_map_virtual_fabric = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-virtual-fabric")
fcoe_fabric_map_virtual_fabric.text = kwargs.pop('fcoe_fabric_map_virtual_fabric')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fabric_map_fcmap(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fabric_map_fcmap = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-fcmap")
fcoe_fabric_map_fcmap.text = kwargs.pop('fcoe_fabric_map_fcmap')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fip_advertisement_fcoe_fip_advertisement_interval(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fip_advertisement = ET.SubElement(fcoe_fabric_map, "fcoe-fip-advertisement")
fcoe_fip_advertisement_interval = ET.SubElement(fcoe_fip_advertisement, "fcoe-fip-advertisement-interval")
fcoe_fip_advertisement_interval.text = kwargs.pop('fcoe_fip_advertisement_interval')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fabric_map_fcoe_fip_keep_alive_fcoe_fip_keep_alive_timeout(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fabric_map = ET.SubElement(fcoe, "fcoe-fabric-map")
fcoe_fabric_map_name_key = ET.SubElement(fcoe_fabric_map, "fcoe-fabric-map-name")
fcoe_fabric_map_name_key.text = kwargs.pop('fcoe_fabric_map_name')
fcoe_fip_keep_alive = ET.SubElement(fcoe_fabric_map, "fcoe-fip-keep-alive")
fcoe_fip_keep_alive_timeout = ET.SubElement(fcoe_fip_keep_alive, "fcoe-fip-keep-alive-timeout")
fcoe_fip_keep_alive_timeout.text = kwargs.pop('fcoe_fip_keep_alive_timeout')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_map_fcoe_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_map = ET.SubElement(fcoe, "fcoe-map")
fcoe_map_name = ET.SubElement(fcoe_map, "fcoe-map-name")
fcoe_map_name.text = kwargs.pop('fcoe_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_map_fcoe_map_fabric_map_fcoe_map_fabric_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_map = ET.SubElement(fcoe, "fcoe-map")
fcoe_map_name_key = ET.SubElement(fcoe_map, "fcoe-map-name")
fcoe_map_name_key.text = kwargs.pop('fcoe_map_name')
fcoe_map_fabric_map = ET.SubElement(fcoe_map, "fcoe-map-fabric-map")
fcoe_map_fabric_map_name = ET.SubElement(fcoe_map_fabric_map, "fcoe-map-fabric-map-name")
fcoe_map_fabric_map_name.text = kwargs.pop('fcoe_map_fabric_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_map_fcoe_map_cee_map_fcoe_map_cee_map_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_map = ET.SubElement(fcoe, "fcoe-map")
fcoe_map_name_key = ET.SubElement(fcoe_map, "fcoe-map-name")
fcoe_map_name_key.text = kwargs.pop('fcoe_map_name')
fcoe_map_cee_map = ET.SubElement(fcoe_map, "fcoe-map-cee-map")
fcoe_map_cee_map_leaf = ET.SubElement(fcoe_map_cee_map, "fcoe-map-cee-map-leaf")
fcoe_map_cee_map_leaf.text = kwargs.pop('fcoe_map_cee_map_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fcf_map_fcf_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fcf_map = ET.SubElement(fcoe, "fcoe-fcf-map")
fcf_map_name = ET.SubElement(fcoe_fcf_map, "fcf-map-name")
fcf_map_name.text = kwargs.pop('fcf_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fcf_map_fcf_map_fcoe_map_fcf_map_fcoe_map_leaf(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fcf_map = ET.SubElement(fcoe, "fcoe-fcf-map")
fcf_map_name_key = ET.SubElement(fcoe_fcf_map, "fcf-map-name")
fcf_map_name_key.text = kwargs.pop('fcf_map_name')
fcf_map_fcoe_map = ET.SubElement(fcoe_fcf_map, "fcf-map-fcoe-map")
fcf_map_fcoe_map_leaf = ET.SubElement(fcf_map_fcoe_map, "fcf-map-fcoe-map-leaf")
fcf_map_fcoe_map_leaf.text = kwargs.pop('fcf_map_fcoe_map_leaf')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def fcoe_fcoe_fcf_map_fcf_map_ag_rbid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe = ET.SubElement(config, "fcoe", xmlns="urn:brocade.com:mgmt:brocade-fcoe")
fcoe_fcf_map = ET.SubElement(fcoe, "fcoe-fcf-map")
fcf_map_name_key = ET.SubElement(fcoe_fcf_map, "fcf-map-name")
fcf_map_name_key.text = kwargs.pop('fcf_map_name')
fcf_map_ag_rbid = ET.SubElement(fcoe_fcf_map, "fcf-map-ag-rbid")
fcf_map_ag_rbid.text = kwargs.pop('fcf_map_ag_rbid')
callback = kwargs.pop('callback', self._callback)
return callback(config)
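# Editor's note: a minimal sketch (not part of the generated file) of the
# callback pattern used by every method above. Each method builds an
# ElementTree <config> element and hands it to the callback supplied at
# construction time, so a callback that serializes the payload is enough to
# inspect what would be sent to the device. ET is imported at the top of this
# file.
def _print_config(config):
    # Serialize the generated <config> subtree for inspection.
    print(ET.tostring(config).decode())

def _example_brocade_fcoe():
    fcoe = brocade_fcoe(callback=_print_config)
    # Builds <config><fcoe ...><fcoe-map><fcoe-map-name>default</fcoe-map-name>...
    fcoe.fcoe_fcoe_map_fcoe_map_name(fcoe_map_name="default")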
# ---- hexsha: c0567dc47314bcaa4a712ac3fe3a0180b2592eb0 | size: 799 | ext: py | lang: Python
# ---- file: tests/test_commandline_history.py | repo: sameerkhurana10/Montreal-Forced-Aligner
# ---- commit: b64b6bf4aa0394d59236510a05453740e7e7ebf5 | licenses: ["MIT"]
from montreal_forced_aligner.command_line.mfa import parser, print_history
def test_mfa_history(
multilingual_ipa_tg_corpus_dir, english_ipa_acoustic_model, english_us_ipa_dictionary, temp_dir
):
command = ["history", "--depth", "60"]
args, unknown = parser.parse_known_args(command)
print_history(args)
command = ["history"]
args, unknown = parser.parse_known_args(command)
print_history(args)
def test_mfa_history_verbose(
multilingual_ipa_tg_corpus_dir, english_ipa_acoustic_model, english_us_ipa_dictionary, temp_dir
):
command = ["history", "-v", "--depth", "60"]
args, unknown = parser.parse_known_args(command)
print_history(args)
command = ["history", "-v"]
args, unknown = parser.parse_known_args(command)
print_history(args)
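# Editor's note: a minimal, self-contained sketch (not part of the test file)
# of the parse_known_args pattern these tests rely on: known flags are parsed
# into the namespace, and anything unrecognised is returned separately instead
# of raising an error.
import argparse

def _example_parse_known_args():
    p = argparse.ArgumentParser()
    p.add_argument("--depth", type=int, default=10)
    p.add_argument("-v", action="store_true")
    args, unknown = p.parse_known_args(["--depth", "60", "--not-a-flag"])
    # --depth is consumed; the unrecognised token is passed back untouched.
    assert args.depth == 60 and unknown == ["--not-a-flag"]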
# ---- hexsha: c05c0f0212eb35a5145d50c8092904defe4d0772 | size: 3,580 | ext: py | lang: Python
# ---- file: app/forms.py | repo: SeulYoung/ContractManagement
# ---- commit: 4a5ae774c340df6718aaa9af8b9f291924fb8a33 | licenses: ["BSD-3-Clause"]
# ---- stars: 2 (2018-12-11 to 2020-11-17) | issues: 6 (2020-02-11 to 2022-02-10) | forks: 1 (2019-07-12)
from django import forms
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db.models import Q
class RegistrationForm(forms.Form):
username = forms.CharField(label='Username', max_length=40)
email = forms.EmailField(label='Email')
password1 = forms.CharField(label='Password1', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password2', widget=forms.PasswordInput)
def clean_username(self):
username = self.cleaned_data.get('username')
if len(username) < 4:
raise forms.ValidationError("Username must be at least 4 characters")
elif len(username) > 50:
raise forms.ValidationError("Username must be at most 50 characters")
else:
filter_result = User.objects.filter(username=username)
if len(filter_result) > 0:
raise forms.ValidationError("Username already exists")
return username
def clean_email(self):
try:
email = self.cleaned_data.get('email')
except ValidationError:
raise forms.ValidationError("Invalid email format")
filter_result = User.objects.filter(email=email)
if len(filter_result) > 0:
raise forms.ValidationError("This email is already registered")
return email
def clean_password1(self):
password1 = self.cleaned_data.get('password1')
if len(password1) < 6:
raise forms.ValidationError("Password must be at least 6 characters")
elif len(password1) > 20:
raise forms.ValidationError("Password must be at most 20 characters")
return password1
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError("The two passwords do not match")
return password2
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=40)
password = forms.CharField(label='Password', widget=forms.PasswordInput)
def clean_username(self):
username = self.cleaned_data.get('username')
filter_result = User.objects.filter(Q(username=username) | Q(email=username))
if not filter_result:
raise forms.ValidationError("User does not exist")
return username
class EmailForm(forms.Form):
email = forms.EmailField(label='Email')
password = forms.CharField(label='Password', widget=forms.PasswordInput)
def clean_email(self):
        try:
            email = self.cleaned_data.get('email')
        except ValidationError:
            raise forms.ValidationError("Invalid email format")
        filter_result = User.objects.filter(email=email)
        if len(filter_result) > 0:
            raise forms.ValidationError("This email is already registered")
return email
class PasswordForm(forms.Form):
old_password = forms.CharField(label='OldPassword', widget=forms.PasswordInput)
password1 = forms.CharField(label='Password1', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password2', widget=forms.PasswordInput)
def clean_password1(self):
password1 = self.cleaned_data.get('password1')
        if len(password1) < 6:
            raise forms.ValidationError("Password must be at least 6 characters")
        elif len(password1) > 20:
            raise forms.ValidationError("Password must be at most 20 characters")
return password1
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError("两次密码不一致")
return password2
| 36.907216
| 85
| 0.667318
| 382
| 3,580
| 6.17801
| 0.175393
| 0.059322
| 0.148305
| 0.076271
| 0.783475
| 0.733475
| 0.733475
| 0.733475
| 0.715254
| 0.715254
| 0
| 0.023585
| 0.230168
| 3,580
| 96
| 86
| 37.291667
| 0.832729
| 0
| 0
| 0.7375
| 0
| 0
| 0.075698
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.35
| 0.05
| 0
| 0.4375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fbfeda7759d4eae6c6cbe29577b1a67f0ef08511
| 4,055
|
py
|
Python
|
resources/migrations/0091_add_two_permissions.py
|
mskoskel/respa
|
b1e148021b36e7153b9da60cff4a4a1167d23c90
|
[
"MIT"
] | 49
|
2015-10-21T06:25:31.000Z
|
2022-03-20T07:24:20.000Z
|
resources/migrations/0091_add_two_permissions.py
|
mskoskel/respa
|
b1e148021b36e7153b9da60cff4a4a1167d23c90
|
[
"MIT"
] | 728
|
2015-06-24T13:26:54.000Z
|
2022-03-24T12:18:41.000Z
|
resources/migrations/0091_add_two_permissions.py
|
digipointtku/respa
|
a529e0df4d3f072df7801adb5bf97a5f4abd1243
|
[
"MIT"
] | 46
|
2015-06-26T10:52:57.000Z
|
2021-12-17T09:38:25.000Z
|
# Generated by Django 2.2.10 on 2020-03-25 10:40
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('resources', '0090_more_changes_to_resource_price_fields'),
]
    operations = [
        migrations.AlterModelOptions(
            name='resourcegroup',
            options={
                'ordering': ('name',),
                'permissions': [
                    ('group:can_approve_reservation', 'Can approve reservation'),
                    ('group:can_make_reservations', 'Can make reservations'),
                    ('group:can_modify_reservations', 'Can modify reservations'),
                    ('group:can_ignore_opening_hours', 'Can make reservations outside opening hours'),
                    ('group:can_view_reservation_access_code', 'Can view reservation access code'),
                    ('group:can_view_reservation_extra_fields', 'Can view reservation extra fields'),
                    ('group:can_view_reservation_user', 'Can view reservation user'),
                    ('group:can_access_reservation_comments', 'Can access reservation comments'),
                    ('group:can_comment_reservations', 'Can create comments for a reservation'),
                    ('group:can_view_reservation_catering_orders', 'Can view reservation catering orders'),
                    ('group:can_modify_reservation_catering_orders', 'Can modify reservation catering orders'),
                    ('group:can_view_reservation_product_orders', 'Can view reservation product orders'),
                    ('group:can_modify_paid_reservations', 'Can modify paid reservations'),
                    ('group:can_bypass_payment', 'Can bypass payment for paid reservations'),
                    ('group:can_create_staff_event', 'Can create a reservation that is a staff event'),
                    ('group:can_create_special_type_reservation', 'Can create reservations of a non-normal type'),
                    ('group:can_bypass_manual_confirmation', 'Can bypass manual confirmation requirement for resources'),
                    ('group:can_create_reservations_for_other_users', 'Can create reservations for other registered users'),
                    ('group:can_create_overlapping_reservations', 'Can create overlapping reservations'),
                    ('group:can_ignore_max_reservations_per_user', 'Can ignore resources max reservations per user rule'),
                    ('group:can_ignore_max_period', 'Can ignore resources max period rule'),
                ],
                'verbose_name': 'Resource group',
                'verbose_name_plural': 'Resource groups',
            },
        ),
        migrations.AlterModelOptions(
            name='unit',
            options={
                'ordering': ('name',),
                'permissions': [
                    ('unit:can_approve_reservation', 'Can approve reservation'),
                    ('unit:can_make_reservations', 'Can make reservations'),
                    ('unit:can_modify_reservations', 'Can modify reservations'),
                    ('unit:can_ignore_opening_hours', 'Can make reservations outside opening hours'),
                    ('unit:can_view_reservation_access_code', 'Can view reservation access code'),
                    ('unit:can_view_reservation_extra_fields', 'Can view reservation extra fields'),
                    ('unit:can_view_reservation_user', 'Can view reservation user'),
                    ('unit:can_access_reservation_comments', 'Can access reservation comments'),
                    ('unit:can_comment_reservations', 'Can create comments for a reservation'),
                    ('unit:can_view_reservation_catering_orders', 'Can view reservation catering orders'),
                    ('unit:can_modify_reservation_catering_orders', 'Can modify reservation catering orders'),
                    ('unit:can_view_reservation_product_orders', 'Can view reservation product orders'),
                    ('unit:can_modify_paid_reservations', 'Can modify paid reservations'),
                    ('unit:can_bypass_payment', 'Can bypass payment for paid reservations'),
                    ('unit:can_create_staff_event', 'Can create a reservation that is a staff event'),
                    ('unit:can_create_special_type_reservation', 'Can create reservations of a non-normal type'),
                    ('unit:can_bypass_manual_confirmation', 'Can bypass manual confirmation requirement for resources'),
                    ('unit:can_create_reservations_for_other_users', 'Can create reservations for other registered users'),
                    ('unit:can_create_overlapping_reservations', 'Can create overlapping reservations'),
                    ('unit:can_ignore_max_reservations_per_user', 'Can ignore resources max reservations per user rule'),
                    ('unit:can_ignore_max_period', 'Can ignore resources max period rule'),
                ],
                'verbose_name': 'unit',
                'verbose_name_plural': 'units',
            },
        ),
    ]
| 184.318182
| 1,845
| 0.778052
| 509
| 4,055
| 5.907662
| 0.165029
| 0.05587
| 0.119721
| 0.038244
| 0.819421
| 0.813435
| 0.732291
| 0.732291
| 0.589957
| 0.52012
| 0
| 0.005504
| 0.103822
| 4,055
| 21
| 1,846
| 193.095238
| 0.821959
| 0.011344
| 0
| 0.266667
| 1
| 0
| 0.795857
| 0.372099
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.133333
| 0.066667
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
220ddd7b2680d607a7991a6c8b74cf2908d31a24
| 362
|
py
|
Python
|
product_management_models/product_supplies/views.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
product_management_models/product_supplies/views.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
product_management_models/product_supplies/views.py
|
reimibeta/django-product-management-models
|
f51e94cc6ae605ea21706ffe2baedc53b980112f
|
[
"Apache-2.0"
] | null | null | null |
# product supply
from product_management_models.product_supplies.class_views.product_supply_view_set import *
# product supply stock
from product_management_models.product_supplies.class_views.product_supply_stock_view_set import *
# product supply delivery
from product_management_models.product_supplies.class_views.product_supply_delivery_view_set import *
| 40.222222
| 101
| 0.889503
| 49
| 362
| 6.102041
| 0.265306
| 0.26087
| 0.210702
| 0.270903
| 0.826087
| 0.652174
| 0.652174
| 0.652174
| 0.652174
| 0.652174
| 0
| 0
| 0.069061
| 362
| 8
| 102
| 45.25
| 0.88724
| 0.162983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
224e52df1f90b29102d137741bcfefaf8b203f92
| 142
|
py
|
Python
|
Oefeningen/standalone/functions_8.py
|
Seviran/Python_3
|
e30ead250129d25bbc0a7ee2f6298775b2f4529a
|
[
"MIT"
] | null | null | null |
Oefeningen/standalone/functions_8.py
|
Seviran/Python_3
|
e30ead250129d25bbc0a7ee2f6298775b2f4529a
|
[
"MIT"
] | null | null | null |
Oefeningen/standalone/functions_8.py
|
Seviran/Python_3
|
e30ead250129d25bbc0a7ee2f6298775b2f4529a
|
[
"MIT"
] | null | null | null |
def multiple_letter_count(string):
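    # Build a {letter: occurrence_count} mapping; str.count() rescans the whole
    # string per character, so this is O(n^2) in the worst case but fine for short words.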
return {letter: string.count(letter) for letter in string}
print(multiple_letter_count('awesome'))
| 20.285714
| 62
| 0.767606
| 19
| 142
| 5.526316
| 0.526316
| 0.266667
| 0.361905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126761
| 142
| 7
| 63
| 20.285714
| 0.846774
| 0
| 0
| 0
| 0
| 0
| 0.049296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
224fa58f5d04d462331d8cccd967419c7638905c
| 23,156
|
py
|
Python
|
cronus/tests/test_cronus.py
|
ryanmwhitephd/cronus
|
7b0b995977789540e77d9a672a2ac40d7420938a
|
[
"Apache-2.0"
] | null | null | null |
cronus/tests/test_cronus.py
|
ryanmwhitephd/cronus
|
7b0b995977789540e77d9a672a2ac40d7420938a
|
[
"Apache-2.0"
] | 1
|
2020-05-22T15:47:01.000Z
|
2020-05-29T18:22:31.000Z
|
cronus/tests/test_cronus.py
|
ryanmwhitephd/cronus
|
7b0b995977789540e77d9a672a2ac40d7420938a
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © Her Majesty the Queen in Right of Canada, as represented
# by the Minister of Statistics Canada, 2019.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Class for Artemis MetaStore
"""
import unittest
import logging
import tempfile
import os, shutil
from pathlib import Path
import pyarrow as pa
from cronus.core.cronus import BaseObjectStore, JobBuilder
from artemis_format.pymodels.cronus_pb2 import (
CronusStore,
CronusObjectStore,
CronusObject,
)
from artemis_format.pymodels.cronus_pb2 import (
DummyMessage,
FileObjectInfo,
MenuObjectInfo,
ConfigObjectInfo,
DatasetObjectInfo,
)
from artemis_format.pymodels.menu_pb2 import Menu as Menu_pb
from artemis_format.pymodels.configuration_pb2 import Configuration
import uuid
logging.getLogger().setLevel(logging.INFO)
class CronusTestCase(unittest.TestCase):
def setUp(self):
print("================================================")
print("Beginning new TestCase %s" % self._testMethodName)
print("================================================")
pass
def tearDown(self):
pass
def test_menu(self):
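        # Round-trip a Menu protobuf through a temporary BaseObjectStore and
        # verify that name and uuid survive serialization.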
testmenu = Menu_pb()
print(type(testmenu))
print(testmenu)
testmenu.uuid = str(uuid.uuid4())
testmenu.name = f"{testmenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(testmenu.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
menu_uuid = store.register_content(testmenu, menuinfo).uuid
store.put(menu_uuid, testmenu)
amenu = Menu_pb()
store.get(menu_uuid, amenu)
self.assertEqual(testmenu.name, amenu.name)
self.assertEqual(testmenu.uuid, amenu.uuid)
def test_config(self):
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
config_uuid = store.register_content(myconfig, configinfo).uuid
store.put(config_uuid, myconfig)
aconfig = Configuration()
store.get(config_uuid, aconfig)
self.assertEqual(myconfig.name, aconfig.name)
self.assertEqual(myconfig.uuid, aconfig.uuid)
def test_arrow(self):
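        # Write 10 identical record batches into an in-memory Arrow file,
        # register the buffer under a dataset/partition/job, then read it back
        # both via store.get() and via store.open().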
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymsg = DummyMessage()
mymsg.name = "dummy"
mymsg.description = "really dumb"
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
print(menu_uuid)
print(config_uuid)
dataset = store.register_dataset(menu_uuid, config_uuid)
store.new_partition(dataset.uuid, "key")
job_id = store.new_job(dataset.uuid)
id_ = store.register_content(
buf, fileinfo, dataset_id=dataset.uuid, job_id=0, partition_key="key"
).uuid
print(store[id_].address)
store.put(id_, buf)
for item in Path(_path).iterdir():
print(item)
buf = pa.py_buffer(store.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
reader = store.open(id_)
self.assertEqual(reader.num_record_batches, 10)
def test_register_object(self):
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymsg = DummyMessage()
mymsg.name = "dummy"
mymsg.description = "really dumb"
mymenu = CronusObject()
mymenu.name = "menu"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
dataset = store.register_dataset(menu_uuid, config_uuid)
store.new_partition(dataset.uuid, "key")
path = dirpath + "/test/dummy.arrow"
with pa.OSFile(str(path), "wb") as f:
f.write(sink.getvalue())
id_ = store.register_content(
path, fileinfo, dataset_id=dataset.uuid, partition_key="key"
).uuid
print(store[id_].address)
buf = pa.py_buffer(store.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
def test_identical_files(self):
print("Testing add file from path")
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymsg = DummyMessage()
mymsg.name = "dummy"
mymsg.description = "really dumb"
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
dataset = store.register_dataset(menu_uuid, config_uuid)
store.new_partition(dataset.uuid, "key")
path = dirpath + "/test/dummy.arrow"
with pa.OSFile(str(path), "wb") as f:
f.write(sink.getvalue())
id_ = store.register_content(
path, fileinfo, dataset_id=dataset.uuid, partition_key="key"
).uuid
print(id_, store[id_].address)
buf = pa.py_buffer(store._get_object(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
path = dirpath + "/test/dummy2.arrow"
with pa.OSFile(str(path), "wb") as f:
f.write(sink.getvalue())
id_ = store.register_content(
path, fileinfo, dataset_id=dataset.uuid, partition_key="key"
).uuid
print(id_, store[id_].address)
buf = pa.py_buffer(store.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
print("Test Done ===========================")
def test_dir_glob(self):
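        # Register every *.arrow file under the store address in a single
        # glob-based call and confirm each object reads back as a 10-batch file.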
print("Testing directory globbing")
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymsg = DummyMessage()
mymsg.name = "dummy"
mymsg.description = "really dumb"
store_id = str(uuid.uuid4())
mystore = CronusObjectStore()
mystore.name = "test"
mystore.uuid = str(store_id)
mystore.parent_uuid = "" # top level store
with tempfile.TemporaryDirectory() as dirpath:
mystore.address = dirpath + "/test"
_path = Path(mystore.address)
_path.mkdir()
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
dataset = store.register_dataset(menu_uuid, config_uuid)
store.new_partition(dataset.uuid, "key")
path = dirpath + "/test/dummy.arrow"
with pa.OSFile(str(path), "wb") as f:
f.write(sink.getvalue())
path = dirpath + "/test/dummy2.arrow"
with pa.OSFile(str(path), "wb") as f:
f.write(sink.getvalue())
objs_ = store.register_content(
mystore.address,
fileinfo,
glob="*arrow",
dataset_id=dataset.uuid,
partition_key="key",
)
for obj_ in objs_:
print(obj_.uuid, store[obj_.uuid].address)
buf = pa.py_buffer(store.get(obj_.uuid))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
ds = store.list(suffix="dataset")
for d in ds:
p = d.uuid + ".part_key"
f = store.list(prefix=p, suffix="arrow")
print(f)
print("Test Done ===========================")
def test_register_dataset(self):
# Create a fake dataset
# from a menu_id and menu msg
# from a config_id and config msg
# add files
# add tables
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
store_id = str(uuid.uuid4())
mystore = CronusObjectStore()
mystore.name = "test"
mystore.uuid = str(store_id)
mystore.parent_uuid = "" # top level store
print("Testing directory globbing")
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
# schema = batch.schema.to_pybytes()
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.num_columns = 3
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(str(_path), "test")
store_id = store.store_uuid
print(store.store_info.created.ToDatetime())
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
print(menu_uuid)
print(config_uuid)
dataset = store.register_dataset(menu_uuid, config_uuid)
store.new_partition(dataset.uuid, "key")
job_id = store.new_job(dataset.uuid)
store.register_content(
buf,
fileinfo,
dataset_id=dataset.uuid,
partition_key="key",
job_id=job_id,
)
ds = store.list(suffix="dataset")
print(ds)
def test_validation(self):
print("Simulate production")
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
dataset = store.register_dataset(menu_uuid, config_uuid)
# Multiple streams
store.new_partition(dataset.uuid, "key1")
store.new_partition(dataset.uuid, "key2")
store.new_partition(dataset.uuid, "key3")
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
ids_ = []
parts = store.list_partitions(dataset.uuid)
# reload menu and config
newmenu = Menu_pb()
store.get(menu_uuid, newmenu)
newconfig = Configuration()
store.get(config_uuid, newconfig)
print(parts)
for _ in range(10):
job_id = store.new_job(dataset.uuid)
for key in parts:
ids_.append(
store.register_content(
buf,
fileinfo,
dataset_id=dataset.uuid,
job_id=job_id,
partition_key=key,
).uuid
)
store.put(ids_[-1], buf)
for id_ in ids_:
buf = pa.py_buffer(store.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
# Save the store, reload
store.save_store()
newstore = BaseObjectStore(str(_path), "test", store_uuid=store.store_uuid)
for id_ in ids_:
print("Get object %s", id_)
print(type(id_))
buf = pa.py_buffer(newstore.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
print(newmenu)
print(newconfig)
print("Simulation Test Done ===========================")
    def test_validation_reload(self):
        # Same production flow as test_validation, but reopens the saved store via store._name.
print("Simulate production")
data = [
pa.array([1, 2, 3, 4]),
pa.array(["foo", "bar", "baz", None]),
pa.array([True, None, False, True]),
]
batch = pa.RecordBatch.from_arrays(data, ["f0", "f1", "f2"])
sink = pa.BufferOutputStream()
writer = pa.RecordBatchFileWriter(sink, batch.schema)
for i in range(10):
writer.write_batch(batch)
writer.close()
buf = sink.getvalue()
mymenu = Menu_pb()
mymenu.uuid = str(uuid.uuid4())
mymenu.name = f"{mymenu.uuid}.menu.dat"
menuinfo = MenuObjectInfo()
menuinfo.created.GetCurrentTime()
bufmenu = pa.py_buffer(mymenu.SerializeToString())
myconfig = Configuration()
myconfig.uuid = str(uuid.uuid4())
myconfig.name = f"{myconfig.uuid}.config.dat"
configinfo = ConfigObjectInfo()
configinfo.created.GetCurrentTime()
bufconfig = pa.py_buffer(myconfig.SerializeToString())
with tempfile.TemporaryDirectory() as dirpath:
_path = dirpath + "/test"
store = BaseObjectStore(
str(_path), "test"
) # wrapper to the CronusStore message
# Following puts the menu and config to the datastore
menu_uuid = store.register_content(mymenu, menuinfo).uuid
config_uuid = store.register_content(myconfig, configinfo).uuid
dataset = store.register_dataset(menu_uuid, config_uuid)
# Multiple streams
store.new_partition(dataset.uuid, "key1")
store.new_partition(dataset.uuid, "key2")
store.new_partition(dataset.uuid, "key3")
fileinfo = FileObjectInfo()
fileinfo.type = 5
fileinfo.aux.description = "Some dummy data"
ids_ = []
parts = store.list_partitions(dataset.uuid)
# reload menu and config
newmenu = Menu_pb()
store.get(menu_uuid, newmenu)
newconfig = Configuration()
store.get(config_uuid, newconfig)
print(parts)
for _ in range(10):
job_id = store.new_job(dataset.uuid)
for key in parts:
ids_.append(
store.register_content(
buf,
fileinfo,
dataset_id=dataset.uuid,
job_id=job_id,
partition_key=key,
).uuid
)
store.put(ids_[-1], buf)
for id_ in ids_:
buf = pa.py_buffer(store.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
# Save the store, reload
store.save_store()
newstore = BaseObjectStore(
str(_path), store._name, store_uuid=store.store_uuid
)
for id_ in ids_:
print("Get object %s", id_)
print(type(id_))
buf = pa.py_buffer(newstore.get(id_))
reader = pa.ipc.open_file(buf)
self.assertEqual(reader.num_record_batches, 10)
print(newmenu)
print(newconfig)
print("Simulation Test Done ===========================")
if __name__ == "__main__":
unittest.main()
| 35.734568
| 87
| 0.563785
| 2,396
| 23,156
| 5.316778
| 0.11227
| 0.031635
| 0.019625
| 0.032028
| 0.831462
| 0.818746
| 0.817019
| 0.802182
| 0.79394
| 0.785776
| 0
| 0.008707
| 0.320522
| 23,156
| 647
| 88
| 35.789799
| 0.800877
| 0.058473
| 0
| 0.804264
| 0
| 0
| 0.062012
| 0.02611
| 0
| 0
| 0
| 0
| 0.027132
| 1
| 0.021318
| false
| 0.003876
| 0.023256
| 0
| 0.046512
| 0.071705
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
225b8ec6e0023088ba05f8100946997b3cbd3685
| 358
|
py
|
Python
|
nighres/data/__init__.py
|
marcobarilari/nighres
|
e503bb96a6a73f73020c5d9d7b540bc5f17699a8
|
[
"Apache-2.0"
] | 41
|
2017-08-15T12:23:31.000Z
|
2022-02-28T15:12:22.000Z
|
nighres/data/__init__.py
|
marcobarilari/nighres
|
e503bb96a6a73f73020c5d9d7b540bc5f17699a8
|
[
"Apache-2.0"
] | 130
|
2017-07-27T11:09:09.000Z
|
2022-03-31T10:05:07.000Z
|
nighres/data/__init__.py
|
marcobarilari/nighres
|
e503bb96a6a73f73020c5d9d7b540bc5f17699a8
|
[
"Apache-2.0"
] | 35
|
2017-08-17T17:05:41.000Z
|
2022-03-28T12:22:14.000Z
|
from nighres.data.download_data import download_7T_TRT
from nighres.data.download_data import download_DTI_2mm
from nighres.data.download_data import download_DOTS_atlas
from nighres.data.download_data import download_MASSP_atlas
from nighres.data.download_data import download_MP2RAGEME_sample
from nighres.data.download_data import download_AHEAD_template
| 51.142857
| 64
| 0.899441
| 54
| 358
| 5.62963
| 0.296296
| 0.217105
| 0.296053
| 0.453947
| 0.842105
| 0.842105
| 0.842105
| 0.302632
| 0
| 0
| 0
| 0.008982
| 0.067039
| 358
| 6
| 65
| 59.666667
| 0.901198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
2274ca6476f1e50112da58a409d41b44bf844021
| 2,491
|
py
|
Python
|
tests/integration/users/test_authentication.py
|
e-dang/Autogarden
|
b15217e5d4755fc028b8dc4255cbdcb77ead80f4
|
[
"MIT"
] | null | null | null |
tests/integration/users/test_authentication.py
|
e-dang/Autogarden
|
b15217e5d4755fc028b8dc4255cbdcb77ead80f4
|
[
"MIT"
] | null | null | null |
tests/integration/users/test_authentication.py
|
e-dang/Autogarden
|
b15217e5d4755fc028b8dc4255cbdcb77ead80f4
|
[
"MIT"
] | null | null | null |
import pytest
from django.http import HttpRequest
from users.authentication import EmailBackend
@pytest.mark.integration
class TestEmailBackend:
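    # Exercises EmailBackend.authenticate() and get_user() against a wrong
    # email, a wrong password, an inactive user, and the happy path.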
@pytest.mark.django_db
def test_authenticate_returns_none_when_email_does_not_belong_to_any_user(self, create_user, test_password):
request = HttpRequest()
backend = EmailBackend()
user = create_user()
email = user.email + 'some_random_chars'
ret_val = backend.authenticate(request, email=email, password=test_password)
assert ret_val is None
@pytest.mark.django_db
def test_authenticate_returns_none_when_password_is_incorrect(self, create_user, test_password):
request = HttpRequest()
backend = EmailBackend()
user = create_user()
password = test_password + 'some_random_chars'
ret_val = backend.authenticate(request, email=user.email, password=password)
assert ret_val is None
@pytest.mark.django_db
def test_authenticate_returns_none_when_user_is_inactive(self, create_user, test_password):
request = HttpRequest()
backend = EmailBackend()
user = create_user(is_active=False)
ret_val = backend.authenticate(request, email=user.email, password=test_password)
assert ret_val is None
@pytest.mark.django_db
def test_authenticate_returns_user_if_both_email_and_password_are_correct_and_user_is_active(self, create_user, test_password):
request = HttpRequest()
backend = EmailBackend()
user = create_user(is_active=True)
ret_val = backend.authenticate(request, email=user.email, password=test_password)
assert ret_val == user
@pytest.mark.django_db
def test_get_user_returns_none_if_provided_id_does_not_match_any_users(self, create_user):
user = create_user()
backend = EmailBackend()
ret_val = backend.get_user(user.pk + 1)
assert ret_val is None
@pytest.mark.django_db
def test_get_user_returns_none_if_specified_user_is_inactive(self, create_user):
user = create_user(is_active=False)
backend = EmailBackend()
ret_val = backend.get_user(user.pk)
assert ret_val is None
@pytest.mark.django_db
def test_get_user_returns_specified_user_when_user_is_active(self, create_user):
user = create_user(is_active=True)
backend = EmailBackend()
ret_val = backend.get_user(user.pk)
assert ret_val == user
| 32.350649
| 131
| 0.718988
| 321
| 2,491
| 5.196262
| 0.174455
| 0.083933
| 0.067146
| 0.07554
| 0.814149
| 0.814149
| 0.755396
| 0.755396
| 0.71223
| 0.61271
| 0
| 0.000507
| 0.20835
| 2,491
| 76
| 132
| 32.776316
| 0.845335
| 0
| 0
| 0.679245
| 0
| 0
| 0.013649
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 1
| 0.132075
| false
| 0.169811
| 0.056604
| 0
| 0.207547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
97e26b8a239b14375b224fa6ad990e5f15461370
| 21,030
|
py
|
Python
|
src/tf_transformers/models/t5/convert.py
|
legacyai/tf-transformers
|
65a5f9a4bcb3236483daa598a37b91673f56cb97
|
[
"Apache-2.0"
] | 116
|
2021-03-15T09:48:41.000Z
|
2022-03-24T05:15:51.000Z
|
src/tf_transformers/models/t5/convert.py
|
legacyai/tf-transformers
|
65a5f9a4bcb3236483daa598a37b91673f56cb97
|
[
"Apache-2.0"
] | 4
|
2021-03-20T11:20:57.000Z
|
2022-01-05T04:59:07.000Z
|
src/tf_transformers/models/t5/convert.py
|
legacyai/tf-transformers
|
65a5f9a4bcb3236483daa598a37b91673f56cb97
|
[
"Apache-2.0"
] | 9
|
2021-03-17T04:14:48.000Z
|
2021-09-13T07:15:31.000Z
|
import numpy as np
import tensorflow as tf
from absl import logging
from tf_transformers.core import keras_utils
def convert_t5_pt(model, config, model_name):
"""PT converter
Args:
model_hf: HuggingFace Model (TF)
model: tf_transformers model/layer
config: dict
Returns:
a function
"""
# When dropout, use_auto_regressive is enabled assertion won't work
SKIP_ASSERT = False
try:
# LegacyLayer
local_config = model._config_dict['decoder']
    except Exception:
        # LegacyModel
        local_config = model.model_config['decoder']
    if local_config['use_dropout']:
        logging.warning("Note: As `use_dropout` is True we will skip Assertions, please verify the model.")
        SKIP_ASSERT = True
    if local_config['use_auto_regressive']:
        raise ValueError(
            "Please save model checkpoint without `use_auto_regressive` and then reload it with `use_auto_regressive`."
        )
import torch
import transformers
transformers.logging.set_verbosity_error()
from_model_vars = [
"encoder.block.{}.layer.0.SelfAttention.q.weight",
"encoder.block.{}.layer.0.SelfAttention.k.weight",
"encoder.block.{}.layer.0.SelfAttention.v.weight",
"encoder.block.{}.layer.0.SelfAttention.o.weight",
"encoder.block.{}.layer.0.layer_norm.weight",
"encoder.block.{}.layer.1.DenseReluDense.wi.weight",
"encoder.block.{}.layer.1.DenseReluDense.wo.weight",
"encoder.block.{}.layer.1.layer_norm.weight",
]
to_model_vars = [
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/query/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/key/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/value/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention_output/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/pre_attention_norm/weight:0",
"tf_transformers/t5_encoder/transformer/layer_{}/intermediate/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/output/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention_layer_norm/weight:0",
]
# Simple Assertion
assert len(from_model_vars) == len(to_model_vars)
mapping_dict = {}
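    # Expand the per-layer name templates into a concrete
    # HF-variable-name -> tf_transformers-variable-name map.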
for index in range(len(from_model_vars)):
for i in range(config["num_hidden_layers"]):
mapping_dict[from_model_vars[index].format(i)] = to_model_vars[index].format(i)
# Only Layer 0
mapping_dict[
"encoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight"
] = "tf_transformers/t5_encoder/transformer/layer_0/self_attention/relative_attention_bias/embeddings:0"
# Word Embedding
mapping_dict["shared.weight"] = "tf_transformers/t5_encoder/word_embeddings/embeddings:0"
# Final Layer Norm weight
mapping_dict["encoder.final_layer_norm.weight"] = "tf_transformers/t5_encoder/last_layer_norm/weight:0"
# T5Model
from transformers import T5Model as PTT5Model
model_hf = PTT5Model.from_pretrained(model_name)
# HF model variable name to variable values, for fast retrieval
from_to_variable_dict = {name: var.detach().numpy() for name, var in model_hf.named_parameters()}
tf_transformers_model_index_dict = {}
for index, var in enumerate(model.variables):
tf_transformers_model_index_dict[var.name] = index
# legacy_ai <-- hub
assigned_map = []
# assigned_map_values = []
for original_var, legacy_var in mapping_dict.items():
index = tf_transformers_model_index_dict[legacy_var]
# If not in mapping_dict, then mostly it is from attention layer
if "query/kernel:0" in legacy_var or "key/kernel:0" in legacy_var or "value/kernel:0" in legacy_var:
# hub (2D) to tf_transformers (3D)
model.variables[index].assign(
np.reshape(
np.transpose(from_to_variable_dict.get(original_var)),
(
config["embedding_size"],
config["num_attention_heads"],
config["attention_head_size"],
),
)
)
assigned_map.append((original_var, legacy_var))
continue
elif "kernel:0" in legacy_var:
if list(model.variables[index].shape) == list(from_to_variable_dict.get(original_var).shape):
model.variables[index].assign(np.transpose(from_to_variable_dict.get(original_var)))
assigned_map.append((original_var, legacy_var))
continue
else:
model.variables[index].assign(np.transpose(from_to_variable_dict.get(original_var)))
assigned_map.append((original_var, legacy_var))
continue
model.variables[index].assign(from_to_variable_dict.get(original_var))
assigned_map.append((original_var, legacy_var))
# Decoder Side
# From vars (Transformer variables)
from_model_vars = [
"decoder.block.{}.layer.0.SelfAttention.q.weight",
"decoder.block.{}.layer.0.SelfAttention.k.weight",
"decoder.block.{}.layer.0.SelfAttention.v.weight",
"decoder.block.{}.layer.0.SelfAttention.o.weight",
"decoder.block.{}.layer.0.layer_norm.weight",
"decoder.block.{}.layer.1.EncDecAttention.q.weight",
"decoder.block.{}.layer.1.EncDecAttention.k.weight",
"decoder.block.{}.layer.1.EncDecAttention.v.weight",
"decoder.block.{}.layer.1.EncDecAttention.o.weight",
"decoder.block.{}.layer.1.layer_norm.weight",
"decoder.block.{}.layer.2.DenseReluDense.wi.weight",
"decoder.block.{}.layer.2.DenseReluDense.wo.weight",
"decoder.block.{}.layer.2.layer_norm.weight",
]
to_model_vars = [
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/query/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/key/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/value/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention_output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/pre_attention_norm/weight:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/query/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/key/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/value/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention_output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/pre_cross_attention_norm/weight:0",
"tf_transformers/t5_decoder/transformer/layer_{}/intermediate/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention_layer_norm/weight:0",
]
# Simple Assertion
assert len(from_model_vars) == len(to_model_vars)
mapping_dict = {}
for index in range(len(from_model_vars)):
for i in range(config["num_hidden_layers"]):
mapping_dict[from_model_vars[index].format(i)] = to_model_vars[index].format(i)
# Only Layer 0
mapping_dict[
"decoder.block.0.layer.0.SelfAttention.relative_attention_bias.weight"
] = "tf_transformers/t5_decoder/transformer/layer_0/self_attention/relative_attention_bias/embeddings:0"
# Final Layer Norm weight
mapping_dict["decoder.final_layer_norm.weight"] = "tf_transformers/t5_decoder/last_layer_norm/weight:0"
# HF model variable name to variable values, for fast retrieval
from_to_variable_dict = {name: var.detach().numpy() for name, var in model_hf.named_parameters()}
tf_transformers_model_index_dict = {}
for index, var in enumerate(model.variables):
tf_transformers_model_index_dict[var.name] = index
if (
var.name
== "tf_transformers/t5_decoder/transformer/layer_0/cross_attention/relative_attention_bias/embeddings:0"
):
model.variables[index].assign(tf.zeros_like(model.variables[index]))
continue
# legacy_ai <-- hub
assigned_map = []
# assigned_map_values = []
for original_var, legacy_var in mapping_dict.items():
index = tf_transformers_model_index_dict[legacy_var]
# If not in mapping_dict, then mostly it is from attention layer
if "query/kernel:0" in legacy_var or "key/kernel:0" in legacy_var or "value/kernel:0" in legacy_var:
# hub (2D) to tf_transformers (3D)
model.variables[index].assign(
np.reshape(
np.transpose(from_to_variable_dict.get(original_var)),
(
config["embedding_size"],
config["num_attention_heads"],
config["attention_head_size"],
),
)
)
assigned_map.append((original_var, legacy_var))
continue
elif "kernel:0" in legacy_var:
if list(model.variables[index].shape) == list(from_to_variable_dict.get(original_var).shape):
model.variables[index].assign(np.transpose(from_to_variable_dict.get(original_var)))
assigned_map.append((original_var, legacy_var))
continue
else:
model.variables[index].assign(np.transpose(from_to_variable_dict.get(original_var)))
assigned_map.append((original_var, legacy_var))
continue
model.variables[index].assign(from_to_variable_dict.get(original_var))
assigned_map.append((original_var, legacy_var))
if SKIP_ASSERT is False:
from transformers import T5Tokenizer
tokenizer = T5Tokenizer.from_pretrained(model_name)
text = "This is a long sentence to check how close models are."
inputs = tokenizer(text, return_tensors="pt")
outputs_hf = model_hf(inputs["input_ids"], decoder_input_ids=inputs["input_ids"])
outputs_hf = torch.sum(outputs_hf["last_hidden_state"], dim=-1).detach().numpy()
inputs = tokenizer(text, return_tensors="tf")
inputs_tf = {}
inputs_tf["encoder_input_ids"] = inputs["input_ids"]
inputs_tf["encoder_input_mask"] = inputs["attention_mask"]
inputs_tf["decoder_input_ids"] = inputs["input_ids"]
outputs_tf = model(inputs_tf)
outputs_tf = tf.reduce_sum(outputs_tf["token_embeddings"], axis=-1).numpy()
tf.debugging.assert_near(outputs_hf, outputs_tf, rtol=1.0)
def convert_t5_tf(model, config, model_name):
"""TF converter
Args:
model_hf: HuggingFace Model (TF)
model: tf_transformers model/layer
config: dict
Returns:
a function
"""
# When dropout, use_auto_regressive is enabled assertion won't work
SKIP_ASSERT = False
try:
# LegacyLayer
local_config = model._config_dict['decoder']
    except Exception:
        # LegacyModel
        local_config = model.model_config['decoder']
    if local_config['use_dropout']:
        logging.warning("Note: As `use_dropout` is True we will skip Assertions, please verify the model.")
        SKIP_ASSERT = True
    if local_config['use_auto_regressive']:
        raise ValueError(
            "Please save model checkpoint without `use_auto_regressive` and then reload it with `use_auto_regressive`."
        )
import transformers
transformers.logging.set_verbosity_error()
from_model_vars = [
"tf_t5model/encoder/block_._{}/layer_._0/SelfAttention/q/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._0/SelfAttention/k/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._0/SelfAttention/v/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._0/SelfAttention/o/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._0/layer_norm/weight:0",
"tf_t5model/encoder/block_._{}/layer_._1/DenseReluDense/wi/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._1/DenseReluDense/wo/kernel:0",
"tf_t5model/encoder/block_._{}/layer_._1/layer_norm/weight:0",
]
to_model_vars = [
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/query/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/key/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention/value/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention_output/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/pre_attention_norm/weight:0",
"tf_transformers/t5_encoder/transformer/layer_{}/intermediate/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/output/kernel:0",
"tf_transformers/t5_encoder/transformer/layer_{}/self_attention_layer_norm/weight:0",
]
# Simple Assertion
assert len(from_model_vars) == len(to_model_vars)
mapping_dict = {}
for index in range(len(from_model_vars)):
for i in range(config["num_hidden_layers"]):
mapping_dict[from_model_vars[index].format(i)] = to_model_vars[index].format(i)
# Only Layer 0
mapping_dict[
"tf_t5model/encoder/block_._0/layer_._0/SelfAttention/relative_attention_bias/embeddings:0"
] = "tf_transformers/t5_encoder/transformer/layer_0/self_attention/relative_attention_bias/embeddings:0"
# Word Embedding
mapping_dict["shared/shared/weight:0"] = "tf_transformers/t5_encoder/word_embeddings/embeddings:0"
# Final Layer Norm weight
mapping_dict["tf_t5model/encoder/final_layer_norm/weight:0"] = "tf_transformers/t5_encoder/last_layer_norm/weight:0"
# T5Model
from transformers import TFT5Model
model_hf = TFT5Model.from_pretrained(model_name)
from_to_variable_dict = {var.name: var for var in model_hf.variables}
tf_transformers_model_index_dict = {}
for index, var in enumerate(model.variables):
tf_transformers_model_index_dict[var.name] = index
# legacy_ai <-- hub
assigned_map = []
# assigned_map_values = []
for original_var, legacy_var in mapping_dict.items():
index = tf_transformers_model_index_dict[legacy_var]
# If not in mapping_dict, then mostly it is from attention layer
if "query/kernel:0" in legacy_var or "key/kernel:0" in legacy_var or "value/kernel:0" in legacy_var:
# hub (2D) to tf_transformers (3D)
model.variables[index].assign(
tf.reshape(
from_to_variable_dict.get(original_var),
(
config["embedding_size"],
config["num_attention_heads"],
config["attention_head_size"],
),
)
)
assigned_map.append((original_var, legacy_var))
continue
model.variables[index].assign(from_to_variable_dict.get(original_var))
assigned_map.append((original_var, legacy_var))
# Decoder Side
# From vars (Transformer variables)
from_model_vars = [
"tf_t5model/decoder/block_._{}/layer_._0/SelfAttention/q/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._0/SelfAttention/k/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._0/SelfAttention/v/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._0/SelfAttention/o/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._0/layer_norm/weight:0",
"tf_t5model/decoder/block_._{}/layer_._1/EncDecAttention/q/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._1/EncDecAttention/k/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._1/EncDecAttention/v/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._1/EncDecAttention/o/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._1/layer_norm/weight:0",
"tf_t5model/decoder/block_._{}/layer_._2/DenseReluDense/wi/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._2/DenseReluDense/wo/kernel:0",
"tf_t5model/decoder/block_._{}/layer_._2/layer_norm/weight:0",
]
to_model_vars = [
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/query/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/key/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention/value/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention_output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/pre_attention_norm/weight:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/query/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/key/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention/value/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/cross_attention_output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/pre_cross_attention_norm/weight:0",
"tf_transformers/t5_decoder/transformer/layer_{}/intermediate/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/output/kernel:0",
"tf_transformers/t5_decoder/transformer/layer_{}/self_attention_layer_norm/weight:0",
]
# Simple Assertion
assert len(from_model_vars) == len(to_model_vars)
mapping_dict = {}
for index in range(len(from_model_vars)):
for i in range(config["num_hidden_layers"]):
mapping_dict[from_model_vars[index].format(i)] = to_model_vars[index].format(i)
# Only Layer 0
mapping_dict[
"tf_t5model/decoder/block_._0/layer_._0/SelfAttention/relative_attention_bias/embeddings:0"
] = "tf_transformers/t5_decoder/transformer/layer_0/self_attention/relative_attention_bias/embeddings:0"
mapping_dict[
"tf_t5model/decoder/block_._0/layer_._1/EncDecAttention/relative_attention_bias/embeddings:0"
] = "tf_transformers/t5_decoder/transformer/layer_0/cross_attention/relative_attention_bias/embeddings:0"
# Final Layer Norm weight
mapping_dict["tf_t5model/decoder/final_layer_norm/weight:0"] = "tf_transformers/t5_decoder/last_layer_norm/weight:0"
from_to_variable_dict = {var.name: var for var in model_hf.variables}
tf_transformers_model_index_dict = {}
for index, var in enumerate(model.variables):
tf_transformers_model_index_dict[var.name] = index
# legacy_ai <-- hub
assigned_map = []
# assigned_map_values = []
for original_var, legacy_var in mapping_dict.items():
index = tf_transformers_model_index_dict[legacy_var]
# If not in mapping_dict, then mostly it is from attention layer
if "query/kernel:0" in legacy_var or "key/kernel:0" in legacy_var or "value/kernel:0" in legacy_var:
# hub (2D) to tf_transformers (3D)
model.variables[index].assign(
tf.reshape(
from_to_variable_dict.get(original_var),
(
config["embedding_size"],
config["num_attention_heads"],
config["attention_head_size"],
),
)
)
assigned_map.append((original_var, legacy_var))
continue
if (
original_var
== "tf_t5model/decoder/block_._0/layer_._1/EncDecAttention/relative_attention_bias/embeddings:0"
):
if original_var not in from_to_variable_dict:
model.variables[index].assign(tf.zeros_like(model.variables[index]))
assigned_map.append((original_var, legacy_var))
continue
model.variables[index].assign(from_to_variable_dict.get(original_var))
assigned_map.append((original_var, legacy_var))
if SKIP_ASSERT is False:
from transformers import T5Tokenizer
tokenizer = T5Tokenizer.from_pretrained(model_name)
text = "This is a long sentence to check how close models are."
inputs = tokenizer(text, return_tensors="tf")
outputs_hf = model_hf(inputs["input_ids"], decoder_input_ids=inputs["input_ids"])
outputs_hf = tf.reduce_sum(outputs_hf["last_hidden_state"], axis=-1).numpy()
inputs_tf = {}
inputs_tf["encoder_input_ids"] = inputs["input_ids"]
inputs_tf["encoder_input_mask"] = inputs["attention_mask"]
inputs_tf["decoder_input_ids"] = inputs["input_ids"]
outputs_tf = model(inputs_tf)
outputs_tf = tf.reduce_sum(outputs_tf["token_embeddings"], axis=-1).numpy()
if keras_utils.get_policy_name() == 'float32':
tf.debugging.assert_near(outputs_hf, outputs_tf, rtol=1.0)
| 46.423841
| 120
| 0.677699
| 2,621
| 21,030
| 5.109119
| 0.074781
| 0.07632
| 0.064521
| 0.055858
| 0.968636
| 0.964752
| 0.911657
| 0.896199
| 0.872228
| 0.812262
| 0
| 0.016334
| 0.208179
| 21,030
| 452
| 121
| 46.526549
| 0.787833
| 0.070756
| 0
| 0.710843
| 0
| 0
| 0.430726
| 0.367422
| 0
| 0
| 0
| 0
| 0.048193
| 1
| 0.006024
| false
| 0
| 0.033133
| 0
| 0.039157
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97f49d5c7e6cd996f56f9673f760ded621c3606f
| 58,780
|
py
|
Python
|
models.py
|
LucasCTN/Pupil-locator
|
981c900c5d06aa783406b6c24ee0e8ee140195f1
|
[
"MIT"
] | 32
|
2019-03-15T12:13:31.000Z
|
2022-03-24T03:30:32.000Z
|
models.py
|
LucasCTN/Pupil-locator
|
981c900c5d06aa783406b6c24ee0e8ee140195f1
|
[
"MIT"
] | 3
|
2020-05-14T04:48:06.000Z
|
2022-02-10T01:18:38.000Z
|
models.py
|
LucasCTN/Pupil-locator
|
981c900c5d06aa783406b6c24ee0e8ee140195f1
|
[
"MIT"
] | 14
|
2019-04-09T02:36:45.000Z
|
2022-03-24T03:27:11.000Z
|
import tensorflow
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
from base_model import BaseModel
tf.disable_v2_behavior()
class Simple(BaseModel):
"""
Convolution model:
"""
def __init__(self, model_name, cfg, logger):
super(Simple, self).__init__(model_name, cfg, logger)
self.logger.log("building the model...")
self.init_placeholders()
self.init_forward()
self.init_optimizer()
self.summary_op = tf.summary.merge_all()
def init_forward(self):
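        # Stack conv2d -> batch-norm (-> optional max-pool) blocks as configured,
        # then flatten and feed hand-rolled fully connected layers.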
cnn_input = self.X
        xavi = tensorflow.initializers.GlorotUniform()
assert len(self.cfg["filter_sizes"]) == len(self.cfg["n_filters"])
for i in range(len(self.cfg["filter_sizes"])):
cnn_input = tf.nn.dropout(cnn_input, self.keep_prob)
cnn_input = tf.layers.conv2d(cnn_input,
filters=self.cfg["n_filters"][i],
kernel_size=self.cfg["filter_sizes"][i],
padding='same',
activation=tf.nn.leaky_relu,
kernel_initializer=xavi)
cnn_input = tf.layers.batch_normalization(cnn_input,
training=self.train_flag,
momentum=0.99,
epsilon=0.001,
center=True,
scale=True)
if self.cfg["max_pool"][i] == 1:
cnn_input = tf.layers.max_pooling2d(cnn_input, pool_size=2, strides=2)
            # log what happens to the layers! :)
self.logger.log("layer {} conv2d: {}".format(i, cnn_input.get_shape()))
# Define fully connected layer
# First we need to reshape cnn output to [batch_size, -1]
a = tf.contrib.layers.flatten(cnn_input)
h_prev = a.get_shape().as_list()[1]
for i, h in enumerate(self.cfg["fc_layers"]):
# by using fully_connected, tf will take care of X*W+b
with tf.name_scope("fc_layer" + str(i)):
with tf.name_scope("weight_" + str(i)):
initial_value = tf.truncated_normal([h_prev, h], stddev=0.001)
w = tf.Variable(initial_value, name="fc_w_" + str(i))
self.variable_summaries(w)
with tf.name_scope("bias_" + str(i)):
b = tf.Variable(tf.zeros([h]), name='fc_b_' + str(i))
self.variable_summaries(b)
with tf.name_scope("Wx_plus_b_" + str(i)):
z = tf.matmul(a, w) + b
with tf.name_scope("L_ReLu_" + str(i)):
a = tf.nn.leaky_relu(z)
h_prev = h
# fc_input = tf.contrib.layers.fully_connected(fc_input, h, activation_fn=tf.nn.leaky_relu)
# use batch normalization. With batch normalization we can get 1% better results
# fc_input = tf.layers.batch_normalization(fc_input, training=(self.mode == "train"))
# use dropout
# fc_input = tf.nn.dropout(fc_input, keep_prob=self.keep_prob)
# show fully connected layers shape
self.logger.log("layer {} fully connected: {}".format(i, a.get_shape()))
self.logits = tf.contrib.layers.fully_connected(a, self.cfg["output_dim"], activation_fn=None)
# self.logits = tf.reshape(cnn_input, shape=(-1, self.cfg["output_dim"]))
self.loss = tf.losses.mean_squared_error(self.Y,
self.logits,
weights=[self.cfg["output_weights"][0:self.cfg["output_dim"]]])
# Training summary for the current batch_loss
tf.summary.scalar('loss', self.loss)
# YOLO implementation
# https://github.com/WojciechMormul/yolo2/blob/master/train.py
class YOLO(BaseModel):
"""
Convolution model: Yolo
"""
def __init__(self, model_name, cfg, logger):
super(YOLO, self).__init__(model_name, cfg, logger)
self.logger.log("building the model...")
self.init_placeholders()
self.init_forward()
self.init_optimizer()
self.summary_op = tf.summary.merge_all()
def maxpool_layer(self, x, size, stride, name):
with tf.name_scope(name):
x = tf.layers.max_pooling2d(x, size, stride, padding='SAME')
return x
def conv_layer(self, x, kernel, depth, train_logical, name):
with tf.variable_scope(name):
# x = tf.nn.dropout(x, keep_prob=self.keep_prob)
x = tf.layers.conv2d(x, depth, kernel, padding='SAME',
use_bias=False,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
kernel_regularizer=tf.keras.regularizers.l2(self.cfg["l2_beta"]))
x = tf.layers.batch_normalization(x, training=train_logical)
x = tf.nn.leaky_relu(x, alpha=0.1, name="ReLu")
return x
def passthrough_layer(self, a, b, kernel, depth, size, train_logical, name):
b = self.conv_layer(b, kernel, depth, train_logical, name)
b = tf.space_to_depth(b, size)
y = tf.concat([a, b], axis=3)
return y
def init_forward(self):
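        # YOLOv2-style backbone: stacked conv/maxpool blocks with a
        # space-to-depth passthrough merged back in before the 1x1 output head.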
x = self.X
self.logger.log("input shape: {}".format(x.get_shape()))
i = 1
# block 1
x = self.conv_layer(x, (3, 3), i * 16, self.train_flag, 'conv1')
self.logger.log("conv {}: {}".format(1, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool1')
self.logger.log("maxpool {}: {}".format(1, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 32, self.train_flag, 'conv2')
self.logger.log("conv {}: {}".format(1, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool2')
self.logger.log("maxpool {}: {}".format(2, x.get_shape()))
# block 2
x = self.conv_layer(x, (3, 3), i * 64, self.train_flag, 'conv3')
self.logger.log("conv {}: {}".format(3, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 32, self.train_flag, 'conv4')
self.logger.log("conv {}: {}".format(4, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 64, self.train_flag, 'conv5')
self.logger.log("conv {}: {}".format(5, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool5')
self.logger.log("maxpool {}: {}".format(5, x.get_shape()))
# block 3
x = self.conv_layer(x, (3, 3), i * 128, self.train_flag, 'conv6')
self.logger.log("conv {}: {}".format(6, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 64, self.train_flag, 'conv7')
self.logger.log("conv {}: {}".format(7, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 128, self.train_flag, 'conv8')
self.logger.log("conv {}: {}".format(8, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool8')
self.logger.log("maxpool {}: {}".format(8, x.get_shape()))
# block 4
x = self.conv_layer(x, (3, 3), i * 256, self.train_flag, 'conv9')
self.logger.log("conv {}: {}".format(9, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 128, self.train_flag, 'conv10')
self.logger.log("conv {}: {}".format(10, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 256, self.train_flag, 'conv11')
self.logger.log("conv {}: {}".format(11, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 128, self.train_flag, 'conv12')
self.logger.log("conv {}: {}".format(12, x.get_shape()))
passthrough = self.conv_layer(x, (3, 3), i * 256, self.train_flag, 'conv13')
self.logger.log("conv {}: {}".format(13, x.get_shape()))
x = self.maxpool_layer(passthrough, (2, 2), (2, 2), 'maxpool13')
self.logger.log("maxpool {}: {}".format(13, x.get_shape()))
# block 5
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv14')
self.logger.log("conv {}: {}".format(14, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 256, self.train_flag, 'conv15')
self.logger.log("conv {}: {}".format(15, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv16')
self.logger.log("conv {}: {}".format(16, x.get_shape()))
x = self.conv_layer(x, (1, 1), i * 256, self.train_flag, 'conv17')
self.logger.log("conv {}: {}".format(17, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv18')
self.logger.log("conv {}: {}".format(18, x.get_shape()))
# block 6
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv19')
self.logger.log("conv {}: {}".format(19, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv20')
self.logger.log("conv {}: {}".format(20, x.get_shape()))
x = self.passthrough_layer(x, passthrough, (3, 3), i * 32, 2, self.train_flag, 'conv21')
self.logger.log("conv {}: {}".format(21, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv22')
self.logger.log("conv {}: {}".format(22, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool22')
self.logger.log("maxpool {}: {}".format(22, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv23')
self.logger.log("conv {}: {}".format(23, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool23')
self.logger.log("maxpool {}: {}".format(23, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv24')
self.logger.log("conv {}: {}".format(24, x.get_shape()))
x = self.maxpool_layer(x, (2, 2), (2, 2), 'maxpool24')
self.logger.log("maxpool {}: {}".format(24, x.get_shape()))
x = self.conv_layer(x, (3, 3), i * 512, self.train_flag, 'conv26')
self.logger.log("conv {}: {}".format(26, x.get_shape()))
# Final layer
# x = self.conv_layer(x, (1, 1), self.cfg["output_dim"], self.train_flag, 'conv27')
x = tf.layers.conv2d(x, self.cfg["output_dim"], (1, 1),
padding='SAME',
use_bias=False,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv27")
x = tf.nn.leaky_relu(x, alpha=0.1, name="ReLu")
self.logger.log("conv {}: {}".format("Logits", x.get_shape()))
# Logits
self.logits = tf.reshape(x, shape=(-1, self.cfg["output_dim"]), name='y')
self.loss = tf.losses.mean_squared_error(self.Y,
self.logits,
weights=[self.cfg["output_weights"][0:self.cfg["output_dim"]]])
class NASNET(BaseModel):
"""
Convolution model: TF-Hub MobileNet V2 feature extractor with a dense head
"""
def __init__(self, model_name, cfg, logger):
super(NASNET, self).__init__(model_name, cfg, logger)
self.logger.log("building the model...")
self.init_placeholders()
self.init_forward()
self.init_optimizer()
self.summary_op = tf.summary.merge_all()
def maxpool_layer(self, x, size, stride, name):
with tf.name_scope(name):
x = tf.layers.max_pooling2d(x, size, stride, padding='SAME')
return x
def conv_layer(self, x, kernel, depth, train_logical, name):
with tf.variable_scope(name):
# x = tf.nn.dropout(x, keep_prob=self.keep_prob)
x = tf.layers.conv2d(x, depth, kernel, padding='SAME',
use_bias=False,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
kernel_regularizer=tf.keras.regularizers.l2(self.cfg["l2_beta"]))
x = tf.layers.batch_normalization(x, training=train_logical)
x = tf.nn.leaky_relu(x, alpha=0.1, name="ReLu")
return x
def init_forward(self):
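# NOTE: despite the class name, this loads a MobileNet V2 feature extractor
# from TF Hub and fine-tunes it (trainable=True).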
module = hub.Module("https://tfhub.dev/google/imagenet/mobilenet_v2_100_192/feature_vector/1",
trainable=True,
name="NASNET")
a = module(self.X)
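# the module maps each image to a fixed-length feature vector (the published
# mobilenet_v2_100 feature vector is 1280-D), which feeds the dense layers below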
h_prev = a.get_shape().as_list()[1]
layers = [512, 128]
for i, h in enumerate(layers):
# build the dense layer manually as X*W + b (tf.contrib.layers.fully_connected would do this for us)
with tf.name_scope("fc_layer" + str(i)):
with tf.name_scope("weight_" + str(i)):
initial_value = tf.truncated_normal([h_prev, h], stddev=0.001)
w = tf.Variable(initial_value, name="fc_w_" + str(i))
tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, w)
self.variable_summaries(w)
with tf.name_scope("bias_" + str(i)):
b = tf.Variable(tf.zeros([h]), name='fc_b_' + str(i))
self.variable_summaries(b)
with tf.name_scope("Wx_plus_b_" + str(i)):
z = tf.matmul(a, w) + b
with tf.name_scope("Batch_norm_" + str(i)):
z_bn = tf.layers.batch_normalization(z, training=self.train_flag)
with tf.name_scope("L_ReLu_" + str(i)):
a = tf.nn.leaky_relu(z_bn)
h_prev = h
self.logger.log("layer {} fully connected: {}".format(i, a.get_shape()))
self.logits = tf.contrib.layers.fully_connected(a, self.cfg["output_dim"], activation_fn=None)
self.loss = tf.losses.mean_squared_error(self.Y, self.logits)
# Training summary for the current batch_loss
tf.summary.scalar('loss', self.loss)
class Inception(BaseModel):
"""
Google inception model
"""
def __init__(self, model_name, cfg, logger):
super(Inception, self).__init__(model_name, cfg, logger)
self.m = 0.5
self.l2_reg = tf.keras.regularizers.l2(cfg["l2_beta"])
self.logger.log("building the model...")
self.init_placeholders()
self.init_forward()
self.init_optimizer()
self.summary_op = tf.summary.merge_all()
def bn_lrelu(self, x, train_logical):
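# shared helper: batch norm (high momentum, learned scale/shift) followed by a
# leaky ReLU with slope 0.17; train_logical switches between batch statistics
# (training) and the moving averages (inference)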
x = tf.layers.batch_normalization(x, training=train_logical, momentum=0.9997, scale=True, center=True)
x = tf.nn.leaky_relu(x, alpha=0.17)
return x
# Inception Block A
def block_a(self, net, name_scope, is_training):
with tf.variable_scope(name_or_scope=name_scope,
default_name="Inception_block_A"):
# Branch 0, 1x1
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=96 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_0a_1x1")
branch_0 = self.bn_lrelu(branch_0, is_training)
# Branch 1: 1x1 + 3x3
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=64 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1a_1x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1b_3x3")
branch_1 = self.bn_lrelu(branch_1, is_training)
# Branch 2: 1x1 + 3x3 + 3x3
with tf.variable_scope("branch_2"):
branch_2 = tf.layers.conv2d(inputs=net,
filters=64 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2a_1x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2b_3x3")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2c_3x3")
branch_2 = self.bn_lrelu(branch_2, is_training)
# Branch 3: AvgPool + 1x1
with tf.variable_scope("branch_3"):
branch_3 = tf.layers.average_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(1, 1),
padding='SAME',
name="AvgPool_3a_3x3")
branch_3 = tf.layers.conv2d(inputs=branch_3,
filters=96 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_3b_1x1")
branch_3 = self.bn_lrelu(branch_3, is_training)
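# each branch emits 96 * m channels, so block A concatenates to 384 * m
# channels (192 with the default m = 0.5) at unchanged spatial resolution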
return tf.concat([branch_0, branch_1, branch_2, branch_3], axis=3)
# Reduction block A
def block_a_reduction(self, net, name_scope, is_training):
with tf.variable_scope(name_or_scope=name_scope,
default_name="Reduction_block_A"):
# Branch 0, 3x3(V2)
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=384 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding='VALID',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_0a_3x3V2")
branch_0 = self.bn_lrelu(branch_0, is_training)
# Branch 1: 1x1 + 3x3 + 3x3V2
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=192 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding='SAME',
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1a_1x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=224 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2_1b_3x3")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=256 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2_1c_3x3V2")
branch_1 = self.bn_lrelu(branch_1, is_training)
# Branch 2: MaxPool(3x3)
with tf.variable_scope("branch_3"):
branch_2 = tf.layers.max_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(2, 2),
padding='VALID',
name="MaxPool_2a_3x3V2")
return tf.concat([branch_0, branch_1, branch_2], axis=3)
# Inception Block B
def block_b(self, net, name_scope, is_training):
with tf.variable_scope(name_or_scope=name_scope,
default_name="Inception_block_B"):
# Branch 0: 1x1
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=384 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_1x1")
branch_0 = self.bn_lrelu(branch_0, is_training)
# branch 1: 1x1 + 1x7 + 7x1
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=192 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_1a_1x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=224 * self.m,
kernel_size=(1, 7),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_1b_1x7")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=256 * self.m,
kernel_size=(7, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_1c_7x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
# branch 2: 1x1 + 1x7 + 7x1 + 1x7 + 7x1
with tf.variable_scope("branch_2"):
branch_2 = tf.layers.conv2d(inputs=net,
filters=192 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2_2a_1x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=192 * self.m,
kernel_size=(1, 7),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_2b_1x7")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=224 * self.m,
kernel_size=(7, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_2c_7x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=224 * self.m,
kernel_size=(1, 7),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_2d_1x7")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=256 * self.m,
kernel_size=(7, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_2e_7x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
# Branch 3: AvgPool + 1x1
with tf.variable_scope("branch_3"):
branch_3 = tf.layers.average_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(1, 1),
padding="SAME",
name="AvgPool_3a_3x3")
branch_3 = tf.layers.conv2d(inputs=branch_3,
filters=128 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_3b_1x1")
branch_3 = self.bn_lrelu(branch_3, is_training)
return tf.concat([branch_0, branch_1, branch_2, branch_3], axis=3)
# Reduction block B
def block_b_reduction(self, net, name_scope, is_training):
with tf.variable_scope(name_or_scope=name_scope,
default_name="Reduction_block_B"):
# Branch 0: 1x1 + 3x3(V,2)
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=192 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_0a_1x1")
branch_0 = self.bn_lrelu(branch_0, is_training)
branch_0 = tf.layers.conv2d(inputs=branch_0,
filters=192 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_0b_3x3V2")
branch_0 = self.bn_lrelu(branch_0, is_training)
# Branch 1: 1x1 + 1x7 + 7x1 + 3x3(V,2)
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=256 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_1a_1x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=256 * self.m,
kernel_size=(1, 7),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1b_1x7")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=320 * self.m,
kernel_size=(7, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1c_7x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=320 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_1d_3x3V2")
branch_1 = self.bn_lrelu(branch_1, is_training)
# Branch 2: MaxPool 3x3 (V,2)
with tf.variable_scope("branch_2"):
branch_2 = tf.layers.max_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(2, 2),
padding="VALID",
name="MaxPool_2a_3x3V2")
return tf.concat([branch_0, branch_1, branch_2], axis=3)
# Inception Block C
def block_c(self, net, name_scope, is_training):
with tf.variable_scope(name_or_scope=name_scope,
default_name="Inception_Block_C"):
# Branch 0: 1x1
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=256 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_1x1")
branch_0 = self.bn_lrelu(branch_0, is_training)
# Branch 1: 1x1 {1x3, 3x1}
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=384 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1a_1x1")
branch_1 = self.bn_lrelu(branch_1, is_training)
branch_1a = tf.layers.conv2d(inputs=branch_1,
filters=256 * self.m,
kernel_size=(1, 3),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1b0_1x3")
branch_1a = self.bn_lrelu(branch_1a, is_training)
branch_1b = tf.layers.conv2d(inputs=branch_1,
filters=256 * self.m,
kernel_size=(3, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_1b1_3x1")
branch_1b = self.bn_lrelu(branch_1b, is_training)
branch_1 = tf.concat([branch_1a, branch_1b], axis=3)
# Branch 2: 1x1, 3x1, 1x3 {3x1, 1x3}
with tf.variable_scope("branch_2"):
branch_2 = tf.layers.conv2d(inputs=net,
filters=384 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2a_1x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=448 * self.m,
kernel_size=(1, 3),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2b_1x3")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2 = tf.layers.conv2d(inputs=branch_2,
filters=512 * self.m,
kernel_size=(3, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2c_3x1")
branch_2 = self.bn_lrelu(branch_2, is_training)
branch_2a = tf.layers.conv2d(inputs=branch_2,
filters=256 * self.m,
kernel_size=(1, 3),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2d0_1x3")
branch_2a = self.bn_lrelu(branch_2a, is_training)
branch_2b = tf.layers.conv2d(inputs=branch_2,
filters=256 * self.m,
kernel_size=(3, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_2d1_3x1")
branch_2b = self.bn_lrelu(branch_2b, is_training)
branch_2 = tf.concat([branch_2a, branch_2b], axis=3)
# Branch 3: AvgPool, 1x1
with tf.variable_scope("branch_3"):
branch_3 = tf.layers.average_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(1, 1),
padding="SAME",
name="AvgPool_3a_3x3")
branch_3 = tf.layers.conv2d(inputs=branch_3,
filters=256 * self.m,
kernel_size=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_3b_1x1")
branch_3 = self.bn_lrelu(branch_3, is_training)
return tf.concat([branch_0, branch_1, branch_2, branch_3], axis=3)
def init_forward(self):
# make the stem
net = self.X
self.logger.log("net shape {}".format(net.get_shape()))
# Begin Inception Model
with tf.variable_scope(name_or_scope="InceptionV4"):
net = tf.layers.conv2d(inputs=net,
filters=32 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_stem0_3x3V2")
net = self.bn_lrelu(net, self.train_flag)
self.logger.log("stem0 shape {}".format(net.get_shape()))
net = tf.layers.conv2d(inputs=net,
filters=32 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="conv2d_stem1_3x3V1")
net = self.bn_lrelu(net, self.train_flag)
self.logger.log("stem1 shape {}".format(net.get_shape()))
net = tf.layers.conv2d(inputs=net,
filters=64 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_stem2_3x3")
net = self.bn_lrelu(net, self.train_flag)
self.logger.log("stem2 shape {}".format(net.get_shape()))
with tf.variable_scope("Mixed_3a"):
with tf.variable_scope("branch_0"):
net_a = tf.layers.conv2d(inputs=net,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_3x3s2")
net_a = self.bn_lrelu(net_a, self.train_flag)
with tf.variable_scope("branch_1"):
net_b = tf.layers.max_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(2, 2),
padding="VALID",
name="MaxPool_1a_3x3s2")
net = tf.concat([net_a, net_b], axis=3)
self.logger.log("Mixed_3a shape {}".format(net.get_shape()))
with tf.variable_scope("mixed_4a"):
# Branch 0: 1x1, 7x1, 1x7, 3x3v
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=64 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_3x3")
branch_0 = self.bn_lrelu(branch_0, self.train_flag)
branch_0 = tf.layers.conv2d(inputs=branch_0,
filters=64 * self.m,
kernel_size=(7, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0b_7x1")
branch_0 = self.bn_lrelu(branch_0, self.train_flag)
branch_0 = tf.layers.conv2d(inputs=branch_0,
filters=64 * self.m,
kernel_size=(1, 7),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0c_1x7")
branch_0 = self.bn_lrelu(branch_0, self.train_flag)
branch_0 = tf.layers.conv2d(inputs=branch_0,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0d_3x3V")
branch_0 = self.bn_lrelu(branch_0, self.train_flag)
# Branch 1: 1x1, 3x3v
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.conv2d(inputs=net,
filters=64 * self.m,
kernel_size=(1, 1),
strides=(1, 1),
padding="SAME",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_3x3")
branch_1 = self.bn_lrelu(branch_1, self.train_flag)
branch_1 = tf.layers.conv2d(inputs=branch_1,
filters=96 * self.m,
kernel_size=(3, 3),
strides=(1, 1),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0b_3x3V")
branch_1 = self.bn_lrelu(branch_1, self.train_flag)
net = tf.concat([branch_0, branch_1], axis=3)
self.logger.log("mixed_4a shape {}".format(net.get_shape()))
with tf.variable_scope("Mixed_5a"):
# Branch 0: 3x3
with tf.variable_scope("branch_0"):
branch_0 = tf.layers.conv2d(inputs=net,
filters=192 * self.m,
kernel_size=(3, 3),
strides=(2, 2),
padding="VALID",
kernel_regularizer=self.l2_reg,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
name="Conv2d_0a_3x3v")
branch_0 = self.bn_lrelu(branch_0, self.train_flag)
# Branch 1: MaxPool 3x3s2
with tf.variable_scope("branch_1"):
branch_1 = tf.layers.max_pooling2d(inputs=net,
pool_size=(3, 3),
strides=(2, 2),
padding="VALID",
name="MaxPool_0a_3x3s2")
net = tf.concat([branch_0, branch_1], axis=3)
self.logger.log("Mixed_5a shape {}".format(net.get_shape()))
# Block A: 3x
net = self.block_a(net, "Block_A0", self.train_flag)
self.logger.log("Block_A0 shape {}".format(net.get_shape()))
net = self.block_a(net, "Block_A1", self.train_flag)
self.logger.log("Block_A1 shape {}".format(net.get_shape()))
net = self.block_a(net, "Block_A2", self.train_flag)
self.logger.log("Block_A2 shape {}".format(net.get_shape()))
# Block A: Reduction
net = self.block_a_reduction(net, "Reduction_A", self.train_flag)
self.logger.log("Reduction_A shape {}".format(net.get_shape()))
# Block B: 4x
net = self.block_b(net, "Block_B0", self.train_flag)
self.logger.log("Block_B0 shape {}".format(net.get_shape()))
net = self.block_b(net, "Block_B1", self.train_flag)
self.logger.log("Block_B1 shape {}".format(net.get_shape()))
net = self.block_b(net, "Block_B2", self.train_flag)
self.logger.log("Block_B2 shape {}".format(net.get_shape()))
net = self.block_b(net, "Block_B3", self.train_flag)
self.logger.log("Block_B3 shape {}".format(net.get_shape()))
# # Block B reduction
# net = self.block_b_reduction(net, "Reduction_B", self.train_flag)
# self.logger.log("Reduction_B shape {}".format(net.get_shape()))
#
# # Block C: 4x
# net = self.block_c(net, "Block_C0", self.train_flag)
# self.logger.log("Block_C0 shape {}".format(net.get_shape()))
#
# net = self.block_c(net, "Block_C1", self.train_flag)
# self.logger.log("Block_C1 shape {}".format(net.get_shape()))
#
# net = self.block_c(net, "Block_C2", self.train_flag)
# self.logger.log("Block_C1 shape {}".format(net.get_shape()))
#
# net = self.block_c(net, "Block_C3", self.train_flag)
# self.logger.log("Block_C1 shape {}".format(net.get_shape()))
net = tf.nn.dropout(net, self.keep_prob, name="net_dropout")
self.GAP = tf.reduce_mean(net, axis=[1, 2], name="GAP")
self.logger.log("GAP shape {}".format(self.GAP.get_shape()))
# Final layer
units = self.GAP.get_shape().as_list()[1]
net = tf.reshape(self.GAP, (-1, 1, 1, units), name="reshaping")
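# reshaping the pooled vector to (N, 1, 1, C) lets the 1x1 conv below act as a
# fully connected layer over the globally averaged features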
net = tf.layers.conv2d(net, self.cfg["output_dim"], (1, 1),
padding='VALID',
kernel_initializer=tensorflow.initializers.GlorotUniform(),
kernel_regularizer=self.l2_reg,
use_bias=False,
name="final_conv")
net = tf.nn.relu(net, name="logits_relu")
self.logger.log("Final layer {}: {}".format("Logits", net.get_shape()))
# Logits
self.logits = tf.reshape(net, shape=(-1, self.cfg["output_dim"]), name='y')
self.loss = tf.losses.huber_loss(labels=self.Y,
predictions=self.logits,
weights=[self.cfg["output_weights"][0:self.cfg["output_dim"]]],
delta=1.0)
# Training summary for the current batch_loss
tf.summary.scalar('loss', self.loss)
class GAP(BaseModel):
"""
Convolution model: CNN stack with global average pooling and a dense head
"""
def __init__(self, model_name, cfg, logger):
super(GAP, self).__init__(model_name, cfg, logger)
self.logger.log("building the model...")
self.init_placeholders()
self.init_forward()
self.init_optimizer()
self.summary_op = tf.summary.merge_all()
def init_forward(self):
k = 4
cnn_input = self.X
assert len(self.cfg["filter_sizes"]) == len(self.cfg["n_filters"])
for i in range(len(self.cfg["filter_sizes"])):
# cnn_input = tf.nn.dropout(cnn_input, self.keep_prob)
cnn_input = tf.layers.conv2d(cnn_input,
filters=self.cfg["n_filters"][i] * k,
kernel_size=self.cfg["filter_sizes"][i],
padding='same',
activation=tf.nn.leaky_relu,
kernel_initializer=tensorflow.initializers.GlorotUniform(),
kernel_regularizer=tf.keras.regularizers.l2(self.cfg["l2_beta"]))
cnn_input = tf.layers.batch_normalization(cnn_input,
training=self.train_flag)
# log the layer shape as it changes
self.logger.log("layer {} conv2d: {}".format(i, cnn_input.get_shape()))
if self.cfg["max_pool"][i] == 1:
cnn_input = tf.layers.max_pooling2d(cnn_input, pool_size=2, strides=2)
# log the layer shape as it changes
self.logger.log("layer {} MaxPool: {}".format(i, cnn_input.get_shape()))
_, w, h, _ = cnn_input.get_shape()
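# pooling over the full (w, h) extent is global average pooling: each feature
# map collapses to one value per example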
cnn_input = tf.layers.average_pooling2d(cnn_input, (w, h), strides=1)
self.logger.log("layer {} AvgPool: {}".format(i, cnn_input.get_shape()))
# Define fully connected layer
# First we need to reshape cnn output to [batch_size, -1]
a = tf.contrib.layers.flatten(cnn_input)
h_prev = a.get_shape().as_list()[1]
for i, h in enumerate(self.cfg["fc_layers"]):
# build the dense layer manually as X*W + b (tf.contrib.layers.fully_connected would do this for us)
with tf.name_scope("fc_layer" + str(i)):
with tf.name_scope("weight_" + str(i)):
initial_value = tf.truncated_normal([h_prev, h], stddev=0.001)
w = tf.Variable(initial_value, name="fc_w_" + str(i))
tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, w)
self.variable_summaries(w)
with tf.name_scope("bias_" + str(i)):
b = tf.Variable(tf.zeros([h]), name='fc_b_' + str(i))
self.variable_summaries(b)
with tf.name_scope("Wx_plus_b_" + str(i)):
z = tf.matmul(a, w) + b
with tf.name_scope("L_ReLu_" + str(i)):
a = tf.nn.leaky_relu(z)
h_prev = h
# show fully connected layers shape
self.logger.log("layer {} fully connected: {}".format(i, a.get_shape()))
self.logits = tf.contrib.layers.fully_connected(a, self.cfg["output_dim"], activation_fn=None)
# self.logits = tf.reshape(cnn_input, shape=(-1, self.cfg["output_dim"]))
self.loss = tf.losses.mean_squared_error(self.Y,
self.logits,
weights=[self.cfg["output_weights"][0:self.cfg["output_dim"]]])
# Training summary for the current batch_loss
tf.summary.scalar('loss', self.loss)
# ---- dataset record boundary: next file test/test_triangle_construction.py (erichaase/topcoder-python, MIT) ----
from test.assert_json import assert_json
from topcoder.triangle_construction import solution
def test_triangle_construction ():
assert_json('triangle_construction', solution)
# ---- dataset record boundary: next file gym-dubins-airplane/gym_dubins_airplane/envs/agent.py (Cenderme/super-octo-waddle, MIT) ----
import numpy as np
import random
from model import QNetwork
from replay_buffer import ReplayBuffer
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import math
import operator
BUFFER_SIZE = int(1e5)
BATCH_SIZE = 256 # minibatch size
GAMMA = 0.99 # discount factor
TAU = 1e-3 # for soft update of target parameters
LR = 5e-4 # learning rate
UPDATE_NN_EVERY = 1 # how often to update the network
# prioritized experience replay
UPDATE_MEM_EVERY = 20 # how often to update the priorities
UPDATE_MEM_PAR_EVERY = 3000 # how often to update the hyperparameters
EXPERIENCES_PER_SAMPLING = math.ceil(BATCH_SIZE * UPDATE_MEM_EVERY /
UPDATE_NN_EVERY)
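# with the defaults above this is ceil(256 * 20 / 1) = 5120: priorities are
# refreshed every UPDATE_MEM_EVERY learning steps, so one sampling pass must
# cover UPDATE_MEM_EVERY batches of BATCH_SIZE experiences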
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent():
"""Interacts with and learns from the environment."""
def __init__(self, state_size, action_size, seed, compute_weights=False):
"""Initialize an Agent object.
Params
======
state_size (int): dimension of each state
action_size (int): dimension of each action
seed (int): random seed
"""
self.state_size = state_size
self.action_size = action_size
self.seed = random.seed(seed)
self.compute_weights = compute_weights
# Q-Network
self.qnetwork_local = QNetwork(state_size, action_size,
seed).to(device)
self.qnetwork_target = QNetwork(state_size, action_size,
seed).to(device)
self.optimizer = optim.Adam(self.qnetwork_local.parameters(), lr=LR)
self.criterion = nn.MSELoss()
# Replay memory
self.memory = ReplayBuffer(action_size, BUFFER_SIZE, BATCH_SIZE,
EXPERIENCES_PER_SAMPLING, seed,
compute_weights)
# Initialize time step (for updating every UPDATE_NN_EVERY steps)
self.t_step_nn = 0
# Initialize time step (for updating every UPDATE_MEM_PAR_EVERY steps)
self.t_step_mem_par = 0
# Initialize time step (for updating every UPDATE_MEM_EVERY steps)
self.t_step_mem = 0
def step(self, state, action, reward, next_state, done):
# Save experience in replay memory
self.memory.add(state, action, reward, next_state, done)
# Learn every UPDATE_NN_EVERY time steps.
self.t_step_nn = (self.t_step_nn + 1) % UPDATE_NN_EVERY
self.t_step_mem = (self.t_step_mem + 1) % UPDATE_MEM_EVERY
self.t_step_mem_par = (self.t_step_mem_par + 1) % UPDATE_MEM_PAR_EVERY
if self.t_step_mem_par == 0:
self.memory.update_parameters()
if self.t_step_nn == 0:
# If enough samples are available in memory, get random subset and learn
if self.memory.experience_count > EXPERIENCES_PER_SAMPLING:
sampling = self.memory.sample()
self.learn(sampling, GAMMA)
if self.t_step_mem == 0:
self.memory.update_memory_sampling()
def act(self, state, eps=0.):
"""Returns actions for given state as per current policy.
Params
======
state (array_like): current state
eps (float): epsilon, for epsilon-greedy action selection
"""
state = torch.from_numpy(state).float().unsqueeze(0).to(device)
# here we calculate action values (Q values)
self.qnetwork_local.eval()
with torch.no_grad():
action_values = self.qnetwork_local(state)
self.qnetwork_local.train()
# Epsilon-greedy action selection
if random.random() > eps:
return np.argmax(action_values.cpu().data.numpy())
else:
return random.choice(np.arange(self.action_size))
def learn(self, sampling, gamma):
"""Update value parameters using given batch of experience tuples.
Params
======
sampling (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
gamma (float): discount factor
"""
states, actions, rewards, next_states, dones, weights, indices = sampling
## TODO: compute and minimize the loss
q_target = self.qnetwork_target(next_states).detach().max(
1)[0].unsqueeze(1)
expected_values = rewards + gamma * q_target * (1 - dones)
output = self.qnetwork_local(states).gather(1, actions)
loss = F.mse_loss(output, expected_values)
if self.compute_weights:
with torch.no_grad():
weight = sum(np.multiply(weights, loss.data.cpu().numpy()))
loss *= weight
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
# ------------------- update target network ------------------- #
self.soft_update(self.qnetwork_local, self.qnetwork_target, TAU)
# ------------------- update priorities ------------------- #
delta = abs(expected_values - output.detach()).cpu().numpy()
#print("delta", delta)
self.memory.update_priorities(delta, indices)
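# the absolute TD error |r + gamma * max_a' Q_target(s', a') - Q_local(s, a)|
# becomes the new priority, so poorly predicted transitions are replayed more often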
def soft_update(self, local_model, target_model, tau):
"""Soft update model parameters.
θ_target = τ*θ_local + (1 - τ)*θ_target
Params
======
local_model (PyTorch model): weights will be copied from
target_model (PyTorch model): weights will be copied to
tau (float): interpolation parameter
"""
for target_param, local_param in zip(target_model.parameters(),
local_model.parameters()):
target_param.data.copy_(tau * local_param.data +
(1.0 - tau) * target_param.data)
def load_models(self, policy_net_filename, target_net_filename):
""" Function to load the parameters of the policy and target models """
print('Loading model...')
self.qnetwork_local.load_model(policy_net_filename)
self.qnetwork_target.load_model(target_net_filename)
# ---- dataset record boundary: next file day20/data.py (ogun/advent-of-code-2020, MIT) ----
INPUT = {
2647: [
list("#....#####"),
list(".##......#"),
list("##......##"),
list(".....#..#."),
list(".........#"),
list(".....#..##"),
list("#.#....#.."),
list("#......#.#"),
list("#....##..#"),
list("...##....."),
],
1283: [
list("######..#."),
list("#.#..#.#.."),
list("..#..#...#"),
list(".#.##..#.."),
list("#......#.."),
list("#.#....##."),
list(".#.....#.#"),
list("#.#..#.#.#"),
list(".#......##"),
list("...##....."),
],
3547: [
list("#.#.#.###."),
list("#........."),
list("#....##..."),
list("#.....#..#"),
list("#.....#.#."),
list("##..##...#"),
list("#...##...."),
list("......#..#"),
list("#...##...."),
list(".....###.#"),
],
1451: [
list("##..#.#..."),
list("#.#......."),
list("##.#.....#"),
list("....#....."),
list("...#...##."),
list("......#.#."),
list("#...##.##."),
list("........#."),
list(".#.##.#..."),
list("..##..#..."),
],
3137: [
list("....#.##.#"),
list("#....#...#"),
list("..#.#....."),
list("...####..#"),
list(".#.###...#"),
list(".......#.."),
list("##.##.#..#"),
list(".#.##....#"),
list("#...#....#"),
list("..##.##..#"),
],
2897: [
list("###..#.##."),
list("..#......#"),
list(".....#...."),
list("###.#....#"),
list("#.#..#...#"),
list(".#...##..#"),
list("##..##.##."),
list("#.....#..#"),
list(".#......##"),
list("#.#.#.##.#"),
],
1093: [
list("..#.#.#.#."),
list("#.#......."),
list("..##....#."),
list(".#.....#.#"),
list("#........#"),
list(".#....#..#"),
list("##....#..#"),
list("#.##..#..#"),
list("..###...##"),
list(".######.##"),
],
1217: [
list("#..#....##"),
list("#.....#..."),
list("##...##..#"),
list("#.....#..."),
list("..#.#..#.."),
list("#..#....##"),
list(".##.#....."),
list("......#..."),
list(".#........"),
list(".#..###.#."),
],
2801: [
list("###..##.#."),
list(".........#"),
list("##.#...###"),
list("#......#.."),
list("#........#"),
list("......#..."),
list("##.####..."),
list(".....##..."),
list("..#..#.##."),
list("...###.##."),
],
1361: [
list("...#.##..#"),
list("....#....."),
list("###......."),
list("#......#.."),
list(".......##."),
list("#...#..#.."),
list("#.....##.#"),
list("##........"),
list("#.#......."),
list("###.#..###"),
],
2063: [
list("...#....##"),
list("##...#..##"),
list("#........#"),
list("........##"),
list("#.......##"),
list("#........."),
list("##.....##."),
list(".....##..#"),
list(".#.##.#..."),
list(".#..#####."),
],
3797: [
list("##..#...#."),
list(".###.#.##."),
list(".....#.##."),
list("..#......."),
list("...#.#...."),
list("........##"),
list("#.#.#.##.#"),
list("#.....#.##"),
list("#.......#."),
list(".....#.##."),
],
1289: [
list("####.##.#."),
list(".....#...."),
list("#..#.#...."),
list("####...#.."),
list("#.#..#..#."),
list(".#.##..#.."),
list("#........#"),
list("....#..#.."),
list("........#."),
list("###.#.####"),
],
1427: [
list("##.##..##."),
list("###..#.##."),
list("#..##...#."),
list("#..#.#...#"),
list("#........#"),
list("#...##...."),
list("#........#"),
list(".....#..#."),
list(".####....#"),
list("##.#.##.#."),
],
1951: [
list("....##.#.#"),
list(".........#"),
list("#........#"),
list(".#..#...#."),
list(".....#####"),
list("#......#.#"),
list("...##....#"),
list("......#..."),
list("..#...#..#"),
list("....####.#"),
],
1483: [
list("....####.."),
list(".......#.#"),
list("###..#..##"),
list("...#.#...#"),
list("#..##...##"),
list("##.#......"),
list("#...#..#.."),
list("..#...#.##"),
list(".........#"),
list(".#...#...."),
],
1789: [
list("##..#####."),
list("....#....#"),
list("........#."),
list("..#.#..#.#"),
list("..##.#..##"),
list(".........#"),
list(".........#"),
list("#..#.#..##"),
list("....##...."),
list("#.#......."),
],
2129: [
list("#.###.#..#"),
list("....##...#"),
list(".#..#..##."),
list("...###.##."),
list("..#..#...#"),
list("....##...#"),
list("#........."),
list("#...#..###"),
list("#...#....."),
list("...#....##"),
],
2137: [
list("..#.####.#"),
list("##...#.#.."),
list(".......###"),
list(".#.....#.#"),
list(".#....##.#"),
list("#.......#."),
list("#....#...#"),
list("#.....####"),
list("......##.#"),
list("..#####.##"),
],
3761: [
list(".####.#..."),
list("####..#..#"),
list("#...##..##"),
list(".#.....#.#"),
list("....#....#"),
list("#.......#."),
list("...#..#..#"),
list("#.##...##."),
list("...###...#"),
list("...##.#..#"),
],
1327: [
list("..####.#.#"),
list("#..#......"),
list("......#.##"),
list("#..##....."),
list("..##.##..#"),
list("#.#.#....."),
list("####.....#"),
list("..#......."),
list("#.#...##.."),
list("#.##....#."),
],
2741: [
list(".#..#...#."),
list("#....#..#."),
list("......##.#"),
list("....#.#..#"),
list("........##"),
list("...#..#..."),
list("......##.."),
list("#...#..#.#"),
list("......##.."),
list("..#..#..#."),
],
1699: [
list(".###..####"),
list("##.....#.#"),
list(".....##.##"),
list("#.#...##.."),
list(".#........"),
list(".#....#..#"),
list("#..#....#."),
list(".#...#...#"),
list("#.......#."),
list("##.#..#..#"),
],
1151: [
list("..#.##...."),
list("##....#..."),
list("###.#..#.#"),
list("#.......##"),
list("....#.#..#"),
list("#...###..."),
list(".#..#.#..#"),
list("#.#..##..#"),
list(".#.#.#.#.."),
list(".###..####"),
],
2273: [
list("#.#.#.#.##"),
list(".........."),
list("#......#.."),
list("#.....#..."),
list("#.#...#..."),
list("##....##.."),
list("##..##.#.."),
list("#.#####.##"),
list("##.##...##"),
list("#...##..##"),
],
1999: [
list("##.##...##"),
list("#......#.."),
list("##..#....."),
list("#........#"),
list("#.#...####"),
list("..#....#.#"),
list("#..#...#.."),
list(".........#"),
list("#...##...."),
list("##.##.##.."),
],
1721: [
list("....##...#"),
list("###.#....#"),
list(".##..#...."),
list(".#.#.#...."),
list("...##....#"),
list("##..#....#"),
list("#....#.###"),
list("#.....##.."),
list("....#...##"),
list("..#.#.#..#"),
],
2521: [
list("..#######."),
list("#.#..##.#."),
list(".#....##.#"),
list("..#...####"),
list(".......##."),
list("##...###.."),
list("...##....#"),
list(".##.#....."),
list("###..##..#"),
list("####.##.#."),
],
2111: [
list("..#.#..#.."),
list("...#.....#"),
list("..####...#"),
list(".#.#..##.#"),
list(".##..#.##."),
list("........##"),
list("........##"),
list("#..#.#...."),
list("...#.###.."),
list(".#.#...#.."),
],
2767: [
list(".#######.."),
list("##.......#"),
list("#...#.##.."),
list("....#...##"),
list("#........#"),
list("..#.###..."),
list("....#..#.#"),
list("##....#.##"),
list("..##....##"),
list(".#####.#.."),
],
2141: [
list("####.#...."),
list("#..#.#...#"),
list("...#..#..#"),
list(".......#.."),
list(".....###.#"),
list("#....#...."),
list(".......#.#"),
list(".#...#..##"),
list("...#......"),
list(".###.####."),
],
2557: [
list(".#.##..#.."),
list("..##.....#"),
list("#.#.#....#"),
list("..##...#.."),
list("...#..##.#"),
list(".........."),
list("##......##"),
list("#..#......"),
list("#.#..#...#"),
list("##.#####.."),
],
2269: [
list(".#.#...##."),
list("#.......##"),
list("#.....##.."),
list("##.#......"),
list("#.##..###."),
list(".#.....##."),
list("....#....#"),
list("....#...##"),
list("#..##....."),
list("#.#.#.#.##"),
],
3511: [
list(".#.#.##..."),
list(".#.....##."),
list(".#....#..#"),
list("#.#......#"),
list("#.#.#....."),
list("#........#"),
list("..#......."),
list(".##.#....."),
list("##.#.....#"),
list("..####..##"),
],
2789: [
list("#......#.."),
list("#...#....."),
list("#........."),
list(".......#.#"),
list("...#....##"),
list("#.##..###."),
list("#...##...#"),
list(".........#"),
list(".........#"),
list(".###..##.."),
],
2971: [
list("#.##.#...."),
list("...#.....#"),
list(".#....#..."),
list("#.#..##..."),
list("#.....#..."),
list("####.....#"),
list("#..###..##"),
list("#....#...."),
list("#..#.##..."),
list("#.#..###.."),
],
3719: [
list("#.###....."),
list("...#.....#"),
list("...##...##"),
list(".#..#.#..#"),
list("#..#.#..#."),
list("#.#..#..##"),
list("#...###..#"),
list(".#.#..#.##"),
list("........#."),
list("#....###.."),
],
1901: [
list(".#...##.##"),
list("#........."),
list(".#.#.....#"),
list("#.##.....#"),
list("#........#"),
list("#....#...#"),
list(".....##.##"),
list("##.###..##"),
list("....#....#"),
list("....##..##"),
],
3191: [
list("#.#..###.#"),
list("#...#..##."),
list("#.....#..."),
list(".#.#.#...."),
list(".#..##...."),
list("#.....#.#."),
list(".##......."),
list("....#....#"),
list("#..##.#..."),
list("####....##"),
],
3709: [
list("..#......#"),
list("#..#...#.#"),
list("#.##....#."),
list(".#..#.##.."),
list("..#......#"),
list("#....##..."),
list("##........"),
list("....#....#"),
list(".........#"),
list(".#.#..###."),
],
1613: [
list("...##..##."),
list("#......#.."),
list("..##.#..##"),
list("......##.."),
list(".#..#..##."),
list(".......##."),
list(".......#.#"),
list("...#.#...."),
list("#......#.#"),
list("###..#...."),
],
2441: [
list("..#.######"),
list("#.#......."),
list("#..#.#...."),
list("....#...##"),
list("#...#...##"),
list("#.##...#.#"),
list("........##"),
list("#.#...#..."),
list("#..####.##"),
list("#.##.####."),
],
1409: [
list("..####.#.#"),
list("..##....#."),
list("..#.#...#."),
list("..##.##..."),
list(".#.##....#"),
list("#.....##.#"),
list("####.....#"),
list("###....#.."),
list("####..#.#."),
list("#..##.##.#"),
],
1523: [
list(".#.##..##."),
list("#..#.#...."),
list("##.#.#...#"),
list("....#.##.#"),
list("#........#"),
list("#.#......."),
list("#...##...#"),
list("...#..##.#"),
list("#.##...#.."),
list(".####..#.."),
],
1367: [
list("#..#...#.#"),
list("#.#......."),
list("..#..#...."),
list(".###..###."),
list("###..#.##."),
list("##...#..#."),
list("#..#...#.#"),
list("......##.."),
list("##.....#.#"),
list(".#####..##"),
],
1783: [
list("...#.####."),
list(".####..#.."),
list("#....#.###"),
list("#.#..#.#.#"),
list("#.#.#.#..#"),
list("#.......##"),
list("#.##.#.#.."),
list(".#.#....#."),
list("#..#.#...#"),
list(".###..##.#"),
],
1871: [
list(".##..#.##."),
list("#........#"),
list("#...#....#"),
list("##.#..##.."),
list("##.....##."),
list("#.....#.##"),
list("........##"),
list("....#....#"),
list("#........."),
list("....#.#..#"),
],
3217: [
list("#.#...#.##"),
list(".........#"),
list(".........#"),
list("#...#....."),
list("#....#.#.#"),
list(".........#"),
list("...#.##.##"),
list("#...#....."),
list(".#..#....#"),
list("#..###.#.#"),
],
3163: [
list("...##.#.##"),
list("#.#......#"),
list("....#...##"),
list("#.......##"),
list("###..#.#.."),
list(".#....####"),
list("##....#.##"),
list("#.......#."),
list(".....#..#."),
list(".##.#.#.##"),
],
3271: [
list("##.#.#.##."),
list("##....##.#"),
list("#.#.##..##"),
list("#.#...##.#"),
list(".##......#"),
list("#.....#.#."),
list("#........#"),
list("##..##...."),
list("#.#..##..#"),
list("#..#.####."),
],
2707: [
list("..###.#..."),
list("#...#....."),
list("#.#..#...."),
list("#..##...##"),
list(".###......"),
list(".#..##...#"),
list("#...#....."),
list("....#....."),
list("#..#.#...."),
list(".##....#.#"),
],
3083: [
list("##..#.#.##"),
list("#..#....##"),
list(".........#"),
list("..#.#...##"),
list("..#......."),
list(".#.#.....#"),
list("..#..#.#.."),
list("#...#.#..#"),
list("#..#.#...."),
list("#.###..##."),
],
1051: [
list("####...##."),
list("...#.#...#"),
list(".........."),
list("..#......."),
list("#......#.."),
list(".#.##.##.."),
list("#....#.#.#"),
list("#..#.#...#"),
list("#.#..##..#"),
list("......###."),
],
3767: [
list(".#..##.###"),
list("...#.#...."),
list("..#.....#."),
list("#.#......."),
list(".#.....#.#"),
list("##..#....#"),
list("#...#..#.#"),
list("........##"),
list("#........#"),
list("..#....##."),
],
2267: [
list(".#..#..#.."),
list(".#.#.#...."),
list(".#......#."),
list("#...#....#"),
list(".###..#..."),
list(".##.#...##"),
list("..#.##.##."),
list("...#.#.##."),
list("##.#.##..#"),
list(".#.##....."),
],
1973: [
list("#.#####..#"),
list(".#.......#"),
list("#..#.#..#."),
list("#.#.#.#.#."),
list(".##......."),
list("#.#.....#."),
list(".#.......#"),
list("#...##.#.#"),
list("##.......#"),
list(".##...####"),
],
3671: [
list("#..##.#.##"),
list("....##...#"),
list(".###.##..."),
list(".........#"),
list("#..#.....#"),
list("..##...#.."),
list("......#..."),
list("..#..#..##"),
list("..#......."),
list("##..###..#"),
],
3221: [
list("#.#..###.#"),
list("#..#....##"),
list("#..#......"),
list("#...#...##"),
list("..#..#..#."),
list("#..##...#."),
list("...#....#."),
list(".....#..#."),
list("##..#..#.."),
list(".....#...#"),
],
1549: [
list(".###.##..#"),
list("#.#.##...#"),
list("#....#...."),
list(".........."),
list("#.#......#"),
list("##.#.#..##"),
list("...#.#..##"),
list("........#."),
list("#.#....###"),
list("#....#...#"),
],
3461: [
list(".######..#"),
list("#.......##"),
list(".......#.."),
list(".#...#...."),
list("..##....#."),
list("#.....##.."),
list("##.#.#..#."),
list(".........#"),
list("##.##.#..."),
list("....#...##"),
],
2459: [
list("..##.##.#."),
list("...#..#..."),
list(".........#"),
list("#.#..#..##"),
list("#.###.#..."),
list("##.#......"),
list(".......#.."),
list(".........#"),
list("........##"),
list("#.##...#.."),
],
3203: [
list(".#...####."),
list("..##..#.#."),
list("#..#..##.."),
list("#.#....##."),
list("...#.#...."),
list(".......###"),
list("#.....##.."),
list("....#....#"),
list("#......#.."),
list("###......."),
],
2203: [
list("#.#..##.##"),
list(".......#.."),
list("......#.##"),
list("#.......##"),
list("#..##.##.#"),
list("..#.....##"),
list("#.##.....#"),
list("#.#....#.."),
list(".##.....##"),
list("......#..."),
],
3637: [
list("#...###.#."),
list("#........."),
list("..#......."),
list("...#.....#"),
list("#..##....#"),
list("#........#"),
list(".......#.."),
list("#....#.#.."),
list("#.#..##..#"),
list("..#.#..##."),
],
2467: [
list("..##.##..."),
list("##....####"),
list("...#.#.#.#"),
list("#.##...#.#"),
list("...##.##.."),
list("#.....#..."),
list("##........"),
list("..#...#.#."),
list("#...####.#"),
list("#......###"),
],
2411: [
list("...##....#"),
list("...##..###"),
list("...##.####"),
list("#.#..##.#."),
list("..##.#.###"),
list(".#..#.###."),
list("....####.#"),
list(".....##.#."),
list("#........."),
list(".#..#..###"),
],
2221: [
list("####.....#"),
list("#.#.....##"),
list(".#....#..."),
list(".#.#......"),
list(".##..#..#."),
list("....#....."),
list(".........#"),
list("##.......#"),
list("#....#...."),
list(".##.######"),
],
1487: [
list("..#..##..."),
list(".........#"),
list("#..#...###"),
list("....#...#."),
list(".#...##.#."),
list(".....#.#.#"),
list(".....##..."),
list("#.##......"),
list("#.#......."),
list("#.#####.#."),
],
1481: [
list("#.###.##.."),
list("....##...#"),
list("....#....."),
list("...#......"),
list("##.###.#.#"),
list("#.##..####"),
list("..#......#"),
list(".#....##.#"),
list("..##.##.#."),
list(".#####.#.#"),
],
1669: [
list("#...##.##."),
list("...#..#..."),
list(".##..#.#.#"),
list("#..#..#..#"),
list("#......#.#"),
list(".#......##"),
list("........#."),
list("......#..#"),
list(".##..#.#.#"),
list("##.##....#"),
],
3167: [
list(".#.####..."),
list(".........#"),
list("#......##."),
list(".....#...."),
list("..#.#...##"),
list("#.#.####.#"),
list("...#....#."),
list(".........#"),
list("#...#.#..#"),
list("#.#.#.#.#."),
],
3347: [
list("###...##.."),
list("#.#......#"),
list("...#.....#"),
list(".........."),
list("#.#.....#."),
list("..####..##"),
list("..#.#.#..#"),
list("##...#..#."),
list("..##.....#"),
list("#..#....#."),
],
2213: [
list("#..#####.#"),
list(".........."),
list("#..#.##.#."),
list("...###.#.#"),
list("......##.."),
list("......#..#"),
list(".##.....##"),
list("..#....###"),
list("...####..#"),
list(".####.#.##"),
],
3329: [
list("..##...#.."),
list("#.#....#.#"),
list("#...#..#.."),
list("......#.##"),
list("#...####.#"),
list(".........."),
list("##....##.#"),
list("#......##."),
list("....##...#"),
list("..####.##."),
],
3851: [
list("#.#....##."),
list(".........#"),
list("#.....#..."),
list("##.##....."),
list("...#.###.."),
list("#....##..."),
list(".....#.##."),
list(".#........"),
list("#......#.#"),
list("...#..#..#"),
],
2659: [
list("#.#...#.#."),
list(".....#.##."),
list("#..##.####"),
list("#.#.##...."),
list("#....#..#."),
list("...#...#.."),
list("...#....#."),
list("#....#.#.."),
list(".##.#....#"),
list(".....#..#."),
],
1933: [
list(".####.##.."),
list("#..####..."),
list(".#..####.."),
list(".#.#.##..."),
list("......#.#."),
list("##........"),
list(".#.#.....#"),
list("#..#......"),
list("....#....."),
list("...#...##."),
],
3299: [
list("###.##..#."),
list(".......#.."),
list("...#...##."),
list("###...#.##"),
list("......##.."),
list("....#.#..#"),
list(".###......"),
list(".#.#####.."),
list("#..#.#..#."),
list(".....#.#.#"),
],
3691: [
list("...###...#"),
list("#........."),
list("#.#.....##"),
list("#.#....#.."),
list("#..#...#.."),
list(".........."),
list("##...##..#"),
list(".#...#...#"),
list("#.....#.##"),
list(".###..#..."),
],
3733: [
list("#..#.#####"),
list(".....#...."),
list("....###..#"),
list("#..#.#...."),
list("#.#..#.###"),
list("..###...##"),
list("......#.##"),
list("...###...."),
list("...#....#."),
list("..##......"),
],
2131: [
list("##.#..#.#."),
list(".#...#..##"),
list("#.......#."),
list("....##...#"),
list(".###..#..."),
list("...#####.."),
list(".....#...#"),
list("##..#..##."),
list("..##....#."),
list(".#...####."),
],
1723: [
list(".....#####"),
list(".#.#..#..."),
list("##......#."),
list("#.......##"),
list(".###...#.."),
list("#..#......"),
list("#........."),
list("......#..#"),
list(".........#"),
list(".###.##.##"),
],
3463: [
list("##.#....##"),
list("#....##..#"),
list("..#.#....."),
list("#.#...#..#"),
list("#....#...."),
list("..#....#.#"),
list("#...#..###"),
list("##....#.##"),
list("..#.#....."),
list(".#..#.##.."),
],
2549: [
list("#.####.#.."),
list("...##....#"),
list("##..#.##.#"),
list("..###.#..#"),
list("#.#......#"),
list("#........#"),
list("....#....."),
list("#......#.#"),
list("#....####."),
list("...##.#.##"),
],
1031: [
list("#..#.#.#.#"),
list("......##.."),
list("#........#"),
list(".###......"),
list("..#..#..#."),
list("##....##.."),
list("......#..."),
list("...#...###"),
list(".###...#.."),
list(".##.#.###."),
],
1979: [
list("#.######.."),
list(".#.#.....#"),
list("#........#"),
list("#..##....."),
list("##........"),
list("##.....#.."),
list("......#..."),
list(".........#"),
list(".#........"),
list("..#.#####."),
],
2939: [
list("#.#...#.##"),
list(".#..#....#"),
list(".#.....#.#"),
list("##......##"),
list("...#..##.."),
list("#....#.##."),
list("#...##.#.#"),
list("..#...#..."),
list("##.....#.."),
list(".....##.#."),
],
2381: [
list("..##.###.#"),
list("..##...#.."),
list(".#...#...."),
list("#......#.#"),
list("##.......#"),
list("#..####..."),
list("...#.#.#.#"),
list("#.##.....#"),
list("..#......#"),
list("#..#.##..."),
],
3943: [
list("#.#.###..#"),
list(".......###"),
list("#.#...###."),
list("#..##.#..#"),
list("#......#.."),
list("#.##...#.#"),
list("#........."),
list("##....##.#"),
list("....#.#..."),
list(".###.#...."),
],
1553: [
list("#####.####"),
list("#...#....."),
list("#.#.....#."),
list("##......#."),
list("#....#.#.."),
list(".#.....#.#"),
list("##....#.#."),
list("#........#"),
list(".........#"),
list(".#.....##."),
],
2351: [
list(".###.###.."),
list("#.....#..."),
list("##.##....#"),
list("..#..##.#."),
list("#.#......."),
list("#....#...."),
list("......##.#"),
list("##...##..#"),
list(".#.....#.."),
list(".#.###..#."),
],
2311: [
list("#.#.#..##."),
list("#..###.#.."),
list("...##..#.#"),
list("###......."),
list("##........"),
list("#.#......."),
list("..##.....#"),
list(".#.####..."),
list("..#.#.#..."),
list("###..##.#."),
],
1567: [
list("..###.#.##"),
list(".#.....###"),
list("#...#..##."),
list("#.......#."),
list(".......#.."),
list("#....#...."),
list("...#.##.#."),
list("....#...##"),
list("....#....#"),
list("#.#...##.."),
],
2579: [
list("#.##..##.."),
list("#......#.."),
list("#..#..#..#"),
list("##.......#"),
list("....##.#.#"),
list("#.####..#."),
list("#..#..#.##"),
list("#...#..#.#"),
list("...##...#."),
list("#..#.###.."),
],
3593: [
list(".#.##.#.##"),
list("#...#....#"),
list(".........."),
list("##....#..#"),
list("##......##"),
list("#........."),
list("......#..#"),
list("...#.....#"),
list("....#....#"),
list("##..###..#"),
],
2281: [
list("##....###."),
list("...#......"),
list("#......#.#"),
list("##.#..#..#"),
list("###.#..##."),
list(".#...#...#"),
list(".........."),
list(".#.###.#.."),
list("#..#......"),
list("#..#.##.#."),
],
1193: [
list(".......###"),
list("##..#..#.."),
list(".###...###"),
list("....#.###."),
list("..#...#..#"),
list("#.#....#.."),
list("...####..#"),
list("#....#..##"),
list(".#.......#"),
list(".#.#...##."),
],
3833: [
list("...#####.."),
list("#..####..."),
list("#.#....###"),
list("...##.#.##"),
list("..#...#..#"),
list(".##.#####."),
list("#..#..#..#"),
list("#...##...."),
list(".....#.#.."),
list(".##.##.#.#"),
],
2003: [
list(".#.###.#.."),
list(".........#"),
list("..#..#...."),
list("#........."),
list("#..##....#"),
list(".......#.#"),
list("......#..."),
list("#....##..#"),
list(".#......##"),
list("..#..##.#."),
],
2731: [
list("#.#..#..##"),
list("....#..#.#"),
list("..#...#..."),
list("..#..#...."),
list("#.#..#...#"),
list("#....##..."),
list("#........."),
list("#..##..#.#"),
list("#........."),
list(".###.#...."),
],
3881: [
list("..##......"),
list("#...#..#.#"),
list("##...#...."),
list("....#....."),
list("##.......#"),
list(".....#####"),
list("...#....##"),
list(".........#"),
list(".........."),
list("#..##.####"),
],
3673: [
list("##..###.#."),
list("...##....#"),
list("###.....##"),
list("#..#...#.#"),
list("#.##......"),
list("..#.#....."),
list("..#.#....#"),
list(".###.....#"),
list(".###.##..."),
list("###.#..#.#"),
],
1021: [
list("#..###.#.."),
list("###..##.#."),
list("#..##....#"),
list(".....###.."),
list("....##...#"),
list("....#....."),
list("#.##..#..#"),
list(".........."),
list(".......#.#"),
list("..#.##..#."),
],
2423: [
list("#.....####"),
list(".##.#....#"),
list(".#........"),
list("##.....#.."),
list("#.....###."),
list("#...#...#."),
list("#...#..#.#"),
list(".#..#..##."),
list("##.......#"),
list(".#####.###"),
],
3923: [
list("..#....###"),
list("#.....#..#"),
list("#...#.#.#."),
list(".#.......#"),
list("#..#.#...."),
list(".......#.#"),
list("##....##.#"),
list(".#..#...#."),
list("#...##..#."),
list("..#.#.#..#"),
],
2753: [
list("..####..#."),
list("#.......#."),
list("#.##.#..##"),
list("#.#.#....."),
list("#..#......"),
list("....#.#..."),
list(".#.#..#..#"),
list("#.....#..#"),
list("##.#..#..."),
list("#####....#"),
],
3929: [
list("....#####."),
list("##..#.##.."),
list("##.#.#.##."),
list("##...#.#.."),
list("#........#"),
list(".##.#..#.."),
list("#..#.##..."),
list("##..#...#."),
list(".....#...#"),
list("###..####."),
],
3041: [
list(".##.#..#.#"),
list("#..#...#.."),
list("###..#..#."),
list(".#.#....##"),
list("...##....."),
list("#....#..##"),
list("#........#"),
list("##.#...#.."),
list("##....#..#"),
list("...#..#..#"),
],
3433: [
list("..#.#.#..."),
list("#.#......."),
list(".....#...."),
list("..#......#"),
list("#..#.....#"),
list("........##"),
list("##..##.##."),
list("##........"),
list("#.#.##..##"),
list("###.###..#"),
],
2719: [
list("..##..#..#"),
list("#.##..##.."),
list("#......#.."),
list("#...##..##"),
list("..#..#.#.#"),
list("#......###"),
list("..###..#.."),
list("....#.#..#"),
list("....##...#"),
list("##..#..###"),
],
1201: [
list(".#...##.##"),
list("#........#"),
list("##...##..."),
list(".........."),
list(".....#.#.."),
list("#.##.....#"),
list("...#.##..#"),
list(".........#"),
list(".#.#.....#"),
list(".##...#..."),
],
1129: [
list("...####..#"),
list("......##.."),
list("#.....##.."),
list("#.......#."),
list("#......#.."),
list("...##....#"),
list("........##"),
list("##.#.#.#.."),
list("...#..##.#"),
list("...##....#"),
],
3019: [
list("..#...###."),
list(".....#.##."),
list("#.##.....#"),
list(".#.##..#.."),
list(".#..###..#"),
list("..#.####.#"),
list("#..#.#...#"),
list(".......#.#"),
list("#..##.#..#"),
list("#.##....##"),
],
1747: [
list("##.###.#.."),
list("#.......#."),
list("#...#..#.#"),
list("##...##.#."),
list("..###.#..#"),
list("#..#..##.."),
list("#...#....."),
list("..#......."),
list("...#..#.#."),
list(".##..##.##"),
],
1741: [
list(".##.#..#.#"),
list("#...##..##"),
list("#....#.#.#"),
list("##...##..#"),
list("##.......#"),
list("#...#..##."),
list("...#.##.##"),
list("...#..#.#."),
list(".......#.#"),
list(".#####.###"),
],
1867: [
list("#..##....."),
list(".......###"),
list("#..##....#"),
list("##...#...."),
list("...###...."),
list("##..#....."),
list(".##......."),
list("#.....###."),
list("#...#..#.#"),
list("...###...."),
],
2803: [
list(".#.##....#"),
list("#.####..#."),
list("#........."),
list("#.#......#"),
list(".......#.#"),
list("........#."),
list("..#..#.#.#"),
list("....###..."),
list("#...##...."),
list("...###...."),
],
3643: [
list("#..#..#.##"),
list("####.#..#."),
list("#.#...#.##"),
list(".#..#....."),
list("##....#..#"),
list(".##......."),
list(".......#.#"),
list("...##.#..."),
list(".....#.##."),
list("#...####.#"),
],
2437: [
list("..###..###"),
list("....#....."),
list(".........."),
list("#.#..#.###"),
list("##...####."),
list("....##...."),
list("...##....."),
list("##..#.##.."),
list("#......#.."),
list("#.#.....#."),
],
1069: [
list("..####...."),
list("##..##...#"),
list(".#..#..##."),
list(".#....##.#"),
list("###.#.#.##"),
list("...##..#.#"),
list("##....#..."),
list("#.#....#.#"),
list(".#.....#.#"),
list("#.#.#....."),
],
1381: [
list(".###.#.##."),
list("....#..#.."),
list("#.......##"),
list("#...#....."),
list(".#...#..##"),
list("...#....##"),
list("#..#.###.."),
list("..######.#"),
list("#....#...#"),
list("#######.#."),
],
2617: [
list("..##..#.#."),
list("#.....##.#"),
list("..#.#..#.."),
list(".##.#..#.."),
list("###...#.#."),
list(".###.##..."),
list("#.#......."),
list("#..##.#..#"),
list("##.....#.."),
list(".##..#..##"),
],
2393: [
list(".##..#.#.#"),
list("..#.#..###"),
list("..##..#.##"),
list("....#....."),
list("#...#....."),
list("##.#.....#"),
list(".#.#..#.#."),
list("##.....#.."),
list(".......#.#"),
list("####..#..."),
],
3529: [
list("#.#...##.#"),
list("......#..#"),
list(".........#"),
list("#.....#..."),
list(".......#.."),
list(".....#.#.#"),
list(".....#...."),
list("#....#.#.#"),
list("....#.##.#"),
list(".####.#..#"),
],
2953: [
list("...##...#."),
list("##.#.#..##"),
list("#...#....."),
list("##.#...###"),
list("...#......"),
list("#.#.#..#.#"),
list(".#...#...#"),
list("##....#.##"),
list(".......#.."),
list(".#.#..#..."),
],
3617: [
list("#..##...##"),
list("......#..."),
list("#....#...."),
list(".........."),
list(".######.##"),
list("##..#.#.##"),
list("#.#...#..."),
list("........#."),
list(".######.##"),
list("##...###.#"),
],
3863: [
list(".##.#...##"),
list("#...#....."),
list("..#.#....#"),
list("#....#..##"),
list(".....###.."),
list("#.#......#"),
list("#.......#."),
list("...#.....#"),
list("#........."),
list("..###....#"),
],
3727: [
list("#.###.##.#"),
list(".........."),
list("...##....."),
list("..#..#..##"),
list("#......###"),
list("#....##..."),
list("###.##...."),
list(".....#...."),
list("##.####.#."),
list("#..#.#.###"),
],
3803: [
list("###..#.##."),
list(".##......#"),
list(".........#"),
list("###.....##"),
list("....###..#"),
list(".......#.#"),
list("........##"),
list("#..#......"),
list("##......##"),
list("#.###..#.."),
],
1579: [
list("#...##.###"),
list(".....#.###"),
list(".##...#..."),
list("#.#..#..#."),
list("..##.....#"),
list(".........#"),
list(".........."),
list("#.....#.##"),
list(".....#...."),
list(".###..#..."),
],
1049: [
list("#..#.##.##"),
list("##......##"),
list("..#.##...#"),
list("#.......#."),
list("###.....#."),
list(".....#.#.#"),
list("...#......"),
list("..##......"),
list("#.#....#.."),
list("##..#.#..."),
],
2687: [
list("##..#.##.."),
list(".#........"),
list("##..#...#."),
list(".#.#.....#"),
list(".#..#.#..#"),
list("#.###..#.."),
list("..#......#"),
list("#.......##"),
list("#..#.....#"),
list("#.##.#..##"),
],
1637: [
list("#..##...##"),
list("##..#....#"),
list("...#....#."),
list("#....#...."),
list(".....#...#"),
list("#...#...##"),
list(".#....#..."),
list("#........."),
list("..#....#.."),
list(".#.####..."),
],
3527: [
list(".#....#.#."),
list("#.......#."),
list("..#....#.#"),
list("####.#.#.#"),
list("...#..#..."),
list("###..#.###"),
list("##..#....#"),
list("#.##....##"),
list("..#......#"),
list(".....#.#.."),
],
2963: [
list("#.#.#.#.#."),
list("#.....#..."),
list("##.#.....#"),
list("..##......"),
list("..#......."),
list(".#...#.##."),
list("###......#"),
list("##....#..#"),
list(".#...#..##"),
list("..##..##.#"),
],
2287: [
list("##.######."),
list(".#.##.##.."),
list("#..#....##"),
list("##.#.#...#"),
list(".......##."),
list("#...##...#"),
list("...##..#.."),
list("##....#.#."),
list("....#.##.."),
list("..#.#..###"),
],
3677: [
list("###.....##"),
list("#..#.#..#."),
list("#.#......."),
list(".....#..##"),
list(".........."),
list("......#.##"),
list(".....#..#."),
list("#..#...#.."),
list(".##......#"),
list("#...##.##."),
],
3559: [
list("..#..#.##."),
list("###......#"),
list("..#.##...."),
list("#.#..#...."),
list("##..##..##"),
list("..#...#.#."),
list("#.....#.##"),
list("....#....#"),
list("...#.#...#"),
list("...#.###.."),
],
2837: [
list("..#...#..."),
list(".....##..."),
list("#.#..#...#"),
list("....#....#"),
list("...####.##"),
list("#........."),
list("...#...##."),
list(".#..###.#."),
list("....#....."),
list(".###.##.#."),
],
3539: [
list("..##....#."),
list("........#."),
list("......#..#"),
list("...#..#..."),
list("###....###"),
list("#...#....."),
list(".#........"),
list("#.....#..."),
list("..##.#..#."),
list("..###..#.#"),
],
1667: [
list(".#..####.."),
list(".....#...."),
list("......#..."),
list("#.#...##.#"),
list("#...#.#..#"),
list("##.#.#...#"),
list("##..#..#.."),
list("#...##...#"),
list(".#..###..."),
list("..#..####."),
],
2791: [
list("#.##.###.#"),
list("...#..#..."),
list("##.....###"),
list("...#.#..##"),
list(".........#"),
list(".###...#.."),
list("...#.....#"),
list("##.....##."),
list("###......."),
list("#..#.#...."),
],
2609: [
list("..##.#...."),
list("##.#.#...#"),
list("#.#..#...."),
list("#........."),
list("...#..#..#"),
list("#...#.#..."),
list("##.##....#"),
list(".###......"),
list("##.....##."),
list("#.#...#.#."),
],
3061: [
list("####..#.##"),
list("#.....##.."),
list(".........."),
list("......#..."),
list("..#.#..###"),
list(".#.#..#..#"),
list(".#...#...#"),
list("#........#"),
list(".....#.#.."),
list("#..#....##"),
],
}
| 24.880416
| 27
| 0.147232
| 1,585
| 43,068
| 4.000631
| 0.092114
| 1.635073
| 2.180098
| 2.543447
| 0.908374
| 0.908374
| 0.908374
| 0.908374
| 0.908374
| 0.908374
| 0
| 0.020196
| 0.337791
| 43,068
| 1,730
| 28
| 24.894798
| 0.202139
| 0
| 0
| 0.763006
| 0
| 0
| 0.334355
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
453f5032c66fbc0cd7fe41bafbb9d5b22a41c276
| 164
|
py
|
Python
|
ms_deisotope/test/test_library.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 18
|
2017-09-01T12:26:12.000Z
|
2022-02-23T02:31:29.000Z
|
ms_deisotope/test/test_library.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 19
|
2017-03-12T20:40:36.000Z
|
2022-03-31T22:50:47.000Z
|
ms_deisotope/test/test_library.py
|
mstim/ms_deisotope
|
29f4f466e92e66b65a2d21eca714aa627caa21db
|
[
"Apache-2.0"
] | 14
|
2016-05-06T02:25:30.000Z
|
2022-03-31T14:40:06.000Z
|
import ms_deisotope
import ms_peak_picker
def test_c_extensions_load():
assert ms_peak_picker.check_c_extensions()
assert ms_deisotope.check_c_extensions()
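# Editorial note: both packages expose a check_c_extensions() helper (as used above);
# the asserts make the test suite fail fast if the compiled C speedups did not load
# and the libraries silently fell back to pure-Python implementations.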
| 27.333333
| 46
| 0.835366
| 25
| 164
| 4.96
| 0.48
| 0.266129
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109756
| 164
| 6
| 47
| 27.333333
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
18bc050913b93c60b163305750204405919fa44b
| 183
|
py
|
Python
|
lambda-function.py
|
unbiased-coder/python-create-lambda-layer
|
f7bcf02a08a6a3e6dbf44ee2879cd9f95d3e729c
|
[
"Unlicense"
] | null | null | null |
lambda-function.py
|
unbiased-coder/python-create-lambda-layer
|
f7bcf02a08a6a3e6dbf44ee2879cd9f95d3e729c
|
[
"Unlicense"
] | null | null | null |
lambda-function.py
|
unbiased-coder/python-create-lambda-layer
|
f7bcf02a08a6a3e6dbf44ee2879cd9f95d3e729c
|
[
"Unlicense"
] | null | null | null |
# layer is located in Python directory
import json
from layer import unbiased_coder_test_layer_function
def lambda_handler(event, context):
unbiased_coder_test_layer_function()
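# Editorial note: lambda_handler currently returns None; a real handler would typically
# return a JSON-serializable response, e.g. {'statusCode': 200, 'body': json.dumps('ok')},
# which is presumably why json is imported. For `from layer import ...` to resolve, the
# layer module must sit under the python/ directory of the Lambda layer archive.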
| 22.875
| 52
| 0.836066
| 26
| 183
| 5.538462
| 0.692308
| 0.180556
| 0.236111
| 0.305556
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125683
| 183
| 7
| 53
| 26.142857
| 0.9
| 0.196721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7a0826bfd470d08b1181ca0b314522c9c02dbbe5
| 132
|
py
|
Python
|
src/hommmer/metrics/dummy_median.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | 4
|
2021-11-09T21:27:30.000Z
|
2021-11-23T00:38:20.000Z
|
src/hommmer/metrics/dummy_median.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | null | null | null |
src/hommmer/metrics/dummy_median.py
|
hammer-mt/hommmer
|
a02cb87841395f30911242a019f28f6ac15f27ec
|
[
"MIT"
] | null | null | null |
import numpy as np
def dummy_median(y_actual):
# dummy median predictor
return np.full(y_actual.shape, np.median(y_actual))
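# Worked example: dummy_median(np.array([1.0, 2.0, 9.0])) returns array([2., 2., 2.]),
# i.e. the median broadcast to the input's shape -- a naive baseline to compare real
# models against.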
| 26.4
| 55
| 0.75
| 22
| 132
| 4.318182
| 0.590909
| 0.221053
| 0.273684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 132
| 5
| 55
| 26.4
| 0.855856
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
e136455ed18f5c117b45a154de97ce26c3e0d90d
| 15,020
|
py
|
Python
|
baselines/herhrl/actor_critic.py
|
knowledgetechnologyuhh/goal_conditioned_RL_baselines
|
915fc875fd8cc75accd0804d99373916756f726e
|
[
"MIT"
] | 15
|
2020-07-01T16:16:09.000Z
|
2021-12-20T21:56:33.000Z
|
baselines/herhrl/actor_critic.py
|
knowledgetechnologyuhh/goal_conditioned_RL_baselines
|
915fc875fd8cc75accd0804d99373916756f726e
|
[
"MIT"
] | 14
|
2020-09-25T22:41:20.000Z
|
2022-03-12T00:38:44.000Z
|
baselines/herhrl/actor_critic.py
|
knowledgetechnologyuhh/goal_conditioned_RL_baselines
|
915fc875fd8cc75accd0804d99373916756f726e
|
[
"MIT"
] | 2
|
2020-07-01T16:19:08.000Z
|
2020-11-28T10:45:59.000Z
|
import tensorflow as tf
from baselines.util import store_args, nn
import math
import uuid
@tf.RegisterGradient("HeavisideGrad")
def _heaviside_grad(unused_op: tf.Operation, grad: tf.Tensor):
return tf.maximum(0.0, 1.0 - tf.abs(unused_op.inputs[0])) * grad
def heaviside(x: tf.Tensor, g: tf.Graph = tf.get_default_graph()):
custom_grads = {
"Identity": "HeavisideGrad"
}
with g.gradient_override_map(custom_grads):
i = tf.identity(x, name="identity_" + str(uuid.uuid1()))
ge = tf.greater_equal(x, 0, name="ge_" + str(uuid.uuid1()))
# tf.stop_gradient is needed to exclude tf.to_float from derivative
step_func = i + tf.stop_gradient(tf.to_float(ge) - i)
return step_func
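# Editorial note: the gradient_override_map above implements a straight-through-style
# estimator. Forward pass: heaviside(x) is exactly 0/1 (tf.to_float(x >= 0), with
# tf.stop_gradient hiding the non-differentiable step from autodiff). Backward pass:
# the registered "HeavisideGrad" routes gradients through the triangular surrogate
# max(0, 1 - |x|) instead of the step's zero-almost-everywhere derivative.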
class ActorCritic:
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
"""The actor-critic network and related training code.
Args:
inputs_tf (dict of tensors): all necessary inputs for the network: the
observation (o), the goal (g), and the action (u)
dimo (int): the dimension of the observations
dimg (int): the dimension of the goals
dimu (int): the dimension of the actions
max_u (float): the maximum magnitude of actions; action outputs will be scaled
accordingly
o_stats (baselines.her.Normalizer): normalizer for observations
g_stats (baselines.her.Normalizer): normalizer for goals
hidden (int): number of hidden units that should be used in hidden layers
layers (int): number of hidden layers
"""
self.o_tf = inputs_tf['o']
self.g_tf = inputs_tf['g']
self.u_tf = inputs_tf['u']
# Prepare inputs for actor and critic.
o = self.o_stats.normalize(self.o_tf)
g = self.g_stats.normalize(self.g_tf)
input_pi = tf.concat(axis=1, values=[o, g]) # for actor
# Networks.
with tf.variable_scope('shared_preproc'):
self.preproc_in = input_pi
with tf.variable_scope('pi'):
self.pi_tf = self.max_u * tf.tanh(nn(
input_pi, [self.hidden] * self.layers + [self.dimu]))
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[o, g, self.pi_tf / self.max_u])
self.Q_pi_tf = nn(input_Q, [self.hidden] * self.layers + [1])
# for critic training
input_Q = tf.concat(axis=1, values=[o, g, self.u_tf / self.max_u])
self._input_Q = input_Q # exposed for tests
self.Q_tf = nn(input_Q, [self.hidden] * self.layers + [1], reuse=True)
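# Note: [self.hidden] * self.layers + [1] builds the per-layer width list consumed by
# the nn helper, e.g. hidden=256, layers=3 -> [256, 256, 256, 1]: three hidden layers
# followed by a scalar Q head ([self.hidden] * self.layers + [self.dimu] does the same
# for the pi network).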
class ActorCriticSharedPreproc:
""" The same as the ActorCritic but with a single shared preprocessing layer.
"""
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
"""The actor-critic network and related training code.
Args:
inputs_tf (dict of tensors): all necessary inputs for the network: the
observation (o), the goal (g), and the action (u)
dimo (int): the dimension of the observations
dimg (int): the dimension of the goals
dimu (int): the dimension of the actions
max_u (float): the maximum magnitude of actions; action outputs will be scaled
accordingly
o_stats (baselines.her.Normalizer): normalizer for observations
g_stats (baselines.her.Normalizer): normalizer for goals
hidden (int): number of hidden units that should be used in hidden layers
layers (int): number of hidden layers
"""
self.o_tf = inputs_tf['o']
self.g_tf = inputs_tf['g']
self.u_tf = inputs_tf['u']
# Prepare inputs for actor and critic.
o = self.o_stats.normalize(self.o_tf)
g = self.g_stats.normalize(self.g_tf)
input_og = tf.concat(axis=1, values=[o, g]) # for actor
# Networks.
with tf.variable_scope('shared_preproc'):
self.preproc_in = nn(input_og, [self.hidden])
with tf.variable_scope('pi'):
self.pi_tf = self.max_u * tf.tanh(nn(
self.preproc_in, [self.hidden] * self.layers + [self.dimu]))
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.pi_tf / self.max_u])
self.Q_pi_tf = nn(input_Q, [self.hidden] * self.layers + [1])
# for critic training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.u_tf / self.max_u])
self._input_Q = input_Q # exposed for tests
self.Q_tf = nn(input_Q, [self.hidden] * self.layers + [1], reuse=True)
class ActorCriticVanillaAttnEnforceW:
""" The same as the ActorCritic but with an additional attention layer that is subject to an additional error function that encourages the outputs of the attention to be either 0 or 1.
The error function has a "W"-shape with the lower points are 0 and 1. The steepness parameter controls the steepness of the "W".
"""
steepness = 1
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
"""The actor-critic network and related training code.
Args:
inputs_tf (dict of tensors): all necessary inputs for the network: the
observation (o), the goal (g), and the action (u)
dimo (int): the dimension of the observations
dimg (int): the dimension of the goals
dimu (int): the dimension of the actions
max_u (float): the maximum magnitude of actions; action outputs will be scaled
accordingly
o_stats (baselines.her.Normalizer): normalizer for observations
g_stats (baselines.her.Normalizer): normalizer for goals
hidden (int): number of hidden units that should be used in hidden layers
layers (int): number of hidden layers
"""
self.o_tf = inputs_tf['o']
self.g_tf = inputs_tf['g']
self.u_tf = inputs_tf['u']
# Prepare inputs for actor and critic.
o = self.o_stats.normalize(self.o_tf)
g = self.g_stats.normalize(self.g_tf)
input_og = tf.concat(axis=1, values=[o, g]) # for actor
# Networks.
with tf.variable_scope('shared_preproc') as scope:
self.attn = tf.nn.sigmoid(nn(input_og, [self.hidden] * 2 + [input_og.shape[1]], name='attn') * self.steepness)
force_W_steepness = 3
# This is the additional "W"-error that will be added to the total error for optimizing the weights in the shared preproc layers.
self.shared_preproc_err = ((4 * force_W_steepness * (self.attn ** 2)) - (4 * force_W_steepness * self.attn)) ** 2
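# Expanding the expression: err = (4k(a^2 - a))^2 = 16 k^2 a^2 (a - 1)^2 with
# k = force_W_steepness, which is zero exactly at a = 0 and a = 1 and positive
# elsewhere -- the "W" shape that pulls each attention output towards a binary value.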
had_prod = self.attn * input_og
# Now map input to a smaller space
reduced_attn_input = had_prod
reduced_attn = self.attn
self.preproc_in = tf.concat(axis=1, values=[reduced_attn_input, reduced_attn])
with tf.variable_scope('pi'):
self.pi_tf = self.max_u * tf.tanh(nn(
self.preproc_in, [self.hidden] * self.layers + [self.dimu]))
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.pi_tf / self.max_u])
self.Q_pi_tf = nn(input_Q, [self.hidden] * self.layers + [1])
# for critic training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.u_tf / self.max_u])
self._input_Q = input_Q # exposed for tests
self.Q_tf = nn(input_Q, [self.hidden] * self.layers + [1], reuse=True)
class ActorCriticProbSamplingAttn:
""" The same as the ActorCritic but with an additional attention layer that is subject to probabilistic sampling.
"""
steepness = 1
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
"""The actor-critic network and related training code.
Args:
inputs_tf (dict of tensors): all necessary inputs for the network: the
observation (o), the goal (g), and the action (u)
dimo (int): the dimension of the observations
dimg (int): the dimension of the goals
dimu (int): the dimension of the actions
max_u (float): the maximum magnitude of actions; action outputs will be scaled
accordingly
o_stats (baselines.her.Normalizer): normalizer for observations
g_stats (baselines.her.Normalizer): normalizer for goals
hidden (int): number of hidden units that should be used in hidden layers
layers (int): number of hidden layers
"""
self.o_tf = inputs_tf['o']
self.g_tf = inputs_tf['g']
self.u_tf = inputs_tf['u']
# self.rnd_in_tf = inputs_tf['rnd']
# Prepare inputs for actor and critic.
o = self.o_stats.normalize(self.o_tf)
g = self.g_stats.normalize(self.g_tf)
input_og = tf.concat(axis=1, values=[o, g]) # for actor
# Networks.
with tf.variable_scope('shared_preproc') as scope:
# This is the probability vector concerned with which input is probable to be attended to.
self.prob_in = tf.nn.sigmoid(nn(input_og, [64] * 2 + [input_og.shape[1]], name='attn'))
self.rnd = tf.random_uniform(shape=[kwargs['batch_size'], int(input_og.shape[1])])
# Subtracting a random number in [0,1] from prob_in yields a pre-activation in [-1,1], so attn becomes a
# (soft) random sample. The NN "wants" a controllable and deterministic attn vector, and because the
# sigmoid is applied to this difference, that is only possible when the outcome no longer depends on the
# random draw; hence prob_in learns to saturate close to 0 or 1.
self.attn = tf.sigmoid((self.prob_in - self.rnd) * self.steepness)
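# Worked example: with prob_in = 0.9 and rnd ~ U[0,1], the pre-activation is positive
# with probability 0.9, so attn > 0.5 on ~90% of draws; as steepness grows, the sigmoid
# sharpens and attn approaches a hard Bernoulli(prob_in) sample.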
had_prod = self.attn * input_og
# Now map input to a smaller space
reduced_attn_input = had_prod
# reduced_attn_input = nn(had_prod, [int(input_og.shape[1]//3)], name='compress_in')
reduced_attn = self.attn
# reduced_attn = nn(attn, [int(input_og.shape[1]//3)], name='compress_attn')
self.preproc_in = tf.concat(axis=1, values=[reduced_attn_input, reduced_attn])
with tf.variable_scope('pi'):
self.pi_tf = self.max_u * tf.tanh(nn(
self.preproc_in, [self.hidden] * self.layers + [self.dimu]))
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.pi_tf / self.max_u])
self.Q_pi_tf = nn(input_Q, [self.hidden] * self.layers + [1])
# for critic training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.u_tf / self.max_u])
self._input_Q = input_Q # exposed for tests
self.Q_tf = nn(input_Q, [self.hidden] * self.layers + [1], reuse=True)
class ActorCriticProbSamplingAttnSteep100(ActorCriticProbSamplingAttn):
"""
Same as ActorCriticProbSamplingAttn but with a steeper sigmoid function.
"""
steepness = 100
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
ActorCriticProbSamplingAttn.__init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs)
class ActorCriticProbSamplingAttnSteep6(ActorCriticProbSamplingAttn):
"""
Same as ActorCriticProbSamplingAttn but with a steeper sigmoid function.
"""
steepness = 6
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
ActorCriticProbSamplingAttn.__init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs)
class ActorCriticProbSamplingAttnHeaviside:
""" Same as ActorCriticProbSampling, but with heaviside function instead of sigmoid.
"""
@store_args
def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden, layers,
**kwargs):
"""The actor-critic network and related training code.
Args:
inputs_tf (dict of tensors): all necessary inputs for the network: the
observation (o), the goal (g), and the action (u)
dimo (int): the dimension of the observations
dimg (int): the dimension of the goals
dimu (int): the dimension of the actions
max_u (float): the maximum magnitude of actions; action outputs will be scaled
accordingly
o_stats (baselines.her.Normalizer): normalizer for observations
g_stats (baselines.her.Normalizer): normalizer for goals
hidden (int): number of hidden units that should be used in hidden layers
layers (int): number of hidden layers
"""
self.o_tf = inputs_tf['o']
self.g_tf = inputs_tf['g']
self.u_tf = inputs_tf['u']
# Prepare inputs for actor and critic.
o = self.o_stats.normalize(self.o_tf)
g = self.g_stats.normalize(self.g_tf)
input_og = tf.concat(axis=1, values=[o, g]) # for actor
# Networks.
with tf.variable_scope('shared_preproc') as scope:
self.prob_in = tf.nn.sigmoid(nn(input_og, [64] * 2 + [input_og.shape[1]], name='attn'))
self.rnd = tf.random_uniform(minval=0.01, maxval=0.99, shape=[kwargs['batch_size'], int(input_og.shape[1])])
self.attn = heaviside(self.prob_in - self.rnd)
had_prod = self.attn * input_og
# Now map input to a smaller space
reduced_attn_input = had_prod
reduced_attn = self.attn
self.preproc_in = tf.concat(axis=1, values=[reduced_attn_input, reduced_attn])
with tf.variable_scope('pi'):
self.pi_tf = self.max_u * tf.tanh(nn(
self.preproc_in, [self.hidden] * self.layers + [self.dimu]))
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.pi_tf / self.max_u])
self.Q_pi_tf = nn(input_Q, [self.hidden] * self.layers + [1])
# for critic training
input_Q = tf.concat(axis=1, values=[self.preproc_in, self.u_tf / self.max_u])
self._input_Q = input_Q # exposed for tests
self.Q_tf = nn(input_Q, [self.hidden] * self.layers + [1], reuse=True)
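# Rough usage sketch (hypothetical shapes; nn/store_args come from baselines.util and
# o_stats/g_stats are baselines.her.Normalizer instances):
# inputs_tf = {'o': tf.placeholder(tf.float32, (None, dimo)),
#              'g': tf.placeholder(tf.float32, (None, dimg)),
#              'u': tf.placeholder(tf.float32, (None, dimu))}
# ac = ActorCritic(inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden=256, layers=3)
# ac.pi_tf is the action scaled to [-max_u, max_u]; ac.Q_pi_tf / ac.Q_tf are the critic
# heads used for policy and critic training respectively.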
| 49.245902
| 188
| 0.617177
| 2,087
| 15,020
| 4.269765
| 0.103498
| 0.026933
| 0.02424
| 0.02626
| 0.825833
| 0.812255
| 0.81001
| 0.801032
| 0.791606
| 0.783526
| 0
| 0.007851
| 0.279161
| 15,020
| 304
| 189
| 49.407895
| 0.815184
| 0.373901
| 0
| 0.754839
| 0
| 0
| 0.02035
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058065
| false
| 0
| 0.025806
| 0.006452
| 0.167742
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e198fb68c680f8491f12365f6f0a9226dc81426e
| 271
|
py
|
Python
|
bpl_api/api/__init__.py
|
DuneRoot/bpl-api
|
41470cf75c15b034636828fa476fd6e6a35880c6
|
[
"MIT"
] | null | null | null |
bpl_api/api/__init__.py
|
DuneRoot/bpl-api
|
41470cf75c15b034636828fa476fd6e6a35880c6
|
[
"MIT"
] | null | null | null |
bpl_api/api/__init__.py
|
DuneRoot/bpl-api
|
41470cf75c15b034636828fa476fd6e6a35880c6
|
[
"MIT"
] | null | null | null |
from bpl_api.api import Accounts
from bpl_api.api import Blocks
from bpl_api.api import Delegates
from bpl_api.api import Loader
from bpl_api.api import Multisignatures
from bpl_api.api import Peers
from bpl_api.api import Signatures
from bpl_api.api import Transactions
| 30.111111
| 39
| 0.852399
| 48
| 271
| 4.645833
| 0.25
| 0.251121
| 0.358744
| 0.466368
| 0.681614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118081
| 271
| 8
| 40
| 33.875
| 0.933054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e1bde951309ff9bdd2597c2e51a3da17338d87cd
| 16,733
|
py
|
Python
|
port/modules/font/dvsmb_16.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 53
|
2018-10-15T12:01:24.000Z
|
2019-11-22T09:31:02.000Z
|
port/modules/font/dvsmb_16.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 10
|
2018-10-17T13:42:19.000Z
|
2019-11-25T06:42:40.000Z
|
port/modules/font/dvsmb_16.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 26
|
2018-12-04T03:53:39.000Z
|
2019-11-22T03:40:05.000Z
|
# Code generated by font-to-py.py.
# Font: dsmb.ttf
version = '0.26'
def height():
return 16
def max_width():
return 9
def hmap():
return True
def reverse():
return False
def monospaced():
return False
def min_ch():
return 32
def max_ch():
return 126
_font =\
b'\x09\x00\x00\x00\x78\x00\x8c\x00\x0c\x00\x1c\x00\x38\x00\x30\x00'\
b'\x30\x00\x30\x00\x00\x00\x30\x00\x30\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\xc0\x00\xc0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xcc\x00\xcc\x00\xcc\x00'\
b'\xcc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x19\x00\x1b\x00'\
b'\x1b\x00\x7f\x80\x36\x00\x36\x00\x36\x00\xff\x00\x64\x00\x6c\x00'\
b'\x6c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x10\x00\x10\x00'\
b'\x7c\x00\xd4\x00\xd0\x00\xf0\x00\x7c\x00\x1e\x00\x16\x00\x16\x00'\
b'\xd6\x00\x7c\x00\x10\x00\x10\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x70\x00\x88\x00\x88\x00\x88\x00\x71\x80\x1e\x00\xe7\x00\x08\x80'\
b'\x08\x80\x08\x80\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x38\x00\x60\x00\x60\x00\x20\x00\x70\x00\x76\x00\xde\x00'\
b'\xde\x00\xdc\x00\xec\x00\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x30\x00\x60\x00\x60\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x60\x00\x60\x00\x30\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\xc0\x00\x60\x00\x60\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x60\x00\x60\x00'\
b'\xc0\x00\x00\x00\x00\x00\x09\x00\x00\x00\x10\x00\xd6\x00\x7c\x00'\
b'\x7c\x00\xd6\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\
b'\x18\x00\x18\x00\x18\x00\xff\x00\xff\x00\x18\x00\x18\x00\x18\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00'\
b'\x60\x00\x60\x00\x60\x00\xc0\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\x00\xf8\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x03\x00\x06\x00\x06\x00\x0c\x00\x0c\x00\x18\x00'\
b'\x18\x00\x30\x00\x30\x00\x60\x00\x60\x00\xc0\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x38\x00\x6c\x00\xc6\x00\xc6\x00\xd6\x00'\
b'\xd6\x00\xc6\x00\xc6\x00\xc6\x00\x6c\x00\x38\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x70\x00\xb0\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\xfc\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x78\x00\x86\x00\x06\x00'\
b'\x06\x00\x0e\x00\x0c\x00\x1c\x00\x38\x00\x70\x00\xe0\x00\xfe\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x7c\x00\x86\x00'\
b'\x06\x00\x06\x00\x38\x00\x0c\x00\x06\x00\x06\x00\x06\x00\x8e\x00'\
b'\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x0c\x00'\
b'\x1c\x00\x3c\x00\x2c\x00\x6c\x00\xcc\x00\x8c\x00\xfe\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\xfc\x00\xc0\x00\xc0\x00\xc0\x00\xf8\x00\x8c\x00\x06\x00\x06\x00'\
b'\x06\x00\x8c\x00\x78\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x38\x00\x64\x00\xc0\x00\xc0\x00\xfc\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\x46\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\xfe\x00\x06\x00\x0e\x00\x0c\x00\x0c\x00\x18\x00'\
b'\x18\x00\x38\x00\x30\x00\x30\x00\x60\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x7c\x00\xc6\x00\xc6\x00\xc6\x00\x38\x00'\
b'\x6c\x00\xc6\x00\xc6\x00\xc6\x00\xee\x00\x7c\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x78\x00\xc4\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\x7e\x00\x06\x00\x06\x00\x4c\x00\x38\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x60\x00\x60\x00\x60\x00\x00\x00\x00\x00\x60\x00\x60\x00'\
b'\x60\x00\x60\x00\xc0\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00'\
b'\x00\x00\x01\x00\x0f\x00\x3c\x00\xe0\x00\xe0\x00\x3c\x00\x0f\x00'\
b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\xff\x00\xff\x00\x00\x00\x00\x00\xff\x00'\
b'\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x00\x00\x00\x00\x80\x00\xf0\x00\x3c\x00\x07\x00\x07\x00'\
b'\x3c\x00\xf0\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x78\x00\x8c\x00\x0c\x00\x1c\x00\x38\x00\x30\x00'\
b'\x30\x00\x30\x00\x00\x00\x30\x00\x30\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x1e\x00\x23\x00\x63\x00\xcf\x00'\
b'\xdb\x00\xdb\x00\xdb\x00\xdb\x00\xdb\x00\xcf\x00\x60\x00\x32\x00'\
b'\x1f\x00\x00\x00\x09\x00\x00\x00\x38\x00\x38\x00\x38\x00\x38\x00'\
b'\x6c\x00\x6c\x00\x6c\x00\x7c\x00\x6c\x00\xee\x00\xc6\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xfc\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xf8\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xfc\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x3c\x00\x62\x00'\
b'\x40\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x40\x00\x62\x00'\
b'\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xf8\x00'\
b'\xcc\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xcc\x00\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\xfe\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xfc\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\xfe\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xfc\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x3c\x00\x62\x00\x40\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xce\x00\xc6\x00\xc6\x00\x66\x00\x3e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xfe\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\xfc\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\xfc\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x3e\x00\x06\x00\x06\x00'\
b'\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x86\x00\x7c\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xc6\x00\xce\x00'\
b'\xdc\x00\xd8\x00\xf0\x00\xf8\x00\xfc\x00\xcc\x00\xce\x00\xc6\x00'\
b'\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\xee\x00\xee\x00\xee\x00\xee\x00\xee\x00\xfe\x00\xd6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\xe6\x00\xe6\x00\xe6\x00\xf6\x00\xf6\x00\xd6\x00\xde\x00'\
b'\xde\x00\xce\x00\xce\x00\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x38\x00\x6c\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\x6c\x00\x38\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\xfc\x00\xce\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xce\x00\xfc\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x38\x00\x6c\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\x6c\x00\x3c\x00\x0c\x00'\
b'\x04\x00\x00\x00\x00\x00\x09\x00\x00\x00\xfc\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xf8\x00\xcc\x00\xce\x00\xc6\x00\xc7\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x3c\x00\xc2\x00'\
b'\xc0\x00\xc0\x00\xf0\x00\x7c\x00\x1e\x00\x06\x00\x06\x00\x86\x00'\
b'\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xff\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\xc6\x00\xc6\x00\x6c\x00\x6c\x00\x6c\x00\x6c\x00\x6c\x00'\
b'\x28\x00\x38\x00\x38\x00\x38\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\xc1\x80\xc1\x80\xc1\x80\xdd\x80\xdd\x80\x5d\x00'\
b'\x55\x00\x55\x00\x77\x00\x63\x00\x63\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\xc6\x00\x6c\x00\x6c\x00\x38\x00\x38\x00'\
b'\x10\x00\x38\x00\x38\x00\x6c\x00\x6c\x00\xc6\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\xe7\x00\x66\x00\x66\x00\x3c\x00'\
b'\x3c\x00\x3c\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xfe\x00\x06\x00\x0e\x00'\
b'\x1c\x00\x18\x00\x38\x00\x30\x00\x60\x00\xe0\x00\xc0\x00\xfe\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xf0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xf0\x00\x00\x00\x00\x00\x09\x00\x00\x00\xc0\x00'\
b'\x40\x00\x60\x00\x20\x00\x30\x00\x30\x00\x18\x00\x18\x00\x08\x00'\
b'\x0c\x00\x04\x00\x06\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\xf0\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\xf0\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x18\x00\x3c\x00\x66\x00\xc3\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x80\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\xc0\x00\x60\x00\x30\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x00'\
b'\x46\x00\x06\x00\x7e\x00\xc6\x00\xc6\x00\xce\x00\x7e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xfc\x00\xee\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xee\x00\xfc\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3c\x00\x62\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x62\x00'\
b'\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x06\x00'\
b'\x06\x00\x06\x00\x7e\x00\xee\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xee\x00\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3c\x00\x46\x00\xc6\x00\xfe\x00\xc0\x00'\
b'\xc0\x00\x62\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x1e\x00\x30\x00\x30\x00\xfe\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7e\x00\x6e\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\x6e\x00\x7e\x00\x06\x00\x46\x00\x3c\x00'\
b'\x00\x00\x09\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xfc\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x18\x00\x18\x00\x18\x00\x00\x00\x78\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\xff\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x18\x00\x18\x00\x18\x00\x00\x00'\
b'\x78\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\xf0\x00\x00\x00\x09\x00\x00\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xcc\x00\xd8\x00\xf0\x00\xf0\x00\xd8\x00\xd8\x00\xcc\x00'\
b'\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\xf0\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\xff\x00\xdb\x00\xdb\x00\xdb\x00\xdb\x00'\
b'\xdb\x00\xdb\x00\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x38\x00\x6c\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\x6c\x00\x38\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x00\xee\x00'\
b'\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xee\x00\xfc\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7e\x00'\
b'\xee\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xee\x00\x7e\x00\x06\x00'\
b'\x06\x00\x06\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xfc\x00\xe0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x7c\x00\xc2\x00\xc0\x00\xf8\x00\x3e\x00\x06\x00\x86\x00'\
b'\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00'\
b'\x30\x00\x30\x00\xfe\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00\xc6\x00'\
b'\xc6\x00\xc6\x00\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xc6\x00\xee\x00\x6c\x00\x6c\x00'\
b'\x6c\x00\x28\x00\x38\x00\x38\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc1\x80\xc1\x80\xc9\x80'\
b'\x5d\x00\x77\x00\x77\x00\x63\x00\x63\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\xee\x00\x6c\x00'\
b'\x38\x00\x38\x00\x38\x00\x3c\x00\x6c\x00\xee\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc6\x00'\
b'\x6e\x00\x6c\x00\x6c\x00\x3c\x00\x38\x00\x18\x00\x18\x00\x18\x00'\
b'\x30\x00\x70\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xfe\x00\x06\x00\x0c\x00\x18\x00\x30\x00\x60\x00\xc0\x00\xfe\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x1c\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\xc0\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x1c\x00\x00\x00\x09\x00\x00\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x09\x00\x00\x00'\
b'\xe0\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x0c\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\xe0\x00\x00\x00\x09\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x71\x00\xff\x00'\
b'\x8e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
_index =\
b'\x00\x00\x22\x00\x22\x00\x44\x00\x44\x00\x66\x00\x66\x00\x88\x00'\
b'\x88\x00\xaa\x00\xaa\x00\xcc\x00\xcc\x00\xee\x00\xee\x00\x10\x01'\
b'\x10\x01\x32\x01\x32\x01\x54\x01\x54\x01\x76\x01\x76\x01\x98\x01'\
b'\x98\x01\xba\x01\xba\x01\xdc\x01\xdc\x01\xfe\x01\xfe\x01\x20\x02'\
b'\x20\x02\x42\x02\x42\x02\x64\x02\x64\x02\x86\x02\x86\x02\xa8\x02'\
b'\xa8\x02\xca\x02\xca\x02\xec\x02\xec\x02\x0e\x03\x0e\x03\x30\x03'\
b'\x30\x03\x52\x03\x52\x03\x74\x03\x74\x03\x96\x03\x96\x03\xb8\x03'\
b'\xb8\x03\xda\x03\xda\x03\xfc\x03\xfc\x03\x1e\x04\x1e\x04\x40\x04'\
b'\x40\x04\x62\x04\x62\x04\x84\x04\x84\x04\xa6\x04\xa6\x04\xc8\x04'\
b'\xc8\x04\xea\x04\xea\x04\x0c\x05\x0c\x05\x2e\x05\x2e\x05\x50\x05'\
b'\x50\x05\x72\x05\x72\x05\x94\x05\x94\x05\xb6\x05\xb6\x05\xd8\x05'\
b'\xd8\x05\xfa\x05\xfa\x05\x1c\x06\x1c\x06\x3e\x06\x3e\x06\x60\x06'\
b'\x60\x06\x82\x06\x82\x06\xa4\x06\xa4\x06\xc6\x06\xc6\x06\xe8\x06'\
b'\xe8\x06\x0a\x07\x0a\x07\x2c\x07\x2c\x07\x4e\x07\x4e\x07\x70\x07'\
b'\x70\x07\x92\x07\x92\x07\xb4\x07\xb4\x07\xd6\x07\xd6\x07\xf8\x07'\
b'\xf8\x07\x1a\x08\x1a\x08\x3c\x08\x3c\x08\x5e\x08\x5e\x08\x80\x08'\
b'\x80\x08\xa2\x08\xa2\x08\xc4\x08\xc4\x08\xe6\x08\xe6\x08\x08\x09'\
b'\x08\x09\x2a\x09\x2a\x09\x4c\x09\x4c\x09\x6e\x09\x6e\x09\x90\x09'\
b'\x90\x09\xb2\x09\xb2\x09\xd4\x09\xd4\x09\xf6\x09\xf6\x09\x18\x0a'\
b'\x18\x0a\x3a\x0a\x3a\x0a\x5c\x0a\x5c\x0a\x7e\x0a\x7e\x0a\xa0\x0a'\
b'\xa0\x0a\xc2\x0a\xc2\x0a\xe4\x0a\xe4\x0a\x06\x0b\x06\x0b\x28\x0b'\
b'\x28\x0b\x4a\x0b\x4a\x0b\x6c\x0b\x6c\x0b\x8e\x0b\x8e\x0b\xb0\x0b'\
b'\xb0\x0b\xd2\x0b\xd2\x0b\xf4\x0b\xf4\x0b\x16\x0c\x16\x0c\x38\x0c'\
b'\x38\x0c\x5a\x0c\x5a\x0c\x7c\x0c\x7c\x0c\x9e\x0c\x9e\x0c\xc0\x0c'\
_mvfont = memoryview(_font)
def get_ch(ch):
    # Map the character to its glyph slot; anything outside printable
    # ASCII (32..126) falls back to 63 ('?').
    ordch = ord(ch)
    ordch = ordch + 1 if ordch >= 32 and ordch <= 126 else 63
    # Each index entry is a pair of little-endian uint16s: the glyph's
    # start offset in _font and the offset of the following glyph.
    idx_offs = 4 * (ordch - 32)
    offset = int.from_bytes(_index[idx_offs : idx_offs + 2], 'little')
    next_offs = int.from_bytes(_index[idx_offs + 2 : idx_offs + 4], 'little')
    # A glyph record starts with two bytes of pixel width; the rest is
    # the bitmap. The font height is fixed at 16 pixels.
    width = int.from_bytes(_font[offset:offset + 2], 'little')
    return _mvfont[offset + 2:next_offs], 16, width
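# A usage sketch, not part of the generated file: dump one glyph as ASCII
# art, assuming the horizontal, MSB-first row layout (two bytes per row of
# the 16-pixel-high bitmap) that font_to_py-style generators emit.
if __name__ == '__main__':
    glyph, height, width = get_ch('A')
    for row in range(height):
        bits = int.from_bytes(glyph[2 * row:2 * row + 2], 'big')
        print(''.join('#' if bits & (0x8000 >> col) else ' ' for col in range(width)))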
| 62.204461
| 78
| 0.695392
| 3,989
| 16,733
| 2.911507
| 0.045876
| 0.586361
| 0.692785
| 0.700534
| 0.799552
| 0.752282
| 0.723696
| 0.697348
| 0.664371
| 0.639745
| 0
| 0.400934
| 0.039921
| 16,733
| 268
| 79
| 62.436567
| 0.322004
| 0.002809
| 0
| 0.094488
| 1
| 0.897638
| 0.890283
| 0.888943
| 0
| 1
| 0
| 0
| 0
| 1
| 0.031496
| false
| 0
| 0
| 0.027559
| 0.062992
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
83204304c2f735e91218db683272b9bcc5d80e1b
| 52,410
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/bgp_address_family/bgp_address_family.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/bgp_address_family/bgp_address_family.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/bgp_address_family/bgp_address_family.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the junos_bgp_address_family module
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class Bgp_address_familyArgs(object): # pylint: disable=R0903
"""The arg spec for the junos_bgp_address_family module
"""
def __init__(self, **kwargs):
pass
argument_spec = {
"config": {
"options": {
"address_family": {
"elements": "dict",
"options": {
"af_type": {
"elements": "dict",
"options": {
"accepted_prefix_limit": {
"options": {
"forever": {"type": "bool"},
"idle_timeout": {"type": "bool"},
"idle_timeout_value": {"type": "int"},
"limit_threshold": {"type": "int"},
"maximum": {"type": "int"},
"teardown": {"type": "bool"},
},
"type": "dict",
},
"add_path": {
"options": {
"receive": {"type": "bool"},
"send": {
"options": {
"include_backup_path": {
"type": "int"
},
"multipath": {"type": "bool"},
"path_count": {
"required": True,
"type": "int",
},
"path_selection_mode": {
"options": {
"all_paths": {
"type": "bool"
},
"equal_cost_paths": {
"type": "bool"
},
},
"type": "dict",
},
"prefix_policy": {
"type": "str"
},
},
"type": "dict",
},
},
"type": "dict",
},
"aggregate_label": {
"options": {
"community": {"type": "str"},
"set": {"type": "bool"},
},
"type": "dict",
},
"aigp": {
"options": {
"disable": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"damping": {"type": "bool"},
"defer_initial_multipath_build": {
"options": {
"maximum_delay": {"type": "int"},
"set": {"type": "bool"},
},
"type": "dict",
},
"delay_route_advertisements": {
"options": {
"max_delay_route_age": {"type": "int"},
"max_delay_routing_uptime": {
"type": "int"
},
"min_delay_inbound_convergence": {
"type": "int"
},
"min_delay_routing_uptime": {
"type": "int"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"entropy_label": {
"options": {
"import": {"type": "str"},
"no_next_hop_validation": {
"type": "bool"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"explicit_null": {
"options": {
"connected_only": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"extended_nexthop": {"type": "bool"},
"extended_nexthop_color": {"type": "bool"},
"graceful_restart_forwarding_state_bit": {
"choices": ["from-fib", "set"],
"type": "str",
},
"legacy_redirect_ip_action": {
"options": {
"receive": {"type": "bool"},
"send": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"local_ipv4_address": {"type": "str"},
"loops": {"type": "int"},
"no_install": {"type": "bool"},
"no_validate": {"type": "str"},
"output_queue_priority_expedited": {
"type": "bool"
},
"output_queue_priority_priority": {
"type": "int"
},
"per_group_label": {"type": "bool"},
"per_prefix_label": {"type": "bool"},
"prefix_limit": {
"options": {
"forever": {"type": "bool"},
"idle_timeout": {"type": "bool"},
"idle_timeout_value": {"type": "int"},
"limit_threshold": {"type": "int"},
"maximum": {"type": "int"},
"teardown": {"type": "bool"},
},
"type": "dict",
},
"resolve_vpn": {"type": "bool"},
"rib": {"choices": ["inet.3"], "type": "str"},
"ribgroup_name": {"type": "str"},
"route_refresh_priority_expedited": {
"type": "bool"
},
"route_refresh_priority_priority": {
"type": "int"
},
"secondary_independent_resolution": {
"type": "bool"
},
"set": {"type": "bool"},
"strip_nexthop": {"type": "bool"},
"topology": {
"elements": "dict",
"options": {
"community": {
"elements": "str",
"type": "list",
},
"name": {"type": "str"},
},
"type": "list",
},
"traffic_statistics": {
"options": {
"file": {
"options": {
"filename": {"type": "str"},
"files": {"type": "int"},
"no_world_readable": {
"type": "bool"
},
"size": {"type": "int"},
"world_readable": {
"type": "bool"
},
},
"type": "dict",
},
"interval": {"type": "int"},
"labeled_path": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"type": {
"choices": [
"any",
"flow",
"labeled-unicast",
"multicast",
"segment-routing-te",
"unicast",
"signaling",
"auto-discovery-mspw",
"auto-discovery-only",
],
"type": "str",
},
"withdraw_priority_expedited": {
"type": "bool"
},
"withdraw_priority_priority": {"type": "int"},
},
"type": "list",
},
"afi": {
"choices": [
"evpn",
"inet",
"inet-mdt",
"inet-mvpn",
"inet-vpn",
"inet6",
"inet6-mvpn",
"inet6-vpn",
"iso-vpn",
"l2vpn",
"route-target",
"traffic-engineering",
],
"type": "str",
},
},
"type": "list",
},
"groups": {
"elements": "dict",
"options": {
"address_family": {
"elements": "dict",
"options": {
"af_type": {
"elements": "dict",
"options": {
"accepted_prefix_limit": {
"options": {
"forever": {"type": "bool"},
"idle_timeout": {
"type": "bool"
},
"idle_timeout_value": {
"type": "int"
},
"limit_threshold": {
"type": "int"
},
"maximum": {"type": "int"},
"teardown": {"type": "bool"},
},
"type": "dict",
},
"add_path": {
"options": {
"receive": {"type": "bool"},
"send": {
"options": {
"include_backup_path": {
"type": "int"
},
"multipath": {
"type": "bool"
},
"path_count": {
"required": True,
"type": "int",
},
"path_selection_mode": {
"options": {
"all_paths": {
"type": "bool"
},
"equal_cost_paths": {
"type": "bool"
},
},
"type": "dict",
},
"prefix_policy": {
"type": "str"
},
},
"type": "dict",
},
},
"type": "dict",
},
"aggregate_label": {
"options": {
"community": {"type": "str"},
"set": {"type": "bool"},
},
"type": "dict",
},
"aigp": {
"options": {
"disable": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"damping": {"type": "bool"},
"defer_initial_multipath_build": {
"options": {
"maximum_delay": {
"type": "int"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"delay_route_advertisements": {
"options": {
"max_delay_route_age": {
"type": "int"
},
"max_delay_routing_uptime": {
"type": "int"
},
"min_delay_inbound_convergence": {
"type": "int"
},
"min_delay_routing_uptime": {
"type": "int"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"entropy_label": {
"options": {
"import": {"type": "str"},
"no_next_hop_validation": {
"type": "bool"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"explicit_null": {
"options": {
"connected_only": {
"type": "bool"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"extended_nexthop": {"type": "bool"},
"extended_nexthop_color": {
"type": "bool"
},
"graceful_restart_forwarding_state_bit": {
"choices": ["from-fib", "set"],
"type": "str",
},
"legacy_redirect_ip_action": {
"options": {
"receive": {"type": "bool"},
"send": {"type": "bool"},
"set": {"type": "bool"},
},
"type": "dict",
},
"local_ipv4_address": {"type": "str"},
"loops": {"type": "int"},
"no_install": {"type": "bool"},
"no_validate": {"type": "str"},
"output_queue_priority_expedited": {
"type": "bool"
},
"output_queue_priority_priority": {
"type": "int"
},
"per_group_label": {"type": "bool"},
"per_prefix_label": {"type": "bool"},
"prefix_limit": {
"options": {
"forever": {"type": "bool"},
"idle_timeout": {
"type": "bool"
},
"idle_timeout_value": {
"type": "int"
},
"limit_threshold": {
"type": "int"
},
"maximum": {"type": "int"},
"teardown": {"type": "bool"},
},
"type": "dict",
},
"resolve_vpn": {"type": "bool"},
"rib": {
"choices": ["inet.3"],
"type": "str",
},
"ribgroup_name": {"type": "str"},
"route_refresh_priority_expedited": {
"type": "bool"
},
"route_refresh_priority_priority": {
"type": "int"
},
"secondary_independent_resolution": {
"type": "bool"
},
"set": {"type": "bool"},
"strip_nexthop": {"type": "bool"},
"topology": {
"elements": "dict",
"options": {
"community": {
"elements": "str",
"type": "list",
},
"name": {"type": "str"},
},
"type": "list",
},
"traffic_statistics": {
"options": {
"file": {
"options": {
"filename": {
"type": "str"
},
"files": {
"type": "int"
},
"no_world_readable": {
"type": "bool"
},
"size": {
"type": "int"
},
"world_readable": {
"type": "bool"
},
},
"type": "dict",
},
"interval": {"type": "int"},
"labeled_path": {
"type": "bool"
},
"set": {"type": "bool"},
},
"type": "dict",
},
"type": {
"choices": [
"any",
"flow",
"labeled-unicast",
"multicast",
"segment-routing-te",
"unicast",
"signaling",
"auto-discovery-mspw",
"auto-discovery-only",
],
"type": "str",
},
"withdraw_priority_expedited": {
"type": "bool"
},
"withdraw_priority_priority": {
"type": "int"
},
},
"type": "list",
},
"afi": {
"choices": [
"evpn",
"inet",
"inet-mdt",
"inet-mvpn",
"inet-vpn",
"inet6",
"inet6-mvpn",
"inet6-vpn",
"iso-vpn",
"l2vpn",
"route-target",
"traffic-engineering",
],
"type": "str",
},
},
"type": "list",
},
"name": {"type": "str"},
"neighbors": {
"elements": "dict",
"options": {
"address_family": {
"elements": "dict",
"options": {
"af_type": {
"elements": "dict",
"options": {
"accepted_prefix_limit": {
"options": {
"forever": {
"type": "bool"
},
"idle_timeout": {
"type": "bool"
},
"idle_timeout_value": {
"type": "int"
},
"limit_threshold": {
"type": "int"
},
"maximum": {
"type": "int"
},
"teardown": {
"type": "bool"
},
},
"type": "dict",
},
"add_path": {
"options": {
"receive": {
"type": "bool"
},
"send": {
"options": {
"include_backup_path": {
"type": "int"
},
"multipath": {
"type": "bool"
},
"path_count": {
"required": True,
"type": "int",
},
"path_selection_mode": {
"options": {
"all_paths": {
"type": "bool"
},
"equal_cost_paths": {
"type": "bool"
},
},
"type": "dict",
},
"prefix_policy": {
"type": "str"
},
},
"type": "dict",
},
},
"type": "dict",
},
"aggregate_label": {
"options": {
"community": {
"type": "str"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"aigp": {
"options": {
"disable": {
"type": "bool"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"damping": {"type": "bool"},
"defer_initial_multipath_build": {
"options": {
"maximum_delay": {
"type": "int"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"delay_route_advertisements": {
"options": {
"max_delay_route_age": {
"type": "int"
},
"max_delay_routing_uptime": {
"type": "int"
},
"min_delay_inbound_convergence": {
"type": "int"
},
"min_delay_routing_uptime": {
"type": "int"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"entropy_label": {
"options": {
"import": {
"type": "str"
},
"no_next_hop_validation": {
"type": "bool"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"explicit_null": {
"options": {
"connected_only": {
"type": "bool"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"extended_nexthop": {
"type": "bool"
},
"extended_nexthop_color": {
"type": "bool"
},
"graceful_restart_forwarding_state_bit": {
"choices": [
"from-fib",
"set",
],
"type": "str",
},
"legacy_redirect_ip_action": {
"options": {
"receive": {
"type": "bool"
},
"send": {
"type": "bool"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"local_ipv4_address": {
"type": "str"
},
"loops": {"type": "int"},
"no_install": {"type": "bool"},
"no_validate": {"type": "str"},
"output_queue_priority_expedited": {
"type": "bool"
},
"output_queue_priority_priority": {
"type": "int"
},
"per_group_label": {
"type": "bool"
},
"per_prefix_label": {
"type": "bool"
},
"prefix_limit": {
"options": {
"forever": {
"type": "bool"
},
"idle_timeout": {
"type": "bool"
},
"idle_timeout_value": {
"type": "int"
},
"limit_threshold": {
"type": "int"
},
"maximum": {
"type": "int"
},
"teardown": {
"type": "bool"
},
},
"type": "dict",
},
"resolve_vpn": {
"type": "bool"
},
"rib": {
"choices": ["inet.3"],
"type": "str",
},
"ribgroup_name": {
"type": "str"
},
"route_refresh_priority_expedited": {
"type": "bool"
},
"route_refresh_priority_priority": {
"type": "int"
},
"secondary_independent_resolution": {
"type": "bool"
},
"set": {"type": "bool"},
"strip_nexthop": {
"type": "bool"
},
"topology": {
"elements": "dict",
"options": {
"community": {
"elements": "str",
"type": "list",
},
"name": {
"type": "str"
},
},
"type": "list",
},
"traffic_statistics": {
"options": {
"file": {
"options": {
"filename": {
"type": "str"
},
"files": {
"type": "int"
},
"no_world_readable": {
"type": "bool"
},
"size": {
"type": "int"
},
"world_readable": {
"type": "bool"
},
},
"type": "dict",
},
"interval": {
"type": "int"
},
"labeled_path": {
"type": "bool"
},
"set": {
"type": "bool"
},
},
"type": "dict",
},
"type": {
"choices": [
"any",
"flow",
"labeled-unicast",
"multicast",
"segment-routing-te",
"unicast",
"signaling",
"auto-discovery-mspw",
"auto-discovery-only",
],
"type": "str",
},
"withdraw_priority_expedited": {
"type": "bool"
},
"withdraw_priority_priority": {
"type": "int"
},
},
"type": "list",
},
"afi": {
"choices": [
"evpn",
"inet",
"inet-mdt",
"inet-mvpn",
"inet-vpn",
"inet6",
"inet6-mvpn",
"inet6-vpn",
"iso-vpn",
"l2vpn",
"route-target",
"traffic-engineering",
],
"type": "str",
},
},
"type": "list",
},
"neighbor_address": {"type": "str"},
},
"type": "list",
},
},
"type": "list",
},
},
"type": "dict",
},
"running_config": {"type": "str"},
"state": {
"choices": [
"merged",
"replaced",
"overridden",
"deleted",
"rendered",
"gathered",
"parsed",
],
"default": "merged",
"type": "str",
},
} # pylint: disable=C0301
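# A hedged sketch, not part of this generated file: the spec above is what a
# module entry point hands to AnsibleModule. The wiring in the real junos
# collection may differ from this minimal form.
def _example_main():
    from ansible.module_utils.basic import AnsibleModule
    module = AnsibleModule(
        argument_spec=Bgp_address_familyArgs.argument_spec,
        supports_check_mode=True,
    )
    module.exit_json(changed=False)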
| 58.558659
| 93
| 0.144457
| 1,456
| 52,410
| 4.993132
| 0.152473
| 0.128748
| 0.059422
| 0.07923
| 0.915131
| 0.907978
| 0.906465
| 0.906465
| 0.906465
| 0.906465
| 0
| 0.003002
| 0.777562
| 52,410
| 894
| 94
| 58.624161
| 0.620604
| 0.011906
| 0
| 0.674797
| 0
| 0
| 0.135692
| 0.0272
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001161
| false
| 0.001161
| 0.004646
| 0
| 0.00813
| 0.001161
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
832e8297c3514e8c5a13c40b9e560fa2b232e673
| 11,776
|
py
|
Python
|
weatherdata/notebook.py
|
godmac/weather
|
ad95110f0d4838037e56c934e6a96cc90f6bf27d
|
[
"Apache-2.0"
] | null | null | null |
weatherdata/notebook.py
|
godmac/weather
|
ad95110f0d4838037e56c934e6a96cc90f6bf27d
|
[
"Apache-2.0"
] | null | null | null |
weatherdata/notebook.py
|
godmac/weather
|
ad95110f0d4838037e56c934e6a96cc90f6bf27d
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/python
# coding: utf-8
from Tkinter import *
class Notebook(Frame):
    def __init__(self, parent, activerelief=SOLID, inactiverelief=SOLID,
                 xpad=4, ypad=4, activefg='black', activebg='green',
                 inactivebg='grey', bd=1, inactivefg='black', **kw):
        self.activefg = activefg
        self.activebg = activebg
        self.bd = bd
        self.inactivebg = inactivebg
        self.inactivefg = inactivefg
        self.deletedTabs = []
        self.xpad = xpad
        self.ypad = ypad
        self.activerelief = activerelief
        self.inactiverelief = inactiverelief
        self.kwargs = kw
        self.tabVars = {}
        self.tabs = 0
        self.noteBookFrame = Frame(parent)
        self.BFrame = Frame(self.noteBookFrame)
        self.noteBook = Frame(self.noteBookFrame, relief=RAISED, bd=1, **kw)
        self.noteBook.grid_propagate(0)
        Frame.__init__(self)
        self.noteBookFrame.grid()
        self.BFrame.grid(row=0, sticky=W)
        self.noteBook.grid(row=1, column=0, columnspan=27)
    def add_tab(self, width=1, **kw):
        temp = self.tabs
        self.tabVars[self.tabs] = [Label(self.BFrame, relief=RIDGE, **kw)]
        self.tabVars[self.tabs][0].bind("<Button-1>", lambda event: self.change_tab(temp))
        self.tabVars[self.tabs][0].pack(side=LEFT, ipady=self.ypad, ipadx=self.xpad)
        self.tabVars[self.tabs].append(Frame(self.noteBook, **self.kwargs))
        self.tabVars[self.tabs][1].grid(row=0, column=0)
        self.change_tab(0)
        self.tabs += 1
        return self.tabVars[temp][1]
    def change_tab(self, IDNum):
        for i in range(len(self.tabVars)):
            if i not in self.deletedTabs:
                if i != IDNum:
                    self.tabVars[i][1].grid_remove()
                    self.tabVars[i][0]['relief'] = self.inactiverelief
                    self.tabVars[i][0]['fg'] = self.inactivefg
                    self.tabVars[i][0]['bg'] = self.inactivebg
                    self.tabVars[i][0]['bd'] = self.bd
                else:
                    self.tabVars[i][1].grid()
                    self.tabVars[i][0]['relief'] = self.activerelief
                    self.tabVars[i][0]['fg'] = self.activefg
                    self.tabVars[i][0]['bg'] = self.activebg
                    self.tabVars[i][0]['bd'] = self.bd
root = Tk()
scheduledimage=PhotoImage(data="R0lGODlhgACAAPcAAAAAAAsLCxUVFRoaGjIZGSMjIy0tLTIrKzU0NDw8PEEWFkgVFX0AAGoWFmMYGEo3N0JCQkxMTFlOTlNUVF1SUlxcXH9HR2dXV2RkZGplZWxsbHVlZXxjZH1maHtsbHR0dH5ycnx8fKYSEqwTE6QdHbUdHb4aGowyMpYzM58wMKQoKLomJqMxMaI8PLc0NI5OTpxOToRaWqtAQKNLS7JERbVLTbdOUatVVa5ZWbNWVrtTVb1XWbJeXr9ZW7BfYINiY4xoaZRtboJ0dIp1dZJ4eKhgYKltbbRlZbhkZ7VnaL1maLVra6tycr5xcrR8fMBbXsNeYcViZMhlZslmaMNrbctqbM5ucNBucMV2d8l2d815etJzdNR3edZ6fNl8ftt/gYSEhIyMjI+QkJOTk5eYmJycnLKDg7yFhbePj7eVlbmdnaCfoKSkpKeoqKurq7ukpK+wsLS0tL2xsby8vMePj9aCg92DhOGGh+GHiOOLjOWPkOqUlO6bm/Kfn86pqdCrq8OyssK8vPOhocTExM3Nzd7FxdDPz8/Q0NXV1dbY2Nvb2+bOzufQ0OrV1eDf3+3c3N7g4OPj4+3i4ufp6evr6/Lt7e/w8PP19fn19ff4+Pz8/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAJsALAAAAACAAIAAAAj+ADcJHEiwoMGDCBMqXMiwocOHECNKnEixosWLGDNq3Mixo8ePIEOKHEmypMmTKFOqXMmypcuXMGPKnEmzps2bGcPoJKlTDM6OZSJNUlRGpBhHlCKx+bkxTqSkcES2eTopDtOEEyJo3cq1awSnkxzF8Uq2rNmtD+BEqlQVwlmvE2CCIXTokKG7dfPqJTRmzlOxZAjdHWxIr2G8dvMOtluYUBm1lCbNITPIcN3CluNUePkhk6bPoEOHzgTHLyVHc9xgEs26tWvRc5xaijQHjufXogdB4IypUo4cN3Dg4CG8OA8eN34M+juIw43j0I8fic5jenXr1alDxxHDr6VJhH7+BN9+XPjz4HQ0DUrwUgMmSTqgyI8iZUqV+1aubKEChNDTSIME0cUdeOSRhx567KHgHnw0KMiDffTR4IJ7IKhHHnjc4UUQyyVFCBD5XXGFFVOUKEUUUTwBxRNJqIdAe+/poCKK9dlnRYhU/GAIc0BoQaCBFi7YYIQQTsiHghZimGEXQHQ4ySEcUCGiFfdNIcWJ8j3BoovtZQKfilDQZx9+IlLBwSFCRUIIBz7iUeCBCSo4ZB8QRnjkghcqaYcWHOwYWSJRbjEilVVcieKKW64H4yPxhUmfFPdVkd8WSnCAyF+GcJAFgQUGyeAeEtIJ4adIImigm1n8cGlSgCoh6I3+VZgoRZZPHMGlSxpc8uWKV9aHnxVbICFEImkioumPQFI4Z519fJqgqW7igYUHxLKlCAhIbAFrlVgiausgL+LqpYyORlGipDdaMQMIq0aCyAY4VOEmnMoOWeedpeoRrR03sBvJbNfOoMO2Jh7aQw9L3NqSe/DJp4SYY+aXggIhEDsJJIlgQEADV7yJoJzLFjlhqW7eIYMCw641FBgEODAwlSXS14N8CCvMEgaYMKriE2JGSuIJBoRBLKshGPDADkB+DDIfotqJ7x4HYngHCkEPPUkiYRiQAA2E3odiFFD0kGi4C48LJoqxojvFCxBU/FciGkAggRQe10ukIE7jiWD+yTBA8EG1Q4Xg1suxnnioli3OQTZLGow7M42Q/mrEBA988EEIH2jwwAQbcJF0hQw2yLTIpFaYJx5LTJCABpdbDsEEGQwa6xRfa9lDi+C+hMG4WfZq4413CCHBAcQfgAAFQpwxL71LNy3h09ASSIQECBR/gARCmOEFlSRaCXaKt2ui+EsZ5NwojRHfyMWeTbTfBBZ1GHjH50Iy7fzIpuub4R161IGF+1jIgh3sMCj7eI9mUMDd4laCM/j0IEUQm0K6JsWFLnTBC1/4gh04Rb/QjQ5vTLvTs043wAxaUAtcENSUqkS7KCghbOEbn0t2tyueXclGktqCDivoBQxq0A7+bzqQkEB1NzuRykIlK+EXetgFLqRwhbNrYZYSt0CVlI9RDnvU7/SzBSde0IcbnJenPlXE5+HLVHm4wwDtsEQvODGFryKUFFfUA1vJsCUZsITOypU+YHWRCz3M4A+DqDQGRahp+EvSqdaYQS80UYfaglXMDFYr8VUxJRiwRCPI9bVzUYmLf7zgDzn4sQQdyV4glBCF0JghNi7Rgk7U4bYKZ7A6WpJ8ldjk4x71K/28EZAYXKMY4wSyQ+LtkKsk4QCZ2MQnFnCSYNOSHS+JEgxQYhE0sIE2dcBNHezgYFrimRZLdCMRdbGLX1yiBu+gRgwyM5YqlGSsDhWmJxysB9z+zIEOtFmDGuAAE3dkSQauyYCCGvSgCE2oQhfK0IY6dKE3ACg1T5LHSvzhohjNqB8yytGOevSjIO2oH0ZK0pKalKSFwEQcJmqSPGoiE5fIhExnGlOZxvSmMK3pJXA6057uFKZAxelOh3oJTBSVqDu1xCUswdSmWqISTS3qLWdoiZfmdKhXFWpQtzpUo2L1q0G9qVe9utSlWoISZ6WEWtU6ibYKpa1tpUQm4mAA8l1CE0TNqk5/+lW+IjWvR82rXpXK1LW6NRKRgERiHcHYSCgCsZCdBCbgwNKSZBKvOyXrX4mqWaR2drOgXWpkJoFYSJhWEahNhGpXy9pEoPYpbqj+K1UxC9rPhva2fyXsWSchCdJCNrWJQIRwCXMXwRAGEY9tg2xbctmhKjWp0H3ucwN7iUo4F7qhfSpbDwsJRQQXM4YgxCDGO945jDcQ5B0EIQSjiDUs92ZVfa5tNzvf2xY2MmpNE2O9i4jikncOAI5DbOJA4ALHBsCVIcN7GVhVs+q2rEx1sIMJW10KmzWplKiEYeGK2NT2d73lHTAcRuyGNrRhDWwowxpUzAY2tKE0YSiA7iqhCQtjQrqYMOp0b0zU6ZZ1qRrWcFpHi1hH8BfEgxhwHOBg4hSTYQxiCEOUxUDlKothDGX4gIxnSIkaO3XCTo1qmL+s3bXidxKUkMT+bxXx4UEEYsBucAMb1kCGKPeEymPAcotXTOcri+EDFaisSDKZCabyOMKItsSNdWvhp1YXzWaGdGT
UbOTgijfJS5Zziu0sZTGQIQ6DSIQkbjPUz1hCEYaYwxqgLDiVVCC+Y461rJtq5loTucPBPUR5M33iMowBDD0pwyAccZvP2PQS4r2raChxCDiMIQwZqKZcZ83oMEP1vrY+82GPvGsm9xnYUnaDIkBDU6S2gQxIlSloKDGZP5ukAtOOtVqpvdsz9zauHE6sd8Pr5iW3IcXPBgOV49Blq3a1x5agc2hvkwnHhAEDJMEAjekda6iiGa6RHi1vI2HkNsdGznQOA7D+xdCGgmdVrzudRBnQ7VfPGnsQYgADe0IC70zMW7tnzTlada7xe7f13qNVM8fZHN4Ay9nXIg/DGAwBmvoSdRJPxm1Rb6MUMWwGJBOYhM2duvMxZxjjcMV32PWrWn7Xxg0rjjmw11BwqTsXEnCAhI9vC5o5hCEEWJdrrTWMVlvDVRI+Jy1v/17kfY/X32kHA7Dj4BmU97XHcJf740P7GUTE/CNZ37qZh5zWsHs+8IcdeiLMDge01xncg/gMaB2fVEi0QfJuL7UmIkGGMHgk82vF9pA/7/nB53u/iBBvbHqt9jAQQvWxv+klLlaGRMzd7Z+hBBnA0JEISFbS2f45Yt3+qmbBR6K33y/86Pt99OIfn7axz+nFyHCI57vdM5S4/Eayjgm/57f3nn9r6PebCF0PIg5H92vAFgjIl3w9lgiBMQkGSFSfEQl3txHWt3zZ9lagJ3h/Z4GlZWlJBgcAd3dh4AYvtYBIdTVjMAgKiFus5xlYE20ZMQGRsHxg532QNYNCh1jdR1qMhTH9hWknVnvARgarIYJIZQmJIAZxAAlCmFmfMQhhsBsY4YIwKAkaJxT39n0cdoFvJX6GUF4BqHhhAAkhmIRJVYSvJ4Y79RlrQH0YYX1sMXYz+IZw+IaM5QjftYFtsHKKBwZzEIZm+CQkB3tJ6BmQEAYasIYKeG/+NZgmMqiI2yeH+0YIgQCAfRYCYFAGnmGGznUInuZ8mPgZdreGL5iFcTiKcchYdVgbd/hslHgI6GeGlnAIAseJnWgJYfABFxEB/0KKutiIRVZp/LZkIUeJZHCJmJhUhxACIdB+xShTg6CGFYGLszGDvvWG0ziK/LeFw7dqdwcGrIiJV/WKmHMIJ+iNlyAGhfiMuVhkcqiOkKVYjgBZ71hpwZdkcnZ6YDAGxCh1rPd0x/gBJiiCKOcZceCME4GLlKBYu6iLc8hfh2d62xgHfFiMr5g5/xiIXTV7YHB1Bfkfi5VYigUJ78hxicVxCBmP+naKTOZrYECJ47aPIshsH4D+AXEwjsr3V59FYUCFCWSAdxQBje24jiYJh/FoZGwmfBxYBmJAiWJwbMVojCGAAXHndti2VnyHVjI1BwQZEQnQBiH2Zm6WZOiFXm4WCOiFYAB2lnFgF/TIBrVHibaRgkJICYkQk2sgeVlFVpMwZNmWe5mgCGDghBKBABpgCDkGWtJ1XZylYeNFCMPHBqrIjRGJglI3kRhABoBIVJHxHReHX36HZjdGiBQBARoQAojwUlZlVQx3mjNlmjm1mNmoiiEwCa1YjHIZk0Izd9q2l3tZVGXAkxIRAdMXAoqQCavRU8QpU5hgnD51Y+bFmHFwhzFHiTUphlkFkxhQMdOVZmL+h38/N2lxFVNx4JsRMQFuAAfICAlGhZzqOVPJmWPt6VzmZYdIiYz4OJuYCJMV8AGIUFhxxVa6WWvYpx7iCRETMAeG4AaUGAljpXxG1aBHVZhmFZ+1sWmYswZM2ZSXYJ0a4B+ep3H4pnEg6qGYcAi2OBETsByD0AaUOI6VYF1I5aJIpVYSyoG1JwQawAYXaoCsN5EVIJOH8FqK+HO9Z4Wk5XO9hQlwM3Pj6ReIMAhs8AFhQJM71aJ/ZV3Xxm7ldZRJqQFtEJmTZ4DMpgEVUAEhQAZuAGqVgQiuJZJEqn8VKFmKoAFKSqAmyGZOCqWUcGHVNaVJ5aJnxRYSWn4hoAH+b7mAeqVXlEAIYjqmjIoBHyAEYbAGpaFehqCmj7V9F3hxmBAJGhABJuoXqNWka/ABYkClUWVdYlZdGlYVXEihGsB2kJAXhKCALolbcqkGFTABurqrupqrvZoBHwAGZFAahOBa79hWlzZeTyKnnzp0djqqY/BU0pphf5pz9yUZWQqdgzoGiRAHY0AG4JoIX+p2cikHJmABrMM6lqMB6Zo57mo5liOsXPlYk8CWK7cGiDAHzCoRBepY8hgIWUYGOMd1TNWiu8WqPDifGlAGZ2VapuV+sXc1gbACKyAH6nVp65WxGrtehSFcxvpWqiVcipCoGiBoB9GvqJWyW0gGH8D+BjuXYWpVlZ0nCWiKikgJBhoQBtMphL3hWIPgAiUgB8g1kjMYkj+5WEYLh545BxDXrCmbWocwB2Nwo9mml6OFpv/nBirJOneFiaSlCINAA0GLCAhZjbpYhZj6hpSACWxwjvw6B0+bsv0XCCzrBj2HZr1lWJEAB+RVeiGXOVq3cFL3tYPAA0G7IwmZuHC4tmLAgvzqFKrlXa61b3NgjnYbgx4aCW6gXhvIlrWIAd0Yl/pmCEswAm+AJoqbupDFVCGgkUtKlHHrXYzZuATHneC3ucJXYmVwdxgAgn9Vq9HoWIfgBKb7o0A5gwj5hgh5qXE4GxgAmOMZB3ToXY2wWpL+O7sYMAcbV6Rpgli4i2loNwaDCgYFuFmD9VuHgAbFy7yqq7gKyLTPOAdEOblPa2l2hwHL4X3V+L0AVmLTx66QUKtP95GKcAhqIAJqYLzwSIpJ+4aX2sBPgQnQVhETEAeSu1pqKrJ2CgcPtxyjyJVmV34xCZECfJCmlViI8AYkkAY/eqmP9cKOoFiKEMMx/FinZWSO1WEvnMP/gQFx0ZNw2wiSS70XHFz/B20eLIebO49zIAfhCwYYAAY7q1cXc8JsJgcqwMLMa5IgWVoQLJJfXFo5bAlx4LgmasHWm8bedWRwAAYfQAgxzHHxyJWHcGmpcYeEiAGIsLNJ5ZEOCwn+iBAILMDCxIJaOLzDMwzDiYzIM0yUjvXCD0sJT2kRFcxfatparRVebjAEH2AIJwySjsAGg9AYPLgGYiAEGBCt2HVWPPxbTdoCZnAIxJK0QNph6jiHOUi0RuvCj2UJg4ABJpsQEQAH1IvJGFy9o0cI5qmf3UXDXJnM2bq7GoABzjeDimDF9WsIM2AGhkC/T/vIKQvOhtzIsWvIh/wUlJCRtwgHwSVcHhuyGQzPumaepIla3cWViKCWGxi+TykGagXJ75jIjjV6RcDNk2vLO8zDQBrQc8jDMpzD4WwJTBvMwhwHxuzOyMxa+ZyimINcqCXK/XUXSWazeTwISQG79pz+WoZgBEwgyzMszuUcuyg9zjP9WkkBaBghAezszu/M0z4tXLrGBsjoWonABoQQ0mYHgGwJBsAKCafxWERZaYdgBkxQqeSsw4X3yCKpyI31wl5tZPGIZmTQtOscXKrVCP1l1u5s1hi8hUI9LO01CMPFb6X8Z6kcGSkL1sGlBlVtqbHbXd9czjXtyOP8WJQwCBngqRgBAewMz5ccz48d2eK1BpRY1KE218I3fLsLaH
GgZnndf29Q1d4ct4kgxDEttzWdWsAlFHDjuhbB2D8d27I9j6MaBsJWdoWR1CkJbfibFHILCEZg1ae9yMNNh/t12iHZuhsBAcJ2ls7t3GT53M7+/ZwPF8Urt3LgWmc6sZKZM6YZMKuRQNSAIMuJYNyRu8YXvMZEjd6Tq8aq7Vqkpc7LDQEJAAH0Xd/2Td/5/QD5nd/4fd8GgAAFMAACMAAEXuACkOAKruAToAHiONBqXNoyPdr1G7epPcNq9nAU/RAA0OEe/uEgHuIiPuIkHgAN7h/hXeFpvOL0y1pCHLkw3t7EglhhUAFzehUNAQAC0KNOAeGqLbmNUGnl/eNnXd4wPsREXd7uAmjQi+MEWgFkoMOYjOTGnORWPuWINQcZEGhOXhEQQKY7QoerJeasheQceN5oXsxKThS52uUXYQBjygYvbNYZfdaX3Ahs2Vp1Hrm2iBUIYqrYbn4RX06ojvAIdJjBj63Wed7OPB25ikUIGfnDgZ4RBaCrC4smwKXWHtsG3BqyZv3SihCeuXrjk44RBRABE1ABejjkma6md2i95DwIZZABurrhpa6VvfoBZWCgQ45YzwnYutYGUKyrEbBgt/4RBZAAqI7qFbAByAhsQuDGPhwBEjABEGDsx04SAxDg9s0VbpEAAp7t4j7u5F7u5n7u6J7u6r7u7N7u7v7u8B7v8j4TAQEAOw==")
note = Notebook(root, width=400, height=400, activefg='red', inactivefg='blue')
note.grid()
tab1 = note.add_tab(text="Tab One", image=scheduledimage)
tab2 = note.add_tab(text="Tab Two")
tab3 = note.add_tab(text="Tab Three")
root.mainloop()
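# A follow-on sketch (assumption: these lines belong before root.mainloop(),
# which blocks): each add_tab() call returns a plain Frame, so ordinary
# Tkinter widgets can be packed straight into a tab.
Label(tab1, text="Contents of tab one").pack(padx=10, pady=10)
Button(tab2, text="A button on tab two").pack(padx=10, pady=10)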
| 173.176471
| 7,811
| 0.779637
| 482
| 11,776
| 19.012448
| 0.510373
| 0.021606
| 0.011785
| 0.00993
| 0.029136
| 0.017678
| 0.004583
| 0
| 0
| 0
| 0
| 0.09814
| 0.173658
| 11,776
| 67
| 7,812
| 175.761194
| 0.843593
| 0.002802
| 0
| 0.035088
| 0
| 0.017544
| 0.673436
| 0.666324
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017544
| null | null | 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83653be6051c139ff35b1b646caf954226430577
| 10,739
|
py
|
Python
|
python/dqTools/recordDB.py
|
WhiteTshirtXI/subgrid
|
724dfd543c19563bd7874cf952c21f25dd7d1352
|
[
"MIT"
] | null | null | null |
python/dqTools/recordDB.py
|
WhiteTshirtXI/subgrid
|
724dfd543c19563bd7874cf952c21f25dd7d1352
|
[
"MIT"
] | 3
|
2018-10-22T08:35:17.000Z
|
2018-11-07T16:14:36.000Z
|
python/dqTools/recordDB.py
|
otbrown/subgrid
|
724dfd543c19563bd7874cf952c21f25dd7d1352
|
[
"MIT"
] | 1
|
2020-05-07T07:48:02.000Z
|
2020-05-07T07:48:02.000Z
|
"""A simple library for serializing data to disk and allowing random
access.
Probably best not to use this-- either straightforward pickling or a
"proper" scientific format like netCDF or HDF.
I don't think any subgrid runs use this, but I can't be sure.
"""
import cPickle
import os.path
# i.e. 0.5GB
MAXFILESIZE = 2**29
VERSION = 3
def new(name):
"""Factory function that creates a new RecordDB with filename name."""
rdb = RecordDB()
rdb.new(name)
return rdb
def open(name, readonly=False):
"""Factory function that opens an existing RecordDB with filename name."""
ver = _versionCheck(name)
assert ver <= VERSION
if ver == 1:
rdb = RecordDB_V1()
elif ver == 2:
rdb = RecordDB_V2()
else:
rdb = RecordDB()
rdb.open(name, readonly)
return rdb
def _versionCheck(name):
name = os.path.abspath(name)
assert os.path.exists(name)
assert os.path.isdir(name)
v = 1
for f in os.listdir(name):
(base,ext) = os.path.splitext(f)
if base == 'version':
v = int(ext[1:])
break
continue
return v
class RecordDB_V3(object):
"""Class that wraps an indexed store of records.
Slightly altered to append only, making it suitable for use with
auto-checkpointing!"""
def __init__(self):
self.version = 3
self.readonly = False
return
def new(self, name):
"""Initialises a new RDB."""
name = os.path.abspath(name)
assert not os.path.exists(name)
self.name = name
os.mkdir(name)
file(os.path.join(name, 'version.%d' % self.version), 'w').close()
self.map = []
self.mapFN = os.path.join(name, 'map.dat')
file(self.mapFN, 'w').close()
return
def open(self, name, readonly):
"""Opens an existing RDB."""
name = os.path.abspath(name)
assert os.path.exists(name)
assert os.path.isdir(name)
self.name = name
self.mapFN = os.path.join(self.name, 'map.dat')
assert os.path.exists(self.mapFN)
assert os.path.isfile(self.mapFN)
self.loadMap()
self.readonly = readonly
return
def __len__(self):
return len(self.map)
def __getitem__(self, key):
if type(key) is int:
(fn, posn, tag) = self.map[key]
else:
found = []
for entry in self.map:
(fn, posn, tag) = entry
if tag == key:
found.append(entry)
pass
continue
if len(found) == 0:
raise KeyError(key)
if len(found) > 1:
raise KeyError("Duplicate keys cannot be searched for!")
(fn, posn, tag) = found[0]
f = file(os.path.join(self.name,fn), 'rb')
try:
f.seek(posn)
p = cPickle.Unpickler(f)
return p.load()
finally:
f.close()
def append(self, item, tag=None):
"""Appends item to the DB."""
assert self.readonly == False
fn = self.chooseFile(item)
f = file(os.path.join(self.name,fn), 'ab')
try:
posn = f.tell()
p = cPickle.Pickler(f, protocol=2)
p.dump(item)
finally:
f.close()
# update the map
self.map.append((fn, posn, tag))
mapfile = file(self.mapFN, 'ab')
cPickle.dump((fn, posn, tag), mapfile, protocol=2)
mapfile.close()
return
def __setitem__(self, key, value):
raise NotImplementedError
def loadMap(self):
"""Loads the map from the mapfile."""
f = file(self.mapFN, 'rb')
self.map = []
while True:
try:
self.map.append(cPickle.load(f))
except EOFError:
break
f.close()
return
def chooseFile(self, item):
"""Selects the filename to which to write the next object."""
# get the last file
if len(self.map) == 0:
fn = "000000.dat"
else:
(fn, posn, tag) = self.map[-1]
sizeB = os.path.getsize(os.path.join(self.name,fn))
if sizeB > MAXFILESIZE:
ind = int(fn[:6]) + 1
assert ind <= 999999
fn = "%.6d.dat" % ind
return fn
class RecordDB_V2(object):
"""Class that wraps an indexed store of records."""
def __init__(self):
self.readonly = False
return
def new(self, name):
"""Initialises a new RDB."""
name = os.path.abspath(name)
assert not os.path.exists(name)
self.name = name
os.mkdir(name)
file(os.path.join(name, 'version.%d' % 2), 'w').close()
self.version = 2
self.map = []
self.mapFN = os.path.join(name, 'map.dat')
self.dumpMap()
return
def open(self, name, readonly):
"""Opens an existing RDB."""
name = os.path.abspath(name)
assert os.path.exists(name)
assert os.path.isdir(name)
self.name = name
self.mapFN = os.path.join(self.name, 'map.dat')
assert os.path.exists(self.mapFN)
assert os.path.isfile(self.mapFN)
self.loadMap()
self.readonly = readonly
return
def __len__(self):
return len(self.map)
def __getitem__(self, key):
if type(key) is int:
(fn, posn, tag) = self.map[key]
else:
found = []
for entry in self.map:
(fn, posn, tag) = entry
if tag == key:
found.append(entry)
pass
continue
if len(found) == 0:
raise KeyError(key)
if len(found) > 1:
raise KeyError("Duplicate keys cannot be searched for!")
(fn, posn, tag) = found[0]
f = file(os.path.join(self.name,fn), 'rb')
try:
f.seek(posn)
p = cPickle.Unpickler(f)
return p.load()
finally:
f.close()
def append(self, item, tag=None):
"""Appends item to the DB."""
assert self.readonly == False
fn = self.chooseFile(item)
f = file(os.path.join(self.name,fn), 'ab')
try:
posn = f.tell()
p = cPickle.Pickler(f, protocol=2)
p.dump(item)
finally:
f.close()
# update the map
self.map.append((fn, posn, tag))
self.dumpMap()
return
def __setitem__(self, key, value):
raise NotImplementedError
def dumpMap(self):
"""Writes the map to the mapfile."""
f = file(self.mapFN, 'wb')
try:
p = cPickle.Pickler(f, protocol=2)
p.dump(self.map)
finally:
f.close()
return
def loadMap(self):
"""Loads the map from the mapfile."""
f = file(self.mapFN, 'rb')
try:
p = cPickle.Unpickler(f)
self.map = p.load()
finally:
f.close()
return
def chooseFile(self, item):
"""Selects the filename to which to write the next object."""
# get the last file
if len(self.map) == 0:
fn = "000000.dat"
else:
(fn, posn, tag) = self.map[-1]
sizeB = os.path.getsize(os.path.join(self.name,fn))
if sizeB > MAXFILESIZE:
ind = int(fn[:6]) + 1
assert ind <= 999999
fn = "%.6d.dat" % ind
return fn
class RecordDB_V1(object):
"""Class that wraps an indexed store of records."""
def __init__(self):
self.readonly = False
return
def new(self, name):
"""Initialises a new RDB."""
name = os.path.abspath(name)
assert not os.path.exists(name)
self.name = name
os.mkdir(name)
self.map = []
self.mapFN = os.path.join(name, 'map.dat')
self.dumpMap()
return
def open(self, name, readonly):
"""Opens an existing RDB."""
name = os.path.abspath(name)
assert os.path.exists(name)
assert os.path.isdir(name)
self.name = name
self.mapFN = os.path.join(self.name, 'map.dat')
assert os.path.exists(self.mapFN)
assert os.path.isfile(self.mapFN)
self.loadMap()
self.readonly = readonly
return
def __len__(self):
return len(self.map)
def __getitem__(self, key):
(fn, posn) = self.map[key]
f = file(os.path.join(self.name,fn), 'rb')
try:
f.seek(posn)
p = cPickle.Unpickler(f)
return p.load()
finally:
f.close()
def append(self, item):
"""Appends item to the DB."""
assert self.readonly == False
fn = self.chooseFile(item)
f = file(os.path.join(self.name,fn), 'ab')
try:
posn = f.tell()
p = cPickle.Pickler(f, protocol=2)
p.dump(item)
finally:
f.close()
# update the map
self.map.append((fn, posn))
self.dumpMap()
return
def __setitem__(self, key, value):
raise NotImplementedError
def dumpMap(self):
"""Writes the map to the mapfile."""
f = file(self.mapFN, 'wb')
try:
p = cPickle.Pickler(f, protocol=2)
p.dump(self.map)
finally:
f.close()
return
def loadMap(self):
"""Loads the map from the mapfile."""
f = file(self.mapFN, 'rb')
try:
p = cPickle.Unpickler(f)
self.map = p.load()
finally:
f.close()
return
def chooseFile(self, item):
"""Selects the filename to which to write the next object."""
# get the last file
if len(self.map) == 0:
fn = "000000.dat"
else:
(fn, posn) = self.map[-1]
sizeB = os.path.getsize(os.path.join(self.name,fn))
if sizeB > MAXFILESIZE:
ind = int(fn[:6]) + 1
assert ind <= 999999
fn = "%.6d.dat" % ind
return fn
pass
RecordDB = RecordDB_V3
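# A minimal usage sketch under the module's own Python 2 assumptions;
# 'demo.rdb' is a hypothetical path that must not already exist.
if __name__ == '__main__':
    rdb = new('demo.rdb')            # factory defined above
    rdb.append({'step': 0}, tag='first')
    rdb.append({'step': 1})
    print(len(rdb))                  # -> 2
    print(rdb['first'])              # tag lookup -> {'step': 0}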
| 26.71393
| 78
| 0.495763
| 1,280
| 10,739
| 4.115625
| 0.142969
| 0.052392
| 0.03227
| 0.031891
| 0.812642
| 0.812642
| 0.812642
| 0.812642
| 0.812642
| 0.794609
| 0
| 0.012424
| 0.385418
| 10,739
| 401
| 79
| 26.780549
| 0.785758
| 0.11705
| 0
| 0.863014
| 0
| 0
| 0.024171
| 0
| 0
| 0
| 0
| 0
| 0.082192
| 1
| 0.109589
| false
| 0.010274
| 0.006849
| 0.010274
| 0.226027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55e0cc112266db53f5499fa90a7d42f520af95c8
| 160
|
py
|
Python
|
hass/triggers.py
|
drejkim/potter-pi
|
c900cf82877b0ef462f999e1620523b845faf45c
|
[
"MIT"
] | 5
|
2019-10-21T20:29:23.000Z
|
2019-12-13T23:34:46.000Z
|
hass/triggers.py
|
drejkim/potter-pi
|
c900cf82877b0ef462f999e1620523b845faf45c
|
[
"MIT"
] | 8
|
2020-09-25T19:51:22.000Z
|
2022-02-10T00:29:04.000Z
|
hass/triggers.py
|
estherjk/potter-pi
|
c900cf82877b0ef462f999e1620523b845faf45c
|
[
"MIT"
] | null | null | null |
triggers = {
'lumos': 'automation.potter_pi_spell_lumos',
'nox': 'automation.potter_pi_spell_nox',
'revelio': 'automation.potter_pi_spell_revelio'
}
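# A hedged sketch of how this mapping might be consumed: firing the matching
# automation through Home Assistant's REST API. The URL and token below are
# placeholders, not values from this repo.
import requests
def fire(spell, hass_url='http://localhost:8123', token='TOKEN'):
    requests.post(
        hass_url + '/api/services/automation/trigger',
        headers={'Authorization': 'Bearer ' + token},
        json={'entity_id': triggers[spell]},
    )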
| 32
| 51
| 0.725
| 19
| 160
| 5.631579
| 0.421053
| 0.448598
| 0.504673
| 0.64486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13125
| 160
| 5
| 52
| 32
| 0.769784
| 0
| 0
| 0
| 0
| 0
| 0.689441
| 0.596273
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36306d9af175e649b8e9ba89c460c4df1038467f
| 117
|
py
|
Python
|
Beginner/2172.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/2172.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/2172.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
X, M = tuple(map(int, input().split()))
while X != 0 or M != 0:
    print(X * M)
    X, M = tuple(map(int, input().split()))
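# Worked example (not part of the submission): given the input
#   3 4
#   2 5
#   0 0
# the loop prints 12 and 10, then stops at the "0 0" sentinel.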
| 19.5
| 40
| 0.547009
| 23
| 117
| 2.782609
| 0.478261
| 0.09375
| 0.21875
| 0.3125
| 0.71875
| 0.71875
| 0.71875
| 0
| 0
| 0
| 0
| 0.020833
| 0.179487
| 117
| 5
| 41
| 23.4
| 0.645833
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3640f43e6e97b1279d048873fb4e3a64fbfb7ac9
| 6,244
|
py
|
Python
|
code/week0/ship.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | null | null | null |
code/week0/ship.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | 4
|
2022-03-14T21:08:38.000Z
|
2022-03-28T21:14:59.000Z
|
code/week0/ship.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | 2
|
2022-03-17T21:35:49.000Z
|
2022-03-28T06:27:41.000Z
|
#prefuncy.py
import time
import os
Color34 = "\u001b[34m"
Color37 = "\u001b[37m"
# As you can see, it's not very optimal
def ship1():
print(" |\ ")
print(" |/ ")
print("\__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship2():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship3():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship4():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship5():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship6():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship7():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship8():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship9():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship10():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship11():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship12():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship13():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship14():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship15():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship16():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship17():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship18():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \u001b[37m")
def ship19():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
def ship20():
print(" |\ ")
print(" |/ ")
print(" \__ |__/ ")
print(" \____/ ")
print("\u001b[34m -------------------------------------------- \u001b[37m")
os.system("clear")
time.sleep(.1)
ship1()
time.sleep(.5)
os.system("clear")
ship2()
time.sleep(.5)
os.system("clear")
ship3()
time.sleep(.5)
os.system("clear")
ship4()
time.sleep(.5)
os.system("clear")
ship5()
time.sleep(.5)
os.system("clear")
ship6()
time.sleep(.5)
os.system("clear")
ship7()
time.sleep(.5)
os.system("clear")
ship8()
time.sleep(.5)
os.system("clear")
ship9()
time.sleep(.5)
os.system("clear")
ship10()
time.sleep(.5)
os.system("clear")
ship11()
time.sleep(.5)
os.system("clear")
ship12()
time.sleep(.5)
os.system("clear")
ship13()
time.sleep(.5)
os.system("clear")
ship14()
time.sleep(.5)
os.system("clear")
ship15()
time.sleep(.5)
os.system("clear")
ship16()
time.sleep(.5)
os.system("clear")
ship17()
time.sleep(.5)
os.system("clear")
ship18()
time.sleep(.5)
os.system("clear")
ship19()
time.sleep(.5)
os.system("clear")
ship20()
time.sleep(.5)
os.system("clear")
print("or so you thought...")
time.sleep(.5)
os.system("clear")
| 26.570213
| 79
| 0.299968
| 397
| 6,244
| 4.314861
| 0.120907
| 0.467017
| 0.525394
| 0.467017
| 0.793929
| 0.793929
| 0.511967
| 0.511967
| 0.511967
| 0.488033
| 0
| 0.07985
| 0.402306
| 6,244
| 234
| 80
| 26.683761
| 0.379153
| 0.007687
| 0
| 0.756614
| 0
| 0
| 0.582916
| 0.142096
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10582
| false
| 0
| 0.010582
| 0
| 0.116402
| 0.534392
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
364e33cdd0396009d1d801788153717ed9c495b5
| 9,026
|
py
|
Python
|
userbot/plugins/maskit.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
userbot/plugins/maskit.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
userbot/plugins/maskit.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
"""
Kanged from Dark Cobra userbot
thanks to hellboy atul and his team
"""
import os
from PIL import Image
from userbot.utils import lightning_cmd
if not os.path.isdir("./dcobra/"):
os.makedirs("./dcobra/")
# made by @THE_B_LACK_HAT Some errors solved by Sh1vam
@borg.on(lightning_cmd(pattern=r".cmask"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
linc = event.text
link = linc[7:]
pic = linc[31:]
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system(f"wget {link}")
imagePath = lol
maskPath = f"{pic}"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".ppro"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/f061c861ba85fbb23a51e.png")
imagePath = lol
maskPath = "f061c861ba85fbb23a51e.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "pro.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".oxy"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/df2d739544595ae337642.png")
imagePath = lol
maskPath = "df2d739544595ae337642.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".gold"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/4cc40d1e0846667488341.png")
imagePath = lol
maskPath = "4cc40d1e0846667488341.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".old"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/55fcb205c6f8f4790585e.png")
imagePath = lol
maskPath = "55fcb205c6f8f4790585e.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".krish"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/54d2a267d411951b41a20.png")
imagePath = lol
maskPath = "54d2a267d411951b41a20.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
@borg.on(lightning_cmd(pattern=r".cprotect"))
async def scan(event):
path = "dcobra"
await event.edit("HeHe He Wants A Mask 🤪")
reply = await event.get_reply_message()
lol = await borg.download_media(reply.media, path)
import cv2
os.system(
"wget https://datreon.000webhostapp.com/haarcascade_frontalface_default.xml"
)
os.system("wget https://telegra.ph/file/b934a713abb321bd1a9fe.png")
imagePath = lol
maskPath = "b934a713abb321bd1a9fe.png"
cascPath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascPath)
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.15)
background = Image.open(imagePath)
for (x, y, w, h) in faces:
mask = Image.open(maskPath)
mask = mask.resize((w, h), Image.ANTIALIAS)
offset = (x, y)
background.paste(mask, offset, mask=mask)
file_name = "old.png"
hehe = path + "/" + file_name
background.save(hehe, "PNG")
await borg.send_file(event.chat_id, hehe)
for files in (hehe, lol):
if files and os.path.exists(files):
os.remove(files)
await event.delete()
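# All of the handlers above repeat one cascade-detect-and-paste routine; a
# hedged sketch of that core step in isolation. Paths are placeholders, and
# the mask PNG is assumed to carry an alpha channel so paste() can use it.
import cv2
from PIL import Image
def overlay_mask(image_path, mask_path, out_path,
                 cascade_path="haarcascade_frontalface_default.xml"):
    gray = cv2.cvtColor(cv2.imread(image_path), cv2.COLOR_BGR2GRAY)
    faces = cv2.CascadeClassifier(cascade_path).detectMultiScale(gray, 1.15)
    background = Image.open(image_path)
    for (x, y, w, h) in faces:
        mask = Image.open(mask_path).resize((w, h))
        background.paste(mask, (x, y), mask=mask)
    background.save(out_path, "PNG")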
| 22.397022
| 84
| 0.651562
| 1,155
| 9,026
| 5.015584
| 0.110823
| 0.036251
| 0.070085
| 0.077335
| 0.882099
| 0.882099
| 0.877611
| 0.877611
| 0.877611
| 0.877611
| 0
| 0.037185
| 0.225349
| 9,026
| 402
| 85
| 22.452736
| 0.790332
| 0.013406
| 0
| 0.840517
| 0
| 0
| 0.177925
| 0.044397
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043103
| 0
| 0.043103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
367b4206d0a7c802cde179041a8c565c2a07954c
| 17
|
py
|
Python
|
tests/inputs/if-branching/17-mergin-with-nonexistent.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 4
|
2019-10-06T18:01:24.000Z
|
2020-07-03T05:27:35.000Z
|
tests/inputs/if-branching/17-mergin-with-nonexistent.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | 5
|
2021-06-07T15:50:04.000Z
|
2021-06-07T15:50:06.000Z
|
tests/inputs/if-branching/17-mergin-with-nonexistent.py
|
helq/pytropos
|
497ed5902e6e4912249ca0a46b477f9bfa6ae80a
|
[
"MIT"
] | null | null | null |
if _:
l = 21
| 5.666667
| 10
| 0.352941
| 3
| 17
| 1.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.529412
| 17
| 2
| 11
| 8.5
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36844291ff3d0cd8be56a3c30b58711ae344f700
| 5,892
|
py
|
Python
|
muarch/funcs/moments.py
|
DanielBok/muarch
|
c9bf60e3e029a443646fa35479ca2ed0dd23c31e
|
[
"MIT"
] | 14
|
2019-03-14T10:10:17.000Z
|
2022-01-31T19:44:24.000Z
|
muarch/funcs/moments.py
|
DanielBok/muarch
|
c9bf60e3e029a443646fa35479ca2ed0dd23c31e
|
[
"MIT"
] | 2
|
2020-04-30T13:35:42.000Z
|
2021-08-12T07:52:06.000Z
|
muarch/funcs/moments.py
|
DanielBok/muarch
|
c9bf60e3e029a443646fa35479ca2ed0dd23c31e
|
[
"MIT"
] | 11
|
2019-05-27T15:55:10.000Z
|
2021-06-25T16:59:32.000Z
|
from typing import Union
import numpy as np
from scipy.stats import kurtosis, skew
from .time_unit import get_integer_time_unit
__all__ = ["get_annualized_kurtosis", "get_annualized_mean", "get_annualized_sd", "get_annualized_skew"]
def get_annualized_kurtosis(data: np.ndarray, time_unit: Union[int, str] = 'monthly') -> Union[float, np.ndarray]:
"""
Gets the annualized kurtosis for each asset class in the data cube
Parameters
----------
data
Data matrix or tensor. The axis must represent time, trials and assets respectively where the assets axis
is valid only if the data is a tensor.
time_unit: int or str
Specifies how many units (first axis) is required to represent a year. For example, if each time period
represents a month, set this to 12. If quarterly, set to 4. Defaults to 12 which means 1 period represents
a month. Alternatively, you could put in a string name of the time_unit. Accepted values are weekly,
monthly, quarterly, semi-annually and yearly
Returns
-------
ndarray
The annualized kurtosis for the asset class or an array containing the annualized kurtosis for each
asset class.
"""
return np.mean([kurtosis(i) for i in annualize_data(data, time_unit)], 0)
def get_annualized_mean(data: np.ndarray, time_unit: Union[int, str] = 'monthly') -> Union[float, np.ndarray]:
"""
Gets the annualized mean for each asset class in the data cube
Parameters
----------
data
Data matrix or tensor. The axis must represent time, trials and assets respectively where the assets axis
is valid only if the data is a tensor.
time_unit: int or str
Specifies how many units (first axis) is required to represent a year. For example, if each time period
represents a month, set this to 12. If quarterly, set to 4. Defaults to 12 which means 1 period represents
a month. Alternatively, you could put in a string name of the time_unit. Accepted values are weekly,
monthly, quarterly, semi-annually and yearly
Returns
-------
float or ndarray
The annualized mean for the asset class or an array containing the annualized mean for each asset class.
"""
time_unit = get_integer_time_unit(time_unit)
years = len(data) // time_unit
data = (annualize_data(data, time_unit) + 1).prod(0)
return np.mean(np.sign(data) * np.abs(data) ** (1 / years), 0) - 1
def get_annualized_sd(data: np.ndarray, time_unit: Union[int, str] = 'monthly') -> Union[float, np.ndarray]:
"""
Gets the annualized standard deviation for each asset class in the data cube
Parameters
----------
data
Data matrix or tensor. The axes must represent time, trials and assets respectively, where the assets axis
is valid only if the data is a tensor.
time_unit: int or str
Specifies how many units (first axis) is required to represent a year. For example, if each time period
represents a month, set this to 12. If quarterly, set to 4. Defaults to 12 which means 1 period represents
a month. Alternatively, you could put in a string name of the time_unit. Accepted values are weekly,
monthly, quarterly, semi-annually and yearly
Returns
-------
float or ndarray
The annualized standard deviation (volatility) for the asset class or an array containing the annualized
standard deviation for each asset class.
"""
return annualize_data(data, time_unit).std(1).mean(0)
def get_annualized_skew(data: np.ndarray, time_unit: Union[int, str] = 'monthly') -> Union[float, np.ndarray]:
"""
Gets the annualized skew for each asset class in the data cube
Parameters
----------
data
Data matrix or tensor. The axes must represent time, trials and assets respectively, where the assets axis
is valid only if the data is a tensor.
time_unit: int or str
Specifies how many units (first axis) is required to represent a year. For example, if each time period
represents a month, set this to 12. If quarterly, set to 4. Defaults to 12 which means 1 period represents
a month. Alternatively, you could put in a string name of the time_unit. Accepted values are weekly,
monthly, quarterly, semi-annually and yearly
Returns
-------
float or ndarray
The annualized skew for the asset class or an array containing the annualized skew for each asset class.
"""
return np.mean([skew(i) for i in annualize_data(data, time_unit)], 0)
def annualize_data(data: np.ndarray, time_unit: Union[str, int]) -> np.ndarray:
"""
Annualizes the data. Note that the dimension of the data will change from this transformation
Parameters
----------
data
Data matrix or tensor. The axes must represent time, trials and assets respectively, where the assets axis
is valid only if the data is a tensor.
time_unit: int or str
Specifies how many units (first axis) is required to represent a year. For example, if each time period
represents a month, set this to 12. If quarterly, set to 4. Defaults to 12 which means 1 period represents
a month. Alternatively, you could put in a string name of the time_unit. Accepted values are weekly,
monthly, quarterly, semi-annually and yearly
Returns
-------
ndarray
The annualized data.
"""
data = np.asarray(data)
time_unit = get_integer_time_unit(time_unit)
assert data.ndim in (2, 3), "data must be 2 or 3 dimensional"
if data.ndim == 2:
t, n = data.shape
return (data.reshape((t // time_unit, time_unit, n)) + 1).prod(1) - 1
else:
t, n, a = data.shape
return (data.reshape((t // time_unit, time_unit, n, a)) + 1).prod(1) - 1
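# --- Illustration (not part of the original module): a minimal sketch of the
# reshape-and-compound step that annualize_data performs for 2-D monthly
# input, using made-up returns.
import numpy as np
demo_returns = np.full((24, 2), 0.01)  # 24 months x 2 trials of 1% returns
demo_time_unit = 12  # 12 periods make up a year
t, n = demo_returns.shape
demo_yearly = (demo_returns.reshape((t // demo_time_unit, demo_time_unit, n)) + 1).prod(1) - 1
print(demo_yearly.shape)  # (2, 2): 2 years x 2 trials
print(demo_yearly[0, 0])  # ~0.126825, i.e. (1.01 ** 12) - 1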
| 40.634483
| 114
| 0.683299
| 887
| 5,892
| 4.467869
| 0.137542
| 0.064598
| 0.042897
| 0.055514
| 0.845067
| 0.830936
| 0.824375
| 0.797376
| 0.767348
| 0.767348
| 0
| 0.011173
| 0.240496
| 5,892
| 144
| 115
| 40.916667
| 0.874413
| 0.66463
| 0
| 0.076923
| 0
| 0
| 0.084988
| 0.014268
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.192308
| false
| 0
| 0.153846
| 0
| 0.576923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
36c35ae84774de6132db9626e3e3eca1b1ecd0c2
| 58,083
|
py
|
Python
|
lib/load_mersi.py
|
NingAnMe/snow_cover_of_remote_sensing
|
aabd0f4754eb5200203fc8a90f06b603dcd260a8
|
[
"Apache-2.0"
] | 1
|
2020-08-19T08:34:53.000Z
|
2020-08-19T08:34:53.000Z
|
lib/load_mersi.py
|
NingAnMe/snow_cover_of_remote_sensing
|
aabd0f4754eb5200203fc8a90f06b603dcd260a8
|
[
"Apache-2.0"
] | null | null | null |
lib/load_mersi.py
|
NingAnMe/snow_cover_of_remote_sensing
|
aabd0f4754eb5200203fc8a90f06b603dcd260a8
|
[
"Apache-2.0"
] | 1
|
2020-07-01T16:32:15.000Z
|
2020-07-01T16:32:15.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime
import os
import re
import h5py
from lib.pb_io import attrs2dict
from lib.pb_sat import planck_r2t
from lib.read_base import ReadL1
import numpy as np
import pandas as pd
__description__ = 'MERSI sensor reader class'
__author__ = 'wangpeng'
__date__ = '2018-08-28'
__version__ = '1.0.0_beat'
g_main_path, g_main_file = os.path.split(os.path.realpath(__file__))
class ReadMersiL1(ReadL1):
"""
Reads L1 data from the MERSI sensor
Resolution: 1000m
Satellites: [FY3A FY3B FY3C]
Number of channels: 20
Visible channels: 1, 2, 3, 4, 6~20
Infrared channels: 5
Resolution: 1000m
Satellites: [FY3D]
Number of channels: 25
Visible channels: 1~20
Infrared channels: 20~25
Resolution: 250
Satellites:
Number of channels:
Visible channels:
Infrared channels:
"""
def __init__(self, in_file, geo_file=None, cloud_file=None, in_ir_file=None, in_vis_file=None, coef_txt_flag=None):
sensor = 'MERSI'
self.in_ir_file = in_ir_file
self.in_vis_file = in_vis_file
self.coef_txt_flag = coef_txt_flag
super(ReadMersiL1, self).__init__(in_file, sensor)
self.geo_file = geo_file
self.cloud_file = cloud_file
def set_resolution(self):
"""
use filename set self.resolution
:return:
"""
file_name = os.path.basename(self.in_file)
if '1000M' in file_name:
self.resolution = 1000
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
def set_satellite(self):
"""
use filename set self.satellite
:return:
"""
file_name = os.path.basename(self.in_file)
pattern = r'([A-Z0-9]+)_%s.*' % self.sensor
m = re.match(pattern, file_name)
if m:
self.satellite = m.groups()[0]
else:
raise ValueError('Cant get the satellite name from file name.')
def set_ymd_hms(self):
"""
use filename set self.ymd self.hms
"""
file_name = os.path.basename(self.in_file)
pat = r'\w{4}_\w{5}_\w{4}_L1_(\d{8})_(\d{4})_\w{5}_MS\.HDF$'
g = re.match(pat, file_name)
if g:
self.ymd = g.group(1)
self.hms = g.group(2) + '00'
else:
raise ValueError('Cant get the ymdhms from file name.')
def set_file_attr(self):
"""
get hdf5 file attrs self.file_attr
:return:
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
with h5py.File(self.in_file, 'r') as h5r:
self.file_attr = attrs2dict(h5r.attrs)
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
"Cant handle this resolution: ".format(self.resolution))
def set_data_shape(self):
"""
use dataset set self.data_shape
:return:
"""
# If the resolution is 1000 m
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
self.data_shape = (2000, 2048)
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# elif self.resolution == 250:
else:
raise ValueError(
"Cant handle this resolution: ".format(self.resolution))
def set_channels(self):
"""
return sensor channels
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
if self.satellite in satellite_type1:
self.channels = 20
elif self.satellite in satellite_type2:
self.channels = 25
# elif self.resolution == 250:
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
def __get_geo_file(self):
"""
return the geolocation file
"""
if self.geo_file is not None:
return self.geo_file
else:
if self.resolution == 1000:
satellite_type1 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
geo_file = self.in_file[:-12] + 'GEO1K_MS.HDF'
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
"Cant handle this resolution: ".format(self.resolution))
return geo_file
def __get_clm_file(self):
"""
return the cloud mask file
"""
if self.cloud_file is not None:
return self.cloud_file
else:
if self.resolution == 1000:
satellite_type1 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
clm_file = self.in_file.replace(
'GBAL_L1', 'ORBT_L2_CLM_MLT_NUL')
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
"Cant handle this resolution: ".format(self.resolution))
return clm_file
def get_cloudmask(self):
data = None
clm_flag = np.full(self.data_shape, -999)
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
in_file = self.__get_clm_file()
with h5py.File(in_file, 'r') as h5r:
data_pre = h5r.get('Cloud_Mask')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
z = data_pre[0, :, :]
# 0 means invalid
z0 = z & 0b1
z12 = (z >> 1) & 0b11
z4 = (z >> 4) & 0b1
z67 = (z >> 6) & 0b11
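# (Added note, an assumption based on the flags assigned below: z0 is the
# validity bit, z12 the 2-bit cloud-mask class, z4 the sun-glint bit and
# z67 the 2-bit land/sea surface type of the FY-3 cloud-mask bit layout.)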
# Invalid 0
mask = (z == 0)
idx = np.where(mask)
clm_flag[idx] = 0
# Coastlines
mask = (z67 == 0b01)
idx = np.where(mask)
clm_flag[idx] = 1
# Uncertain
mask = (z12 == 0b01) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 2
# Cloud
mask = (z12 == 0b00) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 3
# Poss Land Clear
mask = ((z67 == 0b11) | (z67 == 0b10)) & (
z12 == 0b10) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 4
# Land Clear
mask = ((z67 == 0b11) | (z67 == 0b10)) & (
z12 == 0b11) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 5
# Poss Sea Clear
mask = (z67 == 0b00) & (z12 == 0b10) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 6
# Sea Clear
mask = (z67 == 0b00) & (z12 == 0b11) & (z4 == 0b1) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 7
# Sun Glint
mask = (z67 == 0b00) & (z12 == 0b11) & (z4 == 0b0) & (z0 == 0b1)
idx = np.where(mask)
clm_flag[idx] = 8
data = clm_flag
return data
def get_dn(self):
"""
return DN
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C']
satellite_type3 = ['FY3D']
if self.satellite in satellite_type1:
data_file = self.in_file
with h5py.File(data_file, 'r') as h5r:
ary_ch1 = h5r.get('/EV_250_Aggr.1KM_RefSB')[:]
ary_ch5 = h5r.get('/EV_250_Aggr.1KM_Emissive')[:]
ary_ch6 = h5r.get('/EV_1KM_RefSB')[:]
vmin = 0
vmax = 10000
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 4:
k = i
data_pre = ary_ch1[k]
# Start processing
elif i == 4:
data_pre = ary_ch5
else:
k = i - 5
data_pre = ary_ch6[k]
data_pre = data_pre.astype(np.float32)
invalid_index = np.logical_or(
data_pre <= vmin, data_pre > vmax)
data_pre[invalid_index] = np.nan
data[band] = data_pre
elif self.satellite in satellite_type2:
data_file = self.in_file
with h5py.File(data_file, 'r') as h5r:
ary_ch1 = h5r.get('/Data/EV_250_Aggr.1KM_RefSB')[:]
ary_ch5 = h5r.get('/Data/EV_250_Aggr.1KM_Emissive')[:]
ary_ch6 = h5r.get('/Data/EV_1KM_RefSB')[:]
vmin = 0
vmax = 10000
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 4:
k = i
data_pre = ary_ch1[k]
# Start processing
elif i == 4:
data_pre = ary_ch5
else:
k = i - 5
data_pre = ary_ch6[k]
data_pre = data_pre.astype(np.float32)
invalid_index = np.logical_or(
data_pre <= vmin, data_pre > vmax)
data_pre[invalid_index] = np.nan
data[band] = data_pre
elif self.satellite in satellite_type3:
data_file = self.in_file
with h5py.File(data_file, 'r') as h5r:
ary_ch1_4 = h5r.get('/Data/EV_250_Aggr.1KM_RefSB')[:]
ary_ch5_19 = h5r.get('/Data/EV_1KM_RefSB')[:]
ary_ch20_23 = h5r.get('/Data/EV_1KM_Emissive')[:]
ary_ch24_25 = h5r.get('/Data/EV_250_Aggr.1KM_Emissive')[:]
vmin = 0
vmax = 65000
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 4:
k = i
data_pre = ary_ch1_4[k]
# Start processing
elif i >= 4 and i < 19:
k = i - 4
data_pre = ary_ch5_19[k]
elif i >= 19 and i < 23:
k = i - 19
data_pre = ary_ch20_23[k]
else:
k = i - 23
data_pre = ary_ch24_25[k]
data_pre = data_pre.astype(np.float32)
invalid_index = np.logical_or(
data_pre <= vmin, data_pre > vmax)
data_pre[invalid_index] = np.nan
data[band] = data_pre
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_k0_from_txt(self):
k0_vis = k0_ir = None
if self.in_vis_file is not None:
k0_k1_vis_df = pd.read_table(self.in_vis_file, sep='\t')
k0_vis = k0_k1_vis_df.iloc[:, [0, 1]].to_numpy()
if self.in_ir_file is not None:
k0_k1_ir_df = pd.read_table(self.in_ir_file, sep='\t')
k0_ir = k0_k1_ir_df.iloc[:, [0, 1]].to_numpy()
return k0_vis, k0_ir
def get_k1_from_txt(self):
k1_vis = k1_ir = None
if self.in_vis_file is not None:
k0_k1_vis_df = pd.read_table(self.in_vis_file, sep='\t')
k1_vis = k0_k1_vis_df.iloc[:, [0, 2]].to_numpy()
if self.in_ir_file is not None:
k0_k1_ir_df = pd.read_table(self.in_ir_file, sep='\t')
k1_ir = k0_k1_ir_df.iloc[:, [0, 2]].to_numpy()
return k1_vis, k1_ir
def get_k2_from_txt(self):
k2_vis = k2_ir = None
if self.in_vis_file is not None:
k0_k1_vis_df = pd.read_table(self.in_vis_file, sep='\t')
k2_vis = k0_k1_vis_df.iloc[:, [0, 3]].to_numpy()
if self.in_ir_file is not None:
k0_k1_ir_df = pd.read_table(self.in_ir_file, sep='\t')
k2_ir = k0_k1_ir_df.iloc[:, [0, 3]].to_numpy()
return k2_vis, k2_ir
def get_k0(self):
"""
return K0
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C']
satellite_type3 = ['FY3D']
# FY3AB
if self.satellite in satellite_type1:
# vis_k, 54 = 19*3 (19 channels, 3 coefficients)
ary_vis_coeff = self.file_attr['VIR_Cal_Coeff']
K = np.full((19, 3), 0.)
for i in range(19):
for j in range(3):
K[i, j] = ary_vis_coeff[i * 3 + j]
# Expand to 20*3: k0, k1, k2
values = np.array([0, 1, 0])
K = np.insert(K, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k0
channel_data = np.full(
self.data_shape, K[i, 0], dtype=np.float32)
data[band] = channel_data
# FY3C
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Expand 19*3 to 20*3; the values filled in for the IR channel do not affect the original DN values
values = np.array([0, 1, 0])
K = np.insert(ary_vis_coeff, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k0
channel_data = np.full(
self.data_shape, K[i, 0], dtype=np.float32)
data[band] = channel_data
# FY3D
elif self.satellite in satellite_type3:
with h5py.File(self.in_file, 'r') as h5r:
ary_ir_coeff = h5r.get('/Calibration/IR_Cal_Coeff')[:]
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Reshape dimensions
s = self.data_shape
ary_vis_coeff1 = np.repeat(
ary_vis_coeff[:, 0], s[0] * s[1])
ary_ir_coeff1 = np.repeat(
ary_ir_coeff[:, 0, :], 10 * s[1], axis=1)
# Reshape to 19*2000*2048 and 6*2000*2048
ary_vis_coeff2 = ary_vis_coeff1.reshape(
(-1,) + self.data_shape)
ary_ir_coeff2 = ary_ir_coeff1.reshape(
(-1,) + self.data_shape)
# Process each channel
s = self.data_shape
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 19:
k = i
data[band] = ary_vis_coeff2[k]
else:
k = i - 19
data[band] = ary_ir_coeff2[k]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
if self.coef_txt_flag: # Handled here so that channels that do not need recalibration can still be processed
k0_vis, k0_ir = self.get_k0_from_txt()
if k0_vis is not None:
for channel_name, k0 in k0_vis:
if channel_name in data:
data[channel_name][:] = k0
if k0_ir is not None:
for channel_name, k0 in k0_ir:
if channel_name in data:
data[channel_name][:] = k0
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_k1(self):
"""
return K1
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C']
satellite_type3 = ['FY3D']
# FY3AB
if self.satellite in satellite_type1:
# vis_k, 54 = 19*3 (19 channels, 3 coefficients)
ary_vis_coeff = self.file_attr['VIR_Cal_Coeff']
K = np.full((19, 3), 0.)
for i in range(19):
for j in range(3):
K[i, j] = ary_vis_coeff[i * 3 + j]
# Expand to 20*3: k0, k1, k2
values = np.array([0, 1, 0])
K = np.insert(K, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k1
channel_data = np.full(
self.data_shape, K[i, 1], dtype=np.float32)
data[band] = channel_data
# FY3C
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Expand 19*3 to 20*3; the values filled in for the IR channel do not affect the original DN values
values = np.array([0, 1, 0])
K = np.insert(ary_vis_coeff, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k1
channel_data = np.full(
self.data_shape, K[i, 1], dtype=np.float32)
data[band] = channel_data
# FY3D
elif self.satellite in satellite_type3:
with h5py.File(self.in_file, 'r') as h5r:
ary_ir_coeff = h5r.get('/Calibration/IR_Cal_Coeff')[:]
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Reshape dimensions
s = self.data_shape
ary_vis_coeff1 = np.repeat(
ary_vis_coeff[:, 1], s[0] * s[1])
ary_ir_coeff1 = np.repeat(
ary_ir_coeff[:, 1, :], 10 * s[1], axis=1)
# Reshape to 19*2000*2048 and 6*2000*2048
ary_vis_coeff2 = ary_vis_coeff1.reshape(
(-1,) + self.data_shape)
ary_ir_coeff2 = ary_ir_coeff1.reshape(
(-1,) + self.data_shape)
# Process each channel
s = self.data_shape
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 19:
k = i
data[band] = ary_vis_coeff2[k]
else:
k = i - 19
data[band] = ary_ir_coeff2[k]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
if self.coef_txt_flag: # Handled here so that channels that do not need recalibration can still be processed
k1_vis, k1_ir = self.get_k1_from_txt()
if k1_vis is not None:
for channel_name, k1 in k1_vis:
if channel_name in data:
data[channel_name][:] = k1
if k1_ir is not None:
for channel_name, k1 in k1_ir:
if channel_name in data:
data[channel_name][:] = k1
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_k2(self):
"""
return K2
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C']
satellite_type3 = ['FY3D']
# FY3AB
if self.satellite in satellite_type1:
# vis_k, 54 = 19*3 (19 channels, 3 coefficients)
ary_vis_coeff = self.file_attr['VIR_Cal_Coeff']
K = np.full((19, 3), 0.)
for i in range(19):
for j in range(3):
K[i, j] = ary_vis_coeff[i * 3 + j]
# Expand to 20*3: k0, k1, k2
values = np.array([0, 1, 0])
K = np.insert(K, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k2
channel_data = np.full(
self.data_shape, K[i, 2], dtype=np.float32)
data[band] = channel_data
# FY3C
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Expand 19*3 to 20*3; the values filled in for the IR channel do not affect the original DN values
values = np.array([0, 1, 0])
K = np.insert(ary_vis_coeff, 4, values, 0)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
# k2
channel_data = np.full(
self.data_shape, K[i, 2], dtype=np.float32)
data[band] = channel_data
# FY3D
elif self.satellite in satellite_type3:
with h5py.File(self.in_file, 'r') as h5r:
ary_ir_coeff = h5r.get('/Calibration/IR_Cal_Coeff')[:]
ary_vis_coeff = h5r.get('/Calibration/VIS_Cal_Coeff')[:]
# Reshape dimensions
s = self.data_shape
ary_vis_coeff1 = np.repeat(
ary_vis_coeff[:, 2], s[0] * s[1])
ary_ir_coeff1 = np.repeat(
ary_ir_coeff[:, 2, :], 10 * s[1], axis=1)
# Reshape to 19*2000*2048 and 6*2000*2048
ary_vis_coeff2 = ary_vis_coeff1.reshape(
(-1,) + self.data_shape)
ary_ir_coeff2 = ary_ir_coeff1.reshape(
(-1,) + self.data_shape)
# Process each channel
s = self.data_shape
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 19:
k = i
data[band] = ary_vis_coeff2[k]
else:
k = i - 19
data[band] = ary_ir_coeff2[k]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
if self.coef_txt_flag: # Handled here so that channels that do not need recalibration can still be processed
k2_vis, k2_ir = self.get_k2_from_txt()
if k2_vis is not None:
for channel_name, k2 in k2_vis:
if channel_name in data:
data[channel_name][:] = k2
if k2_ir is not None:
for channel_name, k2 in k2_ir:
if channel_name in data:
data[channel_name][:] = k2
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_k3(self):
pass
def get_ref(self):
"""
return Ref
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
# FY3A/B/C
if self.satellite in satellite_type1:
dn = self.get_dn()
k0 = self.get_k0()
k1 = self.get_k1()
k2 = self.get_k2()
if 'FY3B' in self.satellite:
if int(self.ymd + self.hms) <= 20130306001500:
scales = 100.
else:
scales = 10000.
else:
scales = 100.
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if 'CH_05' in band:
continue
channel_data = dn[band] ** 2 * k2[band] + dn[band] * \
k1[band] + k0[band]
pre_data = channel_data / scales
idx = np.where(pre_data < 0.)
if len(idx[0]) > 0:
pre_data[idx] = np.nan
data[band] = pre_data
# FY3D
elif self.satellite in satellite_type2:
dn = self.get_dn()
k0 = self.get_k0()
k1 = self.get_k1()
k2 = self.get_k2()
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i < 19:
pre_data = dn[band] ** 2 * k2[band] + dn[band] * \
k1[band] + k0[band]
idx = np.where(pre_data < 0.)
if len(idx[0]) > 0:
pre_data[idx] = np.nan
data[band] = pre_data / 100.
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_rad(self):
"""
return rad
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
if self.satellite in satellite_type1:
dn = self.get_dn()
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if 'CH_05' in band:
data[band] = dn[band] / 100.
elif self.satellite in satellite_type2:
dn = self.get_dn()
with h5py.File(self.in_file, 'r') as h5r:
ary_a1 = h5r.get('/Data/EV_1KM_Emissive').attrs['Slope']
ary_b1 = h5r.get(
'/Data/EV_1KM_Emissive').attrs['Intercept']
ary_a2 = h5r.get(
'/Data/EV_250_Aggr.1KM_Emissive').attrs['Slope']
ary_b2 = h5r.get(
'/Data/EV_250_Aggr.1KM_Emissive').attrs['Intercept']
a = np.concatenate((ary_a1, ary_a2))
b = np.concatenate((ary_b1, ary_b2))
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if i >= 19:
k = i - 19
data[band] = dn[band] * a[k] + b[k]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_tbb_k1(self):
"""
return tbb_k1 dict one value
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
if self.satellite in satellite_type1:
data['CH_05'] = 1
elif self.satellite in satellite_type2:
data['CH_20'] = 1.00103
data['CH_21'] = 1.00085
data['CH_22'] = 1.00125
data['CH_23'] = 1.00030
data['CH_24'] = 1.00133
data['CH_25'] = 1.00065
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_tbb_k0(self):
"""
return tbb_k0 dict one value
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
if self.satellite in satellite_type1:
data['CH_05'] = 0
elif self.satellite in satellite_type2:
data['CH_20'] = -0.4759
data['CH_21'] = -0.3139
data['CH_22'] = -0.2662
data['CH_23'] = -0.0513
data['CH_24'] = -0.0734
data['CH_25'] = 0.0875
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_tbb(self):
"""
return tbb
"""
data = dict()
if self.resolution == 1000: # resolution is 1000
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
# Correction coefficients for the rad-to-tbb conversion; fixed values for all time steps
tbb_k0 = self.get_tbb_k0()
tbb_k1 = self.get_tbb_k1()
rads = self.get_rad()
central_wave_numbers = self.get_central_wave_number()
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
if band in list(rads.keys()):
k0 = tbb_k0[band]
k1 = tbb_k1[band]
central_wave_number = central_wave_numbers[band]
rad = rads[band]
tbb = planck_r2t(rad, central_wave_number)
data[band] = tbb * k1 + k0
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_sv(self):
"""
return sv
"""
data = dict()
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
try:
data_pre = h5r.get('/SV_DN_average')[:]
# Filter invalid values
invalid_index = np.logical_or(
data_pre <= 0, data_pre > 4095)
data_pre = data_pre.astype(np.float32)
data_pre[invalid_index] = np.nan
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
channel_data = np.full(
self.data_shape, np.nan, dtype=np.float32)
channel_data[:] = data_pre[i, :].reshape(-1, 1)
data[band] = channel_data
except Exception as e:
print(str(e))
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get(
'/Calibration/SV_DN_average')[:]
# Filter invalid values
invalid_index = np.logical_or(data_pre <= 0, data_pre > 4095)
data_pre = data_pre.astype(np.float32)
data_pre[invalid_index] = np.nan
s0 = data_pre.shape[1]
print(data_pre.shape)
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
channel_data = np.full(
self.data_shape, np.nan, dtype=np.float32)
# Expand 200 rows to 2000
if s0 == 200:
data_pre_new = np.repeat(data_pre[i, :], 10)
elif s0 == 2000:
data_pre_new = data_pre[i, :]
else:
raise ValueError(
'Cant read this satellite`s dataset sv .: {}'.format(self.satellite))
channel_data[:] = data_pre_new.reshape(-1, 1)
data[band] = channel_data
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_bb(self):
"""
return bb
"""
data = dict()
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
try:
data_pre = h5r.get('/BB_DN_average')[:]
# Filter invalid values
invalid_index = np.logical_or(
data_pre <= 0, data_pre > 4095)
data_pre = data_pre.astype(np.float32)
data_pre[invalid_index] = np.nan
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
channel_data = np.full(
self.data_shape, np.nan, dtype=np.float32)
channel_data[:] = data_pre[i, :].reshape(-1, 1)
data[band] = channel_data
except Exception as e:
print(str(e))
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get(
'/Calibration/BB_DN_average')[:]
# Filter invalid values
invalid_index = np.logical_or(data_pre <= 0, data_pre > 4095)
data_pre = data_pre.astype(np.float32)
data_pre[invalid_index] = np.nan
s0 = data_pre.shape[1]
# Process each channel
for i in range(self.channels):
band = 'CH_{:02d}'.format(i + 1)
channel_data = np.full(
self.data_shape, np.nan, dtype=np.float32)
# Expand 200 rows to 2000
if s0 == 200:
data_pre_new = np.repeat(data_pre[i, :], 10)
elif s0 == 2000:
data_pre_new = data_pre[i, :]
else:
raise ValueError(
'Cant read this satellite`s dataset bb .: {}'.format(self.satellite))
channel_data[:] = data_pre_new.reshape(-1, 1)
data[band] = channel_data
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_longitude(self):
"""
return longitude
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/Longitude')[:]
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/Longitude')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < -180, data_pre > 180)
data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_latitude(self):
"""
return latitude
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/Latitude')[:]
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/Latitude')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < -90, data_pre > 90)
data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_land_sea_mask(self):
"""
return land_sea_mask
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/LandSeaMask')[:]
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/LandSeaMask')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < 0, data_pre > 7)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_height(self):
"""
return height
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/DEM')[:]
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/DEM')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < -400, data_pre > 10000)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_land_cover(self):
"""
return land_cover
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/LandCover')[:]
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/LandCover')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < 0, data_pre > 17)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_sensor_azimuth(self):
"""
return sensor_azimuth
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/SensorAzimuth')[:]
vmin = -18000
vmax = 18000
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get(
'/Geolocation/SensorAzimuth')[:]
if 'FY3D' in self.satellite:
vmin = 0
vmax = 36000
else:
vmin = -18000
vmax = 18000
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre / 100.
return data
def get_sensor_zenith(self):
"""
return sensor_zenith
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/SensorZenith')[:]
vmin = 0
vmax = 18000
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/SensorZenith')[:]
vmin = 0
vmax = 18000
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre / 100.
return data
def get_solar_azimuth(self):
"""
return solar_azimuth
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/SolarAzimuth')[:]
vmin = -18000
vmax = 18000
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/SolarAzimuth')[:]
if 'FY3D' in self.satellite:
vmin = 0
vmax = 36000
else:
vmin = -18000
vmax = 18000
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre / 100.
return data
def get_solar_zenith(self):
"""
return solar_zenith
"""
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/SolarZenith')[:]
vmin = 0
vmax = 18000
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/SolarZenith')[:]
vmin = 0
vmax = 18000
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre / 100.
return data
def get_hight(self):
"""
return height
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
# s = self.data_shape  # FY3A data is irregular; some files are 1810 x 2048, use 1800 x 2048
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/DEM')[:]
vmin = -400
vmax = 10000
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Geolocation/DEM')[:]
vmin = -400
vmax = 10000
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
# invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
# data_pre = data_pre.astype(np.float32)
# data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_day_night_flag(self):
"""
Nadir Day(0) Night(1) or Mix(2) Flag
return day_night_flag
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
return
elif self.satellite in satellite_type2:
geo_file = self.__get_geo_file()
with h5py.File(geo_file, 'r') as h5r:
data_pre = h5r.get('/Timedata/DayNightFlag')[:]
vmin = 0
vmax = 2
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
invalid_index = np.logical_or(data_pre < vmin, data_pre > vmax)
data_pre = data_pre.astype(float)
data_pre[invalid_index] = np.nan
data = data_pre
return data
def get_mirror_side(self):
data = None
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B']
satellite_type2 = ['FY3C', 'FY3D']
if self.satellite in satellite_type1:
return
elif self.satellite in satellite_type2:
with h5py.File(self.in_file, 'r') as h5r:
data_pre = h5r.get('/Calibration/Kmirror_Side')[:]
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
# Filter invalid values
data = data_pre
return data
def get_timestamp(self):
"""
return from 1970-01-01 00:00:00 seconds
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
seconds_of_file = 300 # one granule spans 300 seconds
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
file_date = datetime.strptime(self.ymd + self.hms, '%Y%m%d%H%M%S')
timestamp = (
file_date - datetime(1970, 1, 1, 0, 0, 0)).total_seconds()
row_length = self.data_shape[0]
delta = np.linspace(0, seconds_of_file - 1, row_length)
data = np.full(self.data_shape, np.nan, dtype=np.float64)
data[:] = (delta + timestamp).reshape(-1, 1)
data = data.astype(np.int32)
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
def get_central_wave_number(self):
"""
return the central wave number
central_wave_number
wn(cm-1) = 10 ^ 7 / wave_length(nm)
"""
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C']
satellite_type2 = ['FY3D']
if self.satellite in satellite_type1:
data = {'CH_05': 869.565}
elif self.satellite in satellite_type2:
data = {'CH_20': 2634.359, 'CH_21': 2471.654, 'CH_22':
1382.621, 'CH_23': 1168.182, 'CH_24': 933.364, 'CH_25': 836.941}
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data
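# (Illustration, not part of the original file: the docstring formula above
# converts a band-centre wavelength in nm to a wave number in cm^-1; e.g.
# 10 ** 7 / 11500.0 gives 869.565..., the FY3A/B/C CH_05 value returned here.)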
def get_spectral_response(self):
"""
return the spectral wave numbers and response values as two dicts
"""
data1 = dict()
data2 = dict()
if self.resolution == 1000:
satellite_type1 = ['FY3A', 'FY3B', 'FY3C', 'FY3D']
if self.satellite in satellite_type1:
dtype = {
'names': ('wave_length', 'response'), 'formats': ('f4', 'f4')}
for i in range(self.channels):
k = i + 1
band = "CH_{:02d}".format(k)
file_name = '{}_{}_SRF_CH{:02d}_Pub.txt'.format(
self.satellite, self.sensor, k)
data_file = os.path.join(g_main_path, 'SRF', file_name)
if not os.path.isfile(data_file):
continue
datas = np.loadtxt(data_file, dtype=dtype)
# Convert wavelength to wave number
wave_length = datas['wave_length'][::-1]
wave_number = 10 ** 7 / wave_length
# Response
response = datas['response'][::-1]
data1[band] = wave_number
data2[band] = response
else:
raise ValueError(
'Cant read this satellite`s data.: {}'.format(self.satellite))
else:
raise ValueError(
'Cant read this data, please check its resolution: {}'.format(self.in_file))
return data1, data2
if __name__ == '__main__':
# L1File = 'D:/data/MERSI/FY3A_MERSI_GBAL_L1_20141230_1145_1000M_MS.HDF'
# L1File = 'D:/data/MERSI/FY3B_MERSI_GBAL_L1_20130101_0005_1000M_MS.HDF'
L1File = 'D:/data/MERSI/FY3D_MERSI_GBAL_L1_20181001_0020_1000M_MS.HDF'
L1File = 'D:/data/MERSI/FY3D_MERSI_GBAL_L1_20190825_1755_1000M_MS.HDF'
mersi = ReadMersiL1(L1File)
print(mersi.satellite) # satellite name
print(mersi.sensor) # sensor name
print(mersi.ymd) # L1 file date YYYYMMDD
print(mersi.hms) # L1 file time HHMMSS
print(mersi.resolution) # resolution
print(mersi.channels) # number of channels
print(mersi.data_shape)
print(type(mersi.file_attr))
def print_data_status(datas, name=None):
data_shape = datas.shape
data_min = np.nanmin(datas)
data_max = np.nanmax(datas)
data_mean = np.nanmean(datas)
data_median = np.nanmedian(datas)
print("{}: shape: {}, min: {}, max: {}, mean: {}, median: {}".format(
name, data_shape, data_min, data_max, data_mean, data_median))
def print_channel_data(datas):
if not isinstance(datas, dict):
return
keys = list(datas.keys())
keys.sort()
for t_channel_name in keys:
channel_data = datas[t_channel_name]
print_data_status(channel_data, name=t_channel_name)
# print('cloud mask')
# t_data = mersi.get_cloudmask()
# print('dn:')
# t_data = mersi.get_dn()
# print_channel_data(t_data)
# print('k0:')
# t_data = mersi.get_k0()
# print_channel_data(t_data)
# print('k1:')
# t_data = mersi.get_k1()
# print_channel_data(t_data)
# print('k2:')
# t_data = mersi.get_k2()
# print_channel_data(t_data)
# print('ref:')
# t_data = mersi.get_ref()
# print_channel_data(t_data)
#
# print('rad:')
# t_data = mersi.get_rad()
# print_channel_data(t_data)
#
# print('tbb:')
# t_data = mersi.get_tbb()
# print_channel_data(t_data)
# print(t_data['CH_24'][1000, 1000])
#
# print('sv:')
# t_data = mersi.get_sv()
# print_channel_data(t_data)
#
# print('bb:')
# t_data = mersi.get_bb()
# print_channel_data(t_data)
#
# print('longitude:')
# t_data = mersi.get_longitude()
# print_data_status(t_data)
#
# print('latitude:')
# t_data = mersi.get_latitude()
# print_data_status(t_data)
#
# print('land_sea_mask:')
# t_data = mersi.get_land_sea_mask()
# print_data_status(t_data)
#
# print('land_cover:')
# t_data = mersi.get_land_cover()
# print_data_status(t_data)
#
# print('sensor_azimuth:')
# t_data = mersi.get_sensor_azimuth()
# print_data_status(t_data)
# print('sensor_zenith:')
# t_data = mersi.get_sensor_zenith()
# print_data_status(t_data)
# print('solar_azimuth:')
# t_data = mersi.get_solar_azimuth()
# print_data_status(t_data)
# print('solar_zenith:')
# t_data = mersi.get_solar_zenith()
# print_data_status(t_data)
# print('timestamp:')
# t_data = mersi.get_timestamp()
# print_data_status(t_data)
#
# print('get_spectral_response:')
# wavenums, wave_spec = mersi.get_spectral_response()
# print_channel_data(wavenums)
# print_channel_data(wave_spec)
print('ref:')
t_data = mersi.get_ref()
for key in sorted(t_data.keys()):
print("%s, %0.6f %0.6f" % (key, np.nanmin(t_data[key]), np.nanmax(t_data[key])))
print('rad:')
t_data = mersi.get_rad()
for key in sorted(t_data.keys()):
print("%s, %0.6f %0.6f" % (key, np.nanmin(t_data[key]), np.nanmax(t_data[key])))
print('tbb:')
t_data = mersi.get_tbb()
for key in sorted(t_data.keys()):
print("%s, %0.6f %0.6f" % (key, np.nanmin(t_data[key]), np.nanmax(t_data[key])))
| 36.370069
| 119
| 0.469845
| 6,516
| 58,083
| 3.977747
| 0.066912
| 0.042671
| 0.034724
| 0.055558
| 0.806628
| 0.788996
| 0.764381
| 0.739535
| 0.731664
| 0.709672
| 0
| 0.065202
| 0.42305
| 58,083
| 1,596
| 120
| 36.392857
| 0.708245
| 0.100305
| 0
| 0.721495
| 0
| 0.000935
| 0.091434
| 0.019499
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039252
| false
| 0.000935
| 0.008411
| 0
| 0.083178
| 0.019626
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36c5fe93bceb2ad1716f1173accb1e652c56a768
| 23,500
|
py
|
Python
|
tensorflow/python/ops/gen_linalg_ops.py
|
shishaochen/TensorFlow-0.8-Win
|
63221dfc4f1a1d064308e632ba12e6a54afe1fd8
|
[
"Apache-2.0"
] | 1
|
2017-09-14T23:59:05.000Z
|
2017-09-14T23:59:05.000Z
|
tensorflow/python/ops/gen_linalg_ops.py
|
shishaochen/TensorFlow-0.8-Win
|
63221dfc4f1a1d064308e632ba12e6a54afe1fd8
|
[
"Apache-2.0"
] | 1
|
2016-10-19T02:43:04.000Z
|
2016-10-31T14:53:06.000Z
|
tensorflow/python/ops/gen_linalg_ops.py
|
shishaochen/TensorFlow-0.8-Win
|
63221dfc4f1a1d064308e632ba12e6a54afe1fd8
|
[
"Apache-2.0"
] | 8
|
2016-10-23T00:50:02.000Z
|
2019-04-21T11:11:57.000Z
|
"""Python wrappers around Brain.
This file is MACHINE GENERATED! Do not edit.
"""
from google.protobuf import text_format
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.ops import op_def_library
def batch_cholesky(input, name=None):
r"""Calculates the Cholesky decomposition of a batch of square matrices.
The input is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions
form square matrices, with the same constraints as the single matrix Cholesky
decomposition above. The output is a tensor of the same shape as the input
containing the Cholesky decompositions for all input submatrices `[..., :, :]`.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[..., M, M]`.
"""
return _op_def_lib.apply_op("BatchCholesky", input=input, name=name)
def batch_matrix_determinant(input, name=None):
r"""Calculates the determinants for a batch of square matrices.
The input is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions
form square matrices. The output is a 1-D tensor containing the determinants
for all input submatrices `[..., :, :]`.
Args:
input: A `Tensor`. Must be one of the following types: `float32`, `float64`.
Shape is `[..., M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[...]`.
"""
return _op_def_lib.apply_op("BatchMatrixDeterminant", input=input,
name=name)
def batch_matrix_inverse(input, adjoint=None, name=None):
r"""Calculates the inverse of square invertible matrices or their adjoints
(conjugate transposes).
The input is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions
form square matrices. The output is a tensor of the same shape as the input
containing the inverse for all input submatrices `[..., :, :]`.
The op uses LU decomposition with partial pivoting to compute the inverses.
If a matrix is not invertible there is no guarantee what the op does. It
may detect the condition and raise an exception or it may simply return a
garbage result.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, M]`.
adjoint: An optional `bool`. Defaults to `False`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[..., M, M]`.
"""
return _op_def_lib.apply_op("BatchMatrixInverse", input=input,
adjoint=adjoint, name=name)
def batch_matrix_solve(matrix, rhs, adjoint=None, name=None):
r"""Solves systems of linear equations. Checks for invertibility.
Matrix is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions
form square matrices. Rhs is a tensor of shape
`[..., M, K]`. The output is a tensor of shape `[..., M, K]`. If `adjoint` is `False` then each output
matrix satisfies `matrix[..., :, :] * output[..., :, :] = rhs[..., :, :]`.
If `adjoint` is `True` then each output
matrix satisfies `adjoint(matrix[..., :, :]) * output[..., :, :] = rhs[..., :, :]`.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, M]`.
rhs: A `Tensor`. Must have the same type as `matrix`.
Shape is `[..., M, K]`.
adjoint: An optional `bool`. Defaults to `False`.
Boolean indicating whether to solve with `matrix` or its (block-wise)
adjoint.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`. Shape is `[..., M, K]`.
"""
return _op_def_lib.apply_op("BatchMatrixSolve", matrix=matrix, rhs=rhs,
adjoint=adjoint, name=name)
def batch_matrix_solve_ls(matrix, rhs, l2_regularizer, fast=None, name=None):
r"""Solves multiple linear least-squares problems.
`matrix` is a tensor of shape `[..., M, N]` whose inner-most 2 dimensions
form matrices of size `[M, N]`. Rhs is a tensor of shape `[..., M, K]`. The output
is a tensor of shape `[..., N, K]` where each output matrix solves each of
the equations matrix[..., :, :] * output[..., :, :] = rhs[..., :, :] in the
least squares sense.
Below we will use the following notation for each pair of
matrix and right-hand sides in the batch:
`matrix`=\\(A \in \Re^{m \times n}\\),
`rhs`=\\(B \in \Re^{m \times k}\\),
`output`=\\(X \in \Re^{n \times k}\\),
`l2_regularizer`=\\(\lambda\\).
If `fast` is `True`, then the solution is computed by solving the normal
equations using Cholesky decomposition. Specifically, if \\(m \ge n\\) then
\\(X = (A^T A + \lambda I)^{-1} A^T B\\), which solves the least-squares
problem \\(X = \mathrm{argmin}_{Z \in \Re^{n \times k}} ||A Z - B||_F^2 +
\lambda ||Z||_F^2\\). If \\(m \lt n\\) then `output` is computed as
\\(X = A^T (A A^T + \lambda I)^{-1} B\\), which (for \\(\lambda = 0\\)) is the
minimum-norm solution to the under-determined linear system, i.e.
\\(X = \mathrm{argmin}_{Z \in \Re^{n \times k}} ||Z||_F^2 \\), subject to
\\(A Z = B\\). Notice that the fast path is only numerically stable when
\\(A\\) is numerically full rank and has a condition number
\\(\mathrm{cond}(A) \lt \frac{1}{\sqrt{\epsilon_{mach}}}\\) or \\(\lambda\\) is
sufficiently large.
If `fast` is `False` an algorithm based on the numerically robust complete
orthogonal decomposition is used. This computes the minimum-norm
least-squares solution, even when \\(A\\) is rank deficient. This path is
typically 6-7 times slower than the fast path. If `fast` is `False` then
`l2_regularizer` is ignored.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, N]`.
rhs: A `Tensor`. Must have the same type as `matrix`.
Shape is `[..., M, K]`.
l2_regularizer: A `Tensor` of type `float64`.
fast: An optional `bool`. Defaults to `True`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`. Shape is `[..., N, K]`.
"""
return _op_def_lib.apply_op("BatchMatrixSolveLs", matrix=matrix, rhs=rhs,
l2_regularizer=l2_regularizer, fast=fast,
name=name)
def batch_matrix_triangular_solve(matrix, rhs, lower=None, adjoint=None,
name=None):
r"""Solves systems of linear equations with upper or lower triangular matrices by
backsubstitution.
`matrix` is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions form
square matrices. If `lower` is `True` then the strictly upper triangular part
of each inner-most matrix is assumed to be zero and not accessed.
If `lower` is False then the strictly lower triangular part of each inner-most
matrix is assumed to be zero and not accessed.
`rhs` is a tensor of shape `[..., M, K]`.
The output is a tensor of shape `[..., M, K]`. If `adjoint` is `False` then the
innermost matrices in `output` satisfy matrix equations
`matrix[..., :, :] * output[..., :, :] = rhs[..., :, :]`.
If `adjoint` is `True` then the innermost matrices in
`output` satisfy matrix equations
`adjoint(matrix[..., i, k]) * output[..., k, j] = rhs[..., i, j]`.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, M]`.
rhs: A `Tensor`. Must have the same type as `matrix`.
Shape is `[..., M, K]`.
lower: An optional `bool`. Defaults to `True`.
Boolean indicating whether the innermost matrices in `matrix` are
lower or upper triangular.
adjoint: An optional `bool`. Defaults to `False`.
Boolean indicating whether to solve with `matrix` or its (block-wise)
adjoint.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`. Shape is `[..., M, K]`.
"""
return _op_def_lib.apply_op("BatchMatrixTriangularSolve", matrix=matrix,
rhs=rhs, lower=lower, adjoint=adjoint,
name=name)
def batch_self_adjoint_eig(input, name=None):
r"""Calculates the Eigen Decomposition of a batch of square self-adjoint matrices.
The input is a tensor of shape `[..., M, M]` whose inner-most 2 dimensions
form square matrices, with the same constraints as the single matrix
SelfAdjointEig.
The result is a `[..., M+1, M]` matrix with `[..., 0, :]` containing the
eigenvalues, and subsequent `[..., 1:, :]` containing the eigenvectors.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[..., M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[..., M+1, M]`.
"""
return _op_def_lib.apply_op("BatchSelfAdjointEig", input=input, name=name)
def cholesky(input, name=None):
r"""Calculates the Cholesky decomposition of a square matrix.
The input has to be symmetric and positive definite. Only the lower-triangular
part of the input will be used for this operation. The upper-triangular part
will not be read.
The result is the lower-triangular matrix of the Cholesky decomposition of the
input.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[M, M]`.
"""
return _op_def_lib.apply_op("Cholesky", input=input, name=name)
def cholesky_grad(l, grad, name=None):
r"""Calculates the reverse mode backpropagated gradient of the Cholesky algorithm.
For an explanation see "Differentiation of the Cholesky algorithm" by Iain Murray http://arxiv.org/abs/1602.07527.
Args:
l: A `Tensor`. Must be one of the following types: `float32`, `float64`.
Output of Cholesky algorithm l = chol(A). Shape is `[M, M]`. Algorithm depends only on lower triangular part of this matrix.
grad: A `Tensor`. Must have the same type as `l`.
df/dl where f is some scalar function. Shape is `[M, M]`. Algorithm depends only on lower triangular part of this matrix.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `l`.
Symmetrized version of df/dA. Shape is `[M, M]`.
"""
return _op_def_lib.apply_op("CholeskyGrad", l=l, grad=grad, name=name)
def matrix_determinant(input, name=None):
r"""Calculates the determinant of a square matrix.
Args:
input: A `Tensor`. Must be one of the following types: `float32`, `float64`.
A tensor of shape `[M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`.
A scalar, equal to the determinant of the input.
"""
return _op_def_lib.apply_op("MatrixDeterminant", input=input, name=name)
def matrix_inverse(input, adjoint=None, name=None):
r"""Calculates the inverse of a square invertible matrix or its adjoint (conjugate
transpose).
The op uses LU decomposition with partial pivoting to compute the inverse.
If the matrix is not invertible there is no guarantee what the op does. It
may detect the condition and raise an exception or it may simply return a
garbage result.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, M]`.
adjoint: An optional `bool`. Defaults to `False`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`.
Shape is `[M, M]`. If `adjoint` is `False` then `output` contains the
matrix inverse of `input`. If `adjoint` is `True` then `output` contains the
matrix inverse of the adjoint of `input`.
"""
return _op_def_lib.apply_op("MatrixInverse", input=input, adjoint=adjoint,
name=name)
def matrix_solve(matrix, rhs, adjoint=None, name=None):
r"""Solves a system of linear equations. Checks for invertibility.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, M]`.
rhs: A `Tensor`. Must have the same type as `matrix`. Shape is `[M, K]`.
adjoint: An optional `bool`. Defaults to `False`.
Boolean indicating whether to solve with `matrix` or its adjoint.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`.
Shape is `[M, K]`. If `adjoint` is `False` then `output` solves
`matrix` * `output` = `rhs`. If `adjoint` is `True` then `output` solves
`adjoint(matrix)` * `output` = `rhs`.
"""
return _op_def_lib.apply_op("MatrixSolve", matrix=matrix, rhs=rhs,
adjoint=adjoint, name=name)
def matrix_solve_ls(matrix, rhs, l2_regularizer, fast=None, name=None):
r"""Solves a linear least-squares problem.
Below we will use the following notation
`matrix`=\\(A \in \Re^{m \times n}\\),
`rhs`=\\(B \in \Re^{m \times k}\\),
`output`=\\(X \in \Re^{n \times k}\\),
`l2_regularizer`=\\(\lambda\\).
If `fast` is `True`, then the solution is computed by solving the normal
equations using Cholesky decomposition. Specifically, if \\(m \ge n\\) then
\\(X = (A^T A + \lambda I)^{-1} A^T B\\), which solves the least-squares
problem \\(X = \mathrm{argmin}_{Z \in \Re^{n \times k}} ||A Z - B||_F^2 +
\lambda ||Z||_F^2\\). If \\(m \lt n\\) then `output` is computed as
\\(X = A^T (A A^T + \lambda I)^{-1} B\\),
which (for \\(\lambda = 0\\)) is the minimum-norm solution to the
under-determined linear system, i.e.
\\(X = \mathrm{argmin}_{Z \in \Re^{n \times k}} ||Z||_F^2 \\),
subject to \\(A Z = B\\).
Notice that the fast path is only numerically stable when \\(A\\) is
numerically full rank and has a condition number
\\(\mathrm{cond}(A) \lt \frac{1}{\sqrt{\epsilon_{mach}}}\\)
or \\(\lambda\\) is sufficiently large.
If `fast` is `False` an algorithm based on the numerically robust complete
orthogonal decomposition is used. This computes the minimum-norm
least-squares solution, even when \\(A\\) is rank deficient. This path is
typically 6-7 times slower than the fast path. If `fast` is `False` then
`l2_regularizer` is ignored.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, N]`.
rhs: A `Tensor`. Must have the same type as `matrix`. Shape is `[M, K]`.
l2_regularizer: A `Tensor` of type `float64`.
fast: An optional `bool`. Defaults to `True`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`.
Shape is `[N, K]` containing the tensor that solves
`matrix * output = rhs` in the least-squares sense.
"""
return _op_def_lib.apply_op("MatrixSolveLs", matrix=matrix, rhs=rhs,
l2_regularizer=l2_regularizer, fast=fast,
name=name)
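# Hedged NumPy/SciPy sketch of the `fast=True` path described above (normal
# equations solved via Cholesky); illustrative only, not the TensorFlow kernel.
import numpy as np
from scipy.linalg import cho_factor, cho_solve

def _matrix_solve_ls_fast_reference(matrix, rhs, l2_regularizer):
    m, n = matrix.shape
    if m >= n:
        # X = (A^T A + lambda I)^{-1} A^T B
        c = cho_factor(matrix.T @ matrix + l2_regularizer * np.eye(n))
        return cho_solve(c, matrix.T @ rhs)
    # X = A^T (A A^T + lambda I)^{-1} B  (minimum-norm solution when lambda = 0)
    c = cho_factor(matrix @ matrix.T + l2_regularizer * np.eye(m))
    return matrix.T @ cho_solve(c, rhs)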
def matrix_triangular_solve(matrix, rhs, lower=None, adjoint=None, name=None):
r"""Solves a system of linear equations with an upper or lower triangular matrix by
backsubstitution.
`matrix` is a matrix of shape `[M, M]`. If `lower` is `True` then the strictly
upper triangular part of `matrix` is assumed to be zero and not accessed.
If `lower` is False then the strictly lower triangular part of `matrix` is
assumed to be zero and not accessed.
`rhs` is a matrix of shape `[M, K]`.
The output is a matrix of shape `[M, K]`. If `adjoint` is `False` then `output`
satisfies the matrix equation `matrix` * `output` = `rhs`.
If `adjoint` is `True` then `output` satisfies the matrix equation
`adjoint(matrix)` * `output` = `rhs`.
Args:
matrix: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, M]`.
rhs: A `Tensor`. Must have the same type as `matrix`. Shape is `[M, K]`.
lower: An optional `bool`. Defaults to `True`.
Boolean indicating whether `matrix` is lower or upper triangular
adjoint: An optional `bool`. Defaults to `False`.
Boolean indicating whether to solve with `matrix` or its adjoint.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `matrix`. Shape is `[M, K]`.
"""
return _op_def_lib.apply_op("MatrixTriangularSolve", matrix=matrix, rhs=rhs,
lower=lower, adjoint=adjoint, name=name)
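# Minimal forward-substitution sketch for the lower-triangular case described
# above; the strictly upper triangle is never read, matching the op's
# contract. Illustrative only.
import numpy as np

def _lower_triangular_solve_reference(matrix, rhs):
    m = matrix.shape[0]
    output = np.zeros_like(rhs, dtype=float)
    for i in range(m):
        # Row i uses only previously solved rows (j < i) and matrix[i, i].
        output[i] = (rhs[i] - matrix[i, :i] @ output[:i]) / matrix[i, i]
    return output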
def self_adjoint_eig(input, name=None):
r"""Calculates the Eigen Decomposition of a square Self-Adjoint matrix.
Only the lower-triangular part of the input will be used in this case. The
upper-triangular part will not be read.
The result is a M+1 x M matrix whose first row is the eigenvalues, and
subsequent rows are eigenvectors.
Args:
input: A `Tensor`. Must be one of the following types: `float64`, `float32`.
Shape is `[M, M]`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `input`. Shape is `[M+1, M]`.
"""
return _op_def_lib.apply_op("SelfAdjointEig", input=input, name=name)
def _InitOpDefLibrary():
op_list = op_def_pb2.OpList()
text_format.Merge(_InitOpDefLibrary.op_list_ascii, op_list)
op_def_registry.register_op_list(op_list)
op_def_lib = op_def_library.OpDefLibrary()
op_def_lib.add_op_list(op_list)
return op_def_lib
_InitOpDefLibrary.op_list_ascii = """op {
name: "BatchCholesky"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "BatchMatrixDeterminant"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_FLOAT
type: DT_DOUBLE
}
}
}
}
op {
name: "BatchMatrixInverse"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "BatchMatrixSolve"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "BatchMatrixSolveLs"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
input_arg {
name: "l2_regularizer"
type: DT_DOUBLE
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
attr {
name: "fast"
type: "bool"
default_value {
b: true
}
}
}
op {
name: "BatchMatrixTriangularSolve"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "lower"
type: "bool"
default_value {
b: true
}
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "BatchSelfAdjointEig"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "Cholesky"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "CholeskyGrad"
input_arg {
name: "l"
type_attr: "T"
}
input_arg {
name: "grad"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_FLOAT
type: DT_DOUBLE
}
}
}
}
op {
name: "MatrixDeterminant"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_FLOAT
type: DT_DOUBLE
}
}
}
}
op {
name: "MatrixInverse"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "MatrixSolve"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "MatrixSolveLs"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
input_arg {
name: "l2_regularizer"
type: DT_DOUBLE
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
attr {
name: "fast"
type: "bool"
default_value {
b: true
}
}
}
op {
name: "MatrixTriangularSolve"
input_arg {
name: "matrix"
type_attr: "T"
}
input_arg {
name: "rhs"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "lower"
type: "bool"
default_value {
b: true
}
}
attr {
name: "adjoint"
type: "bool"
default_value {
b: false
}
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
op {
name: "SelfAdjointEig"
input_arg {
name: "input"
type_attr: "T"
}
output_arg {
name: "output"
type_attr: "T"
}
attr {
name: "T"
type: "type"
allowed_values {
list {
type: DT_DOUBLE
type: DT_FLOAT
}
}
}
}
"""
_op_def_lib = _InitOpDefLibrary()
| 27.262181
| 130
| 0.617532
| 3,282
| 23,500
| 4.335161
| 0.094455
| 0.027059
| 0.023405
| 0.020101
| 0.849592
| 0.82668
| 0.805032
| 0.77467
| 0.751898
| 0.734467
| 0
| 0.006903
| 0.254128
| 23,500
| 861
| 131
| 27.293844
| 0.804827
| 0.591915
| 0
| 0.596349
| 1
| 0
| 0.660158
| 0.018409
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032454
| false
| 0
| 0.010142
| 0
| 0.075051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
36cb5d1c4b7bec5e622a27a676bcc79456f406f1
| 271
|
py
|
Python
|
source/assetmgr_nuke/specifications.py
|
IngenuityEngine/ftrack-connect-foundry
|
a0d5ba788e3dc5c1536ebe9740bcf4393e3f5e1d
|
[
"MIT"
] | 1
|
2019-10-22T06:33:08.000Z
|
2019-10-22T06:33:08.000Z
|
source/assetmgr_nuke/specifications.py
|
IngenuityEngine/ftrack-connect-foundry
|
a0d5ba788e3dc5c1536ebe9740bcf4393e3f5e1d
|
[
"MIT"
] | null | null | null |
source/assetmgr_nuke/specifications.py
|
IngenuityEngine/ftrack-connect-foundry
|
a0d5ba788e3dc5c1536ebe9740bcf4393e3f5e1d
|
[
"MIT"
] | null | null | null |
from FnAssetAPI.specifications import FileSpecification
from FnAssetAPI.specifications.Specification import TypedProperty as P
__all__ = ['NukeScriptSpecification',]
## @todo clean up code that uses these
from FnAssetAPI.specifications import NukeScriptSpecification
| 27.1
| 70
| 0.845018
| 27
| 271
| 8.333333
| 0.666667
| 0.186667
| 0.373333
| 0.302222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107011
| 271
| 9
| 71
| 30.111111
| 0.929752
| 0.129151
| 0
| 0
| 0
| 0
| 0.099567
| 0.099567
| 0
| 0
| 0
| 0.111111
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
36cc684054c5a101956b321c2173093b607d6120
| 298
|
py
|
Python
|
pynumdiff/optimize/kalman_smooth/__init__.py
|
luckystarufo/PyNumDiff
|
99ffeb0c118c6de715414af042020bb268941c99
|
[
"MIT"
] | 60
|
2020-09-07T20:39:04.000Z
|
2022-03-31T05:49:56.000Z
|
pynumdiff/optimize/kalman_smooth/__init__.py
|
luckystarufo/PyNumDiff
|
99ffeb0c118c6de715414af042020bb268941c99
|
[
"MIT"
] | 15
|
2020-11-09T03:58:39.000Z
|
2022-03-15T16:35:31.000Z
|
pynumdiff/optimize/kalman_smooth/__init__.py
|
luckystarufo/PyNumDiff
|
99ffeb0c118c6de715414af042020bb268941c99
|
[
"MIT"
] | 8
|
2021-02-12T01:50:14.000Z
|
2022-03-21T22:48:54.000Z
|
"""
Import useful functions from __kalman_smooth__.
"""
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_velocity
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_acceleration
from pynumdiff.optimize.kalman_smooth.__kalman_smooth__ import constant_jerk
| 42.571429
| 84
| 0.885906
| 36
| 298
| 6.611111
| 0.333333
| 0.352941
| 0.264706
| 0.340336
| 0.743697
| 0.743697
| 0.743697
| 0.743697
| 0.743697
| 0
| 0
| 0
| 0.063758
| 298
| 6
| 85
| 49.666667
| 0.853047
| 0.154362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
7fe690dececc6e447ca3ca49651f8dab323121e6
| 27,196
|
py
|
Python
|
test/test_introspect.py
|
datakaveri/iudx-auth-server
|
c1ffc770f604e512aab332d89688ebbc6216f387
|
[
"MIT"
] | null | null | null |
test/test_introspect.py
|
datakaveri/iudx-auth-server
|
c1ffc770f604e512aab332d89688ebbc6216f387
|
[
"MIT"
] | 7
|
2020-10-13T12:38:40.000Z
|
2021-02-16T13:17:13.000Z
|
test/test_introspect.py
|
datakaveri/iudx-auth-server
|
c1ffc770f604e512aab332d89688ebbc6216f387
|
[
"MIT"
] | 4
|
2020-09-03T09:29:57.000Z
|
2020-12-21T07:55:10.000Z
|
import os
import pytest
from init import consumer
from init import provider
from init import resource_server
from init import catalogue_server
from init import file_server
from init import expect_failure
# for registration and resetting roles
from access import *
from consent import role_reg
# for setting session ID
from session import *
from expire_token import expire_token
import hashlib
# Explicit imports for names used below (they may also arrive via the
# wildcard imports above).
import json
import random
import string
email = "barun@iisc.ac.in"
def rand_rsg():
return ''.join(random.choice(string.ascii_lowercase) for _ in range(10))
@pytest.fixture(scope="session", autouse=True)
def init():
init_provider("arun.babu@rbccps.org") # provider
init_provider("abc.123@iisc.ac.in") # alt_provider
######### session ID setup for provider, alt_provider ###########
r = provider.get_session_id(ALL_SECURE_ENDPOINTS_BODY)
assert r['success'] is True
provider.set_user_session_id(fetch_sessionId("arun.babu@rbccps.org"))
'''
r = alt_provider.get_session_id(ALL_SECURE_ENDPOINTS_BODY)
assert r['success'] is True
alt_provider.set_user_session_id(fetch_sessionId("abc.123@iisc.ac.in"))
'''
# register the consumer
assert reset_role(email) == True
org_id = add_organization("iisc.ac.in")
r = role_reg(email, '9454234223', name, ["consumer", "onboarder", "data ingester"], org_id, csr)
assert r['success'] == True
assert r['status_code'] == 200
def test_empty_token():
r = resource_server.introspect_token(' ')
assert r['success'] is False
assert r['status_code'] == 400
def test_invalid_token():
token = 'auth.iudx.io/xyz.abc@datakaveri.org/e7444fab9a74ffb6da795a69c0eeb3b5/4238265a-611f-41c0-813a-6e16cf8cc228'
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 400
token = 'auth.iudx.org.in/xy#)(@datakaveri.org/e7444fab9a74ffb6da795a69c0eeb3b5/4238265a-611f-41c0-813a-6e16cf8cc228'
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 400
token = 'auth.iudx.org.in/xyz.abc@datakaveri.org/eAZZfab9a74ffb6da795a69c0eeb3b5/4238265a-611f-41c0-813a-6e16cf8cc228'
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 400
token = 'auth.iudx.org.in/xyz.abc@datakaveri.org/e7444fab9a74ffb6da795a69c0eeb3b5/4238265a611f-41c0-813a-6e16cf8cc228'
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 400
token = 'auth.iudx.org.in/xyz.abc@datakaveri.org/e7444fab9a74ffb6da795a69c0eeb3b5/4238265a-z11f-41c0-813a-6e16cf8cc228'
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 400
def test_valid_token():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
response = r['response']
assert response['consumer'] == token.split('/')[1]
assert response['request'][0]['id'] == resource_id + '/*' # since it's a resource group
assert len(response['request'][0]['apis']) > 1
def test_expired_token():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
assert len(r['response']['request']) == 1
s = token.split("/")
uuid = s[3]
assert expire_token(uuid) is True
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 403
def test_deleted_token():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
s = token.split("/")
uuid = s[3]
body = {'tokens':[uuid]}
r = consumer.delete_token(body)
assert r['success'] is True
assert r['status_code'] == 200
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 403
def test_token_belonging_diff_server():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/file.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["download"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 403
r = file_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
assert len(r['response']['request']) == 1
def test_revoked_rule():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
# delete rule
# find access ID and delete it
r = provider.get_provider_access()
assert r['success'] == True
assert r['status_code'] == 200
rules = r['response']
access_id = -1  # sentinel so the assert below fails cleanly if no rule matches
for r in rules:
if resource_id == r['item']['cat_id']:
access_id = r['id']
break
assert access_id != -1
r = provider.delete_rule([{'id':access_id}])
assert r['success'] == True
assert r['status_code'] == 200
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 403
def test_onboarder_token():
access_req = {"user_email": email,
"user_role":'onboarder'
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/catalogue.iudx.io/catalogue/crud"
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = catalogue_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id
assert len(resp['request'][0]['apis']) == 0
def test_rs_consumer_caps():
with open('../capabilities.json') as f:
caps = json.load(f)
for cap, apis in caps['rs.iudx.io']['consumer'].items():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":[cap]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
apis = [str.replace('{{RESOURCE_GROUP_ID}}',resource_id) for str in apis]
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == set(apis)
def test_file_consumer_caps():
with open('../capabilities.json') as f:
caps = json.load(f)
for cap, apis in caps['file.iudx.io']['consumer'].items():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/file.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":[cap]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
apis = [str.replace('{{RESOURCE_GROUP_ID}}',resource_id) for str in apis]
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = file_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == set(apis)
def test_ingester_rs():
with open('../capabilities.json') as f:
caps = json.load(f)
for cap, apis in caps['rs.iudx.io']['data ingester'].items():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'data ingester',
"item_id":resource_id,
"item_type":"resourcegroup"
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == set(apis)
def test_ingester_file():
with open('../capabilities.json') as f:
caps = json.load(f)
for cap, apis in caps['file.iudx.io']['data ingester'].items():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/file.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'data ingester',
"item_id":resource_id,
"item_type":"resourcegroup"
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = file_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == set(apis)
def test_rs_all_caps():
with open('../capabilities.json') as f:
caps = json.load(f)
all_caps = list(caps['rs.iudx.io']['consumer'].keys())
all_apis = set()
apis = list(caps['rs.iudx.io']['consumer'].values())
for i in apis:
all_apis.update(i)
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":all_caps
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
all_apis = {str.replace('{{RESOURCE_GROUP_ID}}',resource_id) for str in all_apis}
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == all_apis
def test_deleted_cap():
with open('../capabilities.json') as f:
caps = json.load(f)
all_caps = list(caps['rs.iudx.io']['consumer'].keys())
all_apis = set()
apis = list(caps['rs.iudx.io']['consumer'].values())
for i in apis:
all_apis.update(i)
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":all_caps
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
all_apis = {str.replace('{{RESOURCE_GROUP_ID}}',resource_id) for str in all_apis}
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert set(resp['request'][0]['apis']) == all_apis
# delete subscription capability and then introspect
# find access ID and delete it
access_id = -1
r = provider.get_provider_access()
assert r['success'] == True
assert r['status_code'] == 200
rules = r['response']
for r in rules:
if r['item'] and resource_id == r['item']['cat_id']:
access_id = r['id']
break
assert access_id != -1
r = provider.delete_rule([{'id':access_id, 'capabilities':['subscription']}])
assert r['success'] == True
assert r['status_code'] == 200
subscription_api = caps['rs.iudx.io']['consumer']['subscription'][0]
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
resp = r['response']
assert len(resp['request']) == 1
assert resp['request'][0]['id'] == resource_id + '/*'
assert subscription_api not in set(resp['request'][0]['apis'])
def test_consumer_ingester_same_resource():
with open('../capabilities.json') as f:
caps = json.load(f)
all_caps = list(caps['rs.iudx.io']['consumer'].keys())
all_apis = set()
consumer_apis = list(caps['rs.iudx.io']['consumer'].values())
ingester_apis = list(caps['rs.iudx.io']['data ingester']['default'])
for i in consumer_apis:
all_apis.update(i)
all_apis.update(ingester_apis)
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req_c = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":all_caps
}
access_req_di = {"user_email": email,
"user_role":'data ingester',
"item_id":resource_id,
"item_type":"resourcegroup"
}
r = provider.provider_access([access_req_c, access_req_di])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
check = False
all_apis = {str.replace('{{RESOURCE_GROUP_ID}}',resource_id) for str in all_apis}
assert len(r['response']['request']) == 1
for i in r['response']['request']:
assert i['id'] == resource_id + '/*'
if all_apis == set(i['apis']):
check = True
assert check is True
# token w/ expired rule
def test_expired_rule():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
assert r['success'] == True
assert r['status_code'] == 200
body = {}
body['request'] = [resource_id]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
# delete rule
# find access ID and delete it
r = provider.get_provider_access()
assert r['success'] == True
assert r['status_code'] == 200
rules = r['response']
access_id = -1  # sentinel so the assert below fails cleanly if no rule matches
for r in rules:
if r['item'] and resource_id == r['item']['cat_id']:
access_id = r['id']
break
assert access_id != -1
assert expire_rule(access_id) is True
r = resource_server.introspect_token(token)
assert r['success'] is False
assert r['status_code'] == 403
def test_different_items():
resource_id = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req])
body = {}
body['request'] = [resource_id, resource_id + "/item-1", resource_id + "/item-2/item-3"]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
assert len(r['response']['request']) == 3
for i in r['response']['request']:
assert i['id'] in [resource_id + '/*', resource_id + "/item-1", resource_id + "/item-2/item-3"]
def test_different_resources():
resource_id_1 = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
resource_id_2 = "rbccps.org/9cf2c2382cf661fc20a4776345a3be7a143a109c/rs.iudx.io/" + rand_rsg()
access_req_1 = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id_1,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
access_req_2 = {"user_email": email,
"user_role":'consumer',
"item_id":resource_id_2,
"item_type":"resourcegroup",
"capabilities":["complex","subscription","temporal"]
}
r = provider.provider_access([access_req_1, access_req_2])
body = {}
body['request'] = [resource_id_1, resource_id_2, resource_id_1 + "/item-1"]
r = consumer.get_token(body)
assert r['success'] is True
assert r['status_code'] == 200
token = r['response']['token']
r = resource_server.introspect_token(token)
assert r['success'] is True
assert r['status_code'] == 200
assert len(r['response']['request']) == 3
for i in r['response']['request']:
assert i['id'] in [resource_id_1 + '/*', resource_id_2 + '/*', resource_id_1 + "/item-1"]
| 40.896241
| 127
| 0.502868
| 2,749
| 27,196
| 4.787559
| 0.064387
| 0.070207
| 0.071271
| 0.08396
| 0.87972
| 0.864296
| 0.852899
| 0.84021
| 0.832763
| 0.822354
| 0
| 0.053152
| 0.371856
| 27,196
| 664
| 128
| 40.957831
| 0.717263
| 0.01298
| 0
| 0.784906
| 0
| 0.009434
| 0.217306
| 0.065406
| 0
| 0
| 0
| 0
| 0.326415
| 1
| 0.037736
| false
| 0
| 0.024528
| 0.001887
| 0.064151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9e2b9e0b229db108387b5c215ceee9a47ba4fab
| 3,178
|
py
|
Python
|
tests/test_converter_bad_request.py
|
NHSDigital/fhir-converter
|
02d3650dd328eb6ccd52b2fbabcc2b38e95625fc
|
[
"MIT"
] | null | null | null |
tests/test_converter_bad_request.py
|
NHSDigital/fhir-converter
|
02d3650dd328eb6ccd52b2fbabcc2b38e95625fc
|
[
"MIT"
] | 53
|
2021-09-22T09:34:31.000Z
|
2022-03-28T15:18:09.000Z
|
tests/test_converter_bad_request.py
|
NHSDigital/fhir-converter
|
02d3650dd328eb6ccd52b2fbabcc2b38e95625fc
|
[
"MIT"
] | null | null | null |
import pytest
import requests
from .example_loader import load_example, load_error
class TestConverterBadRequest:
@pytest.fixture()
def url(self, proxy_url: str) -> str:
return f"{proxy_url}/$convert"
@pytest.mark.parametrize("accept", [
# "application/fhir+json; fhirVersion=4.0" -> valid header
"application/fhir+json; fhirVersion=2.0",
"fhir+json; fhirVersion=4.0",
"application/json; fhirVersion=4.0",
"application fhir+json; fhirVersion=4.0",
"application/fhir+json fhirVersion=4.0"
])
def test_converter_invalid_accept_header(self, url, token, accept):
# Given
stu3_payload = load_example("MedicationRequest/stu3.json")
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/fhir+json; fhirVersion=3.0",
"Accept": accept
}
# When
res = requests.post(url, json=stu3_payload, headers=headers)
# Then
assert res.status_code == 400
expected_response = load_error(diagnostics="Accept Header is missing or invalid")
assert res.json() == expected_response
def test_converter_missing_accept_header(self, url, token):
# Given
stu3_payload = load_example("MedicationRequest/stu3.json")
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/fhir+json; fhirVersion=3.0",
}
# When
res = requests.post(url, json=stu3_payload, headers=headers)
# Then
assert res.status_code == 400
expected_response = load_error(diagnostics="Accept Header is missing or invalid")
assert res.json() == expected_response
@pytest.mark.parametrize("content_type", [
# "application/fhir+json; fhirVersion=3.0" <-- valid header
"application/fhir+json; fhirVersion=2.0",
"application/json; fhirVersion=3.0",
"application/fhir+json; fhirVersion=5.0"
])
def test_converter_invalid_content_type_header(self, url, token, content_type):
# Given
stu3_payload = load_example("MedicationRequest/stu3.json")
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": content_type,
"Accept": "application/fhir+json; fhirVersion=4.0"
}
# When
res = requests.post(url, json=stu3_payload, headers=headers)
# Then
assert res.status_code == 400
expected_response = load_error(diagnostics="Content-Type Header is missing or invalid")
assert res.json() == expected_response
def test_converter_missing_content_type_header(self, url, token):
# Given
stu3_payload = load_example("MedicationRequest/stu3.json")
headers = {
"Authorization": f"Bearer {token}",
"Accept": "application/fhir+json; fhirVersion=4.0"
}
# When
res = requests.post(url, json=stu3_payload, headers=headers)
# Then
assert res.status_code == 400
expected_response = load_error(diagnostics="Content-Type Header is missing or invalid")
assert res.json() == expected_response
| 36.113636
| 95
| 0.637193
| 354
| 3,178
| 5.567797
| 0.169492
| 0.106545
| 0.115677
| 0.167428
| 0.863014
| 0.808219
| 0.77169
| 0.752917
| 0.731608
| 0.686961
| 0
| 0.021776
| 0.248584
| 3,178
| 87
| 96
| 36.528736
| 0.803601
| 0.05129
| 0
| 0.590164
| 0
| 0
| 0.297369
| 0.094239
| 0
| 0
| 0
| 0
| 0.131148
| 1
| 0.081967
| false
| 0
| 0.04918
| 0.016393
| 0.163934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9e364c5a69decd3ab711e4c1dfade27dd507d1b
| 34,433
|
py
|
Python
|
validitysensor/blobs_9d.py
|
hmatrjp/python-validity
|
5bf6b2bd10444fef1ed1552e71e9a08bf12d1b8b
|
[
"MIT"
] | 510
|
2019-05-19T15:49:40.000Z
|
2022-03-29T19:57:39.000Z
|
validitysensor/blobs_9d.py
|
hmatrjp/python-validity
|
5bf6b2bd10444fef1ed1552e71e9a08bf12d1b8b
|
[
"MIT"
] | 114
|
2020-03-31T20:38:21.000Z
|
2022-03-28T17:16:55.000Z
|
validitysensor/blobs_9d.py
|
hmatrjp/python-validity
|
5bf6b2bd10444fef1ed1552e71e9a08bf12d1b8b
|
[
"MIT"
] | 56
|
2019-07-30T11:25:40.000Z
|
2022-03-18T18:16:24.000Z
|
from .util import unhex
init_hardcoded = unhex('''
06020000014a231406e5542fc6dc3b1aedebe68f55596ad3ca13f6e019994c6f71672fff756fbde0511d09d45978b12ba415b3694a0e763
48c8cfe9dbb9abf86813fc0c67c1005519a6f87360c2fb3e12bd0a9e012b06d9f5c9b44ccc6645b0fbd47afe45c8c874fcb88fbfd18fb7a
9b3241351f256acce689f9586a52b01f8fdcb66cdf3b340b1f9f386d58ca24fdfcdfbcebefb5f3a3c2a08357721040235a20ce1ee2f4f78
56e0d9c27b92cd9b975c86f2c8cab1179868f795da674004b93c15e6ac8aa825a1907f2003cb9e6df096423167b2cabae98c0cd3fd200d1
1c7e0ee1ba5a725f7f2022886f3caa5f68b688ba61bc5cb0190db569efa0a57aa9d76ecdc7440c8920ea02768734221260d083bebb39c17
6d129c01d1a0f1388497171402ba041afd925d71e76ce4905e44fa5fd528759a2c9f12895864b5aa394dc71a4a17161dd82197a10742fa5
f3135c5e78820e36653fa3db535f57c71897242939d7da50f81070ce9ab81c61af6ac29a6c6c4a5df73ffd08542fb540e417939ed117298
051d27736c2faf1c5577a2133b6f60ea7484c692faae2a49c51c8e6f69af77774bad51a9adeea3109d3611d6a437cdc0c356e46a8f9a6d3
054c5519c17c986e54f61f8329006ce184c275984799dbdf5512188fa8ff10aa2ddc25eb697ebdcb156509309ade5d7909a734bf35ec69e
062cb941c2ea4af0958110da93bd2b5f17fc9b1ebdbd8020a3c36f22a68f707226cec716126d6a830219521669bd59fa0e4bd35db6ef0aa
2999d1c0e7acf67e598696cd58cc4bdb1b7c037ee9a085f784c4
''')
init_hardcoded_clean_slate = unhex('''
06020000012c40c9d271378bc0912ef5dced69bd81b7fc16972c7b46e621af54a00e2cc6baca6eb83ea30222dfc6c925262006ae93412ea
cf482f2034ee7b13297474b7e1e91f279cac2ccb7195443e4dd3328cfd292ade073fcc2eaa8f07b77231130ba997f921b9be7b4fb6cc691
0d2976b3e050913b27dbe73afd6e964260b9435ebab5117e71f7cb68464d4b6f8afc7e1a421f671f5854a1d0c8ab93ed3b88b2bc1a42875
e40b15f0e78496ac40e4a4a7fd39a97534ce1866479e0384f0789bbfc2fea0ce982bf7a9df97d60b237edbe1b26c9791043a96b81e435d6
de5971c758d374905df95b0cddabfbf531749ba191f07a6f5e2722852f137a53513a9ec6ab30c3f09aa6ce21b391e55cf81dcda6422011b
f163317a9a4382546141d45f2274bd660103bd3af705f3ed12e493bc4f834d5d7f162e2c3405cf857b00129789a3353bf7fab7796e267e3
062d55660dbbb857911ac8e871c460dd31c56a86a5631475f0f2ee5e9ce2af0faec0931a640ba2394025f29ffeca3a7e99c15a78ce1f1f7
808cedd7601b9b6382d72ca873257d4f6af70e29e22afea15e36e0282b8f0bfc68ffa3417d212b8bbe11bb73b363a19872e6e947d45de30
fbc493ca083a0a4650615d8628606362081ca6df5d67527971d1776ad76a7a28c932f0317b59cb4a82a14b2bcb7b01fb662be1496d24d91
9140ec80068b21a818daa2fb8e05f63edbd4bd5797c74a28b3e7cf81c9045248584977711341fca3f08ba91ff853b62dc24ce4bba4ed57f
47bd458545d805b6bb14fe0cde01440b60bf7be937f6444a8e2a10ed8fa9ddb8604bb95fe411b97112e78dbf5a4a0f004669c93765a9f38
665cb55f5658895c1c06a7aedf694bfb3afa9b8b1dea5ab85c821ac20b0663b95023642fda36ad78e3e00140b966f404f7e55f0b416ea43
b4c74c39900830abc6906a1004bef1b5b7dbbbeb5ec1b22604ac86429b9f56511b746a7124c449b8c9498f49144abc2d64f6a114f1d7f91
aa41249faeef4d838e280cb5d6fc19cfe86c75f
''')
reset_blob = unhex('''
06020000018772bd56dd58d64023e1745f7c253a49b32dd6a02bc32347195b6763bfcc23c9e0beb0c5809e06a5628629f28c4048530a5cd
df6f48391ea0c2cb5a7ffe93ef04c8b4dad584117e65aac085c25062a0f12a8ee432c7ecbb6613c28b743e4a75e382afc6b8037e342d466
7b66a73691edc6b25698c15e78d9d67f7cc56274e99e6b7bb5fba32dd42d74dfa672f414c4a29302b30a202d00a2571d2a884169e82106c
3dce195eb81b62aa7d29481d5d4d5318d8dd2901594752092cdbcd3b5f9f73eac99efb57012305e8aa06e0fced2b0a92150b261c3cb86b4
32db0b6aa7ee398c2cb094de1393e063094dae769acb692cda9c6a6393db82db00b6d8b5b58787529a8e1667e1641498f63621b81f583a7
614edbb40cf5f2ecd2514799dc6a267109a1773dfaf6e75ddeccd6dcc600b9b8697683ab391a5ae00f498b2742cc124d56dd06fab1d3609
3733730e57436a742ab022bc1079cd5a182c665ce7fb84bd33531ff22387da107af7cb0a8eae6340b5b0a9902aa4bb5c67af09d45cf79bf
9fd210be476b3540c8c98de9e9c9c0aa57bf3286806e7cfeef2d2768a60ce06abe57105f054814af2c58d707216cb0a3d57265833103a0c
5476fbfac1e6232854049353f6212a2dd4a86a5afb4d9cceb4d497cc1e1a60b7a29114cd3173d0e53ddb7ff75d6307f3472d0979f275704
43114624902608334c95711d1b98f9f9e1f5100e9633c7edb1821930455c8af061e826d218320bd2fad3452e1fc99d04fbcb4ef0af13cd9
31f507db95608979d44345b34e5d18d1306e6eb4a8e5a6e1d8f5b729ee01ed9fb7b9f8a13fee9098c2302807c7069f7dab0607ad34e7df8
f329dff61d6b0b6579c05d8306b604e1a99d1d4cdb2acc39d46960fdee90a47e77b01f057d60979aac5d5497785acfce5a3f1e6a96f06ad
099f57a729a7e2e7820f657a823f1b7688bb104f9b5297a543ee2d326a13bc8243df4ffeee7156005d648b189169875b8e4197eef5fd832
a20755a03c12a936565896f457dc4a1c90e333e383b233d9a8f8cf0f70c12d779a6095ea92fc6ba9040a6a68edfe9aaa76436a5a3c55bab
afa391934fc4847b0ba17b94dac8f8bcf0587e1a74dd654a78f60fcd5bfc151ca10fdbfc1c7ca26c954380a9406fbd3aececb69b6ed5905
aba3436bd331da878aef62963fe9d3fa481f79719886b201581ebf2cb05742779f18f6375401fd9a24d018f5f7692e78c9066ff26a9dec1
395ae3f7b4359eccf34ff3d91ad7014560e40365815e4846599ea50d15bf3fde561907f1dfc58505f1eafbb86d6a1f1eb354b3857c50ab1
8dcacffea97e5421a5504c05a48dbbe5933aefdf525529bb0a74677778cc6c9eabe0847daa1151902280a6d0ebf266ffcf24294c03f72af
4409541f6ab76f59e9083b3c87064c0ee59279542bc04b210c3d7f484b8fe32403c8bcb7306777d1a79461068d96662d76babe39bc1059c
ae30152d8c65765b96dac42006b0443722b700009941942738e1092559400b9aad99dabb58dde270e7117bc0962fe0c4bdc4948dc2c54cc
e4d0160332120e7edc5f03862aa271bb249200a38ff5afd4cc10e146e8127eea60a21dec67276c29eb51b20ac316378c2d8318cc118d5f2
7853292c65ee1f344447e327651b336a434a62eea2c69929ec0b78411cc1a26934128875155cfd8478ef11fcc987d6323a5574905f14452
107c2ade173c166e981de0aa3f7ed0ce55fdf0bcafa48381b216244225b4f45200cd107e923545cbff5a608bc78f77c9cb56af94ea69294
40237583d5567c3ea45cf788d248c64726db1b0e6278c4aac4d5b4972196b149379834b4fb7d5635b93102ed8bf7a1715c8a5fb1b9b1e06
128c9224157cb99ee1caa6183dc85e4eb12df7a021db9b47607e3bdccdc04c7276fee38c90569d2e429ee3434e950dba0f95c48131eaddb
b578659fdff548c5e87a63cbc1bd7d3804f6ec9d23eea0c4bd6339c7d8e02f87a4cd0666ddeb45735d28c196b056eb43e6a21b4caede9e6
bd05ef701d843a1302ef666b6051f7c995cb145f93a98a14af719ca9c2a47ad0ad04bad312b7fa09fccfaec2806f0be71783cf2f2c8a609
daa40a00a03b8c826d178631084df666f599080e401461676003e9ccbdfcc461074bb7f18c2638715641606bb670d99a95862522317b4e7
bc29dae5af1686fb927a02589bd7376f6c03bbc5d865eaf7bf2eeb89cdd91e3f36ebd4200ead2f2676dcf13917fc6b0b4b7716efcb8936c
a66fc1e971316f20f8dff4f9de2fd5f0e582049e20db3eb041aac0278a5a2d011a7626079338aa2c29ba36cbe75c663c805407e80c334b1
c13ff0a23287738974a9ebe2ae57d3279009bd562949e8975bd24c441839758a18b57c8d3cad25d7d97445ee3d5353717a881e095565a85
a4c54bd38db42403602377b64ad1383fdc55b5782142b79eefde654a7d1a265a046a6eccdd423305cd8b9b31dcac2286228f5df16193317
6af14836938ce0f060b2d994c40ac956f4cb69f0e0f2b30b92bc4ec1e57361b07179561cbdc9d6170d08efc70acd36330d2df87a6c4f85e
2157fd0d9a5fa7b075d13833784d02089abae613ae301b9881dd32ee7fb55477591fa05b1abcae5bc12c445ea1b4bac596d28234879c13f
9dde60b3e6e07abc117dc517b1ef1bf89b33d2d37d37e0381f47f7062ef3d64ef1e0c24ba931f0d00059a144e09e90b15ea6dc90eaf63fe
d5881afcfc950c18ee33d55d863b56b1ac015a81372183e63e797a27c0c3368034120834987391db4706cf3df4b4294ccadb8f89beef332
a0a5c69887a141e0e84f286e96a942e0a97e3c99fa869879af94cc0efe1daceeecd3ef68a32c648f5605b0edb68c4ad926e556ef0620b09
6d0871214eabac748e1496f63c55c565ff5892b83211ec21f03062613760768f15d4f1b18cf871f7bfff34be4c037c055fd2b4cb1eebbf6
2bba0c3cc2756edbb705e809f1ec3376a69e92b87402029c146ca0728b0e9e964b7557831210d1a0eb7ac238ea5e450753b347b8eac961c
ffd7f53a37d1bf2787750a8dd744cd33e66116f5c3ab5ad715306d7fbbc967463a4c85092fe07460c26b434112f3535064d7406ebdbba49
19149eec2eb6628c59c1432b325b79cd8c8b227bdd1a9d364b6e2195a772d0231efd85029a893278a8ee140706e03508fe60ac2b84935c7
753a72dcfb19968c8a597e086f43666945132e0128ff50ecd80e17528975a3b5cd3f401d14645a3b96cf78266d77e686a78578eed858d5e
a71e0db79978815804065ab1b02afd025ff0a0050618b79952be1f72bd404ab46be600f5da3a9e7f21e8960b833686762913979c26c3ddb
47e5aa32a182681b94612f03dc5842937cc2c417d5b9a5d663255fac6e5a8fc4a7daeaf65f8f5a2cb58b8b72df04772add3ece18d65f193
baf8e7d63cf938df5b7bbff273c5fb77bea7a09bc93f908bf774b15ec6647089821a05e4b0784fef1c07fd8eafbc10467298521b7429e14
aefa16fb6cf9385d63d919da2cb4fbe91c369f1e64eeb64857e0412354c8bb62de804d842fa0264c2dee3dda5d6ee26742ea0a7cda41b55
ae19c95d633259fd3e758c7c8a45c370852d7cb8d0513abe1a1887e9dde40562817f46f55186bad6664679c71bf09ad55295dbe2fd72d0e
2f4f48f719077ac06f52ac37765e63ece56a298116c4dbb315dcc04dfdb0e92f183bea0ecc7256818d8cf4939e6bd64816d8062404825cc
0df90210f00f2a8fc19e9ef9f032eb20df619c6c8363d803869bd747b4ff41361392bed560e32872851b7df36cd4cb2658bfa1883947f83
6d347333f5b77b394b1290b13b76cea0cc337f9d5ab21ec91eeac1dea65216b90442bf88158ab40c8b488c5e8342fce21489ef36c5b3928
30f95b8fe6fb0ee3a095abfe8f31a159b0ab8c3b48cc163c996dd75002b3b742289e28afb0854212bd84dc5509a40a2766c10027192f6db
7053b2f460b609b857db7710e3457906496203e53c4bc3b39407e39bfe06015d6d460873a2f1e05c46d2f673086edf2e7b4f5c4982334e0
eae3eaf78f80d5967a5266a7c5d2ebb9c04baffd56ef426f7bd4dc91b6f1e1c273e00f2d261e86ca919ca1f48b86a7a84e9612096a2457a
6ab1206245729e76676d99d7facd57b9277dcd4b70f0e6826dcbb8747b63f5d5c04158433a70a441875328719123e27da511521056fd7a8
90a9947f177bb13edce7ed55376f2bf13fa8cf09b27a43ccbe071dcc97c385db7e44a4b15de14ca0012e6c3a8e41306a4b6fb58adec7089
9cc19138d7fdcf8db684459f6f5a24f7415872da5b8f01c80a75a8b1708ccafc2efc224250b462647849caae820c37ef69fadb4134c002a
a9365a4df9bab1ee34b2fae1ef4fa6ed1562ec0a7784cfbc3ff129c7ab02fd316f2268a5caac67ed371aa2d9f8f63185e87f9db5ee427b5
6078b27eed7a36f48351729f7008a13f29e9ebf5bcaf815563ce76add94a547d96e638621afc743465b49c0091750b2e518ca398b77bc6b
b44d6d0b95019fef04fb2b0c61f9b85a353a15e54452d9307513e40cad6d22295a32dac6a44fd3e5149fc791c50a6403aa5d7f649ae8577
057cef9eed7cd2889cbcfca443c740575bfe39ad245e61214c67dbf6f3af05ef65aba392955f613702d6eacd7421797a5f87af8812b55bd
7305ff01075ac14d9ffc9ceafd445a875685642470113307c93446dbfee27db08dcc7f822d32bdbe46949eba44b900ac6b72f7dc961d2af
fca0c6b73bdc099183a7124652227fa06629f35dda82f6e6c248f554c3abb4822f5c55c9aa492c527de24783e26a14416b4120dd27aface
9cb3c516a1fc2c0c48006c789129e6459e276a7f54df3a4812b788b275e541d946f48ccf04ae7190b0f10bc403ae6a3c58b1ec4da885d5a
4e6af7df1d0020814bd095b499bf34acae57a2882bc1ea932730d93c82545e0708ec67e46544034171420ebdd36c44db41b96dcfe5f13e2
52fdb347b6c1257c7af8b2416ba25ab09bfe6f606a1cb4e8a7c5179e9b11c10595e16e378bfaa8647cb29c76d9947c4f38db6f8d28d5e98
189219724430256233350cf66972f4ee96b79f9e76f7f892f552e87a747f8df4bbbfd76498e23dba7ac2bd122130d9fb0642327c0feed93
b3dad9f9f6c6175ab6bfd80936906c061899b8e7f6cdcfec0d6f6ad5d8e9dc0c325b471d5714bbb21600f38642076aeac17ff86e29349ff
7e653ee4abf8a2b314d7799ae6f7773f6e0566c7f9f600f9ac173641862c16815e8175c4f46182706b4a72ee3f68fba749f4ba84397dcfc
36221e6cf77e5e381897b2a5723d10d603d24bcc734ba7725d4425bd09066a7e18e41e53375abd8e0f6fa5335ffeb93cf805c1cda2daa9b
e119f64800f310ad1f2bebe1f6e5e3730417fb3804218a4535b7add147a3deded57036251b2b03d83c297fb0c3341d48e512616c61b5bd3
a63faa55a624acec36ef1e377bef570663e69b7d908df6450a1506e8033d9d8033fca549213c2935acacffdbf16af79a829d5d0b28ef473
96515eb7b0fcf574e63d89c021f24040713f78a38b38b4e8f3491b517a6fa0a4068c185dbbe7d1e9c52c0e8f0bb7ecbc912ea3cebcdb080
02460c402fcfcb4ccb0ed77f7ccbac010467b885f04a0e9646388b9b8a0961416b096af9644edb9aa120dad144cec504c139d6063e0ab40
95c3008a3d46bc4a89bed935920e6091380d1e24821629534d46a3a834481107d6565729531955dd13cb299a7f83d43cb79340a772a7428
09abe7b2ee4ded9ed9c6c316932f0c7af782416f6ff5034d52cb9d5b9e30ac80cf88ea4b2edbfbea4d717594633acefa19be4fd84ae9c64
1c0c404458144b88b76c2480bcd47da397a7acd60121ea30793f56a2dde1b76b50fb3b2bd7cd917b41fcdf75a3886c5c5f1f9fc5abb51a6
dfa95091b4c8b30a0e7a09e5f6f64330b3e2cac2e5cdfd89376a1e46c2dc6a14ff726f76d610a8c42b714ab1ee7e5323b0632f4389e3ddb
1ea48a203aff0aee98388c7313fdd84ec448d1480ef9538fbcdbf407ae7bbc4078ba490015bd804f8feef8e417be8d61e239a79f237a448
a2bb70c1e70e8682c9d031cea28d16b12a190a88624148a9356ac5f5b85db0c709606eb5e65e334f5c320a667bc2fc5b134096ca1a74c71
e5ecb1b3a94238d7fa4154af8f467f91c5fa79c97944c85b6cd5956e0f346d08d443cbc4497290c40275a4a90707615e75ae90fad773d6e
5a6b1fd2827614155a8538871936647a5ddddaec8c720f27d7355128ef46d2712f019af6e19b4859441bc8b11a729559b9a13afebd93435
777d18fb01fd98923d4e433c0966e13c72c90a0becf585b6a1ac00f6b8984baf86cf3395425d3c3dd302067945789160d94009e5e9a1367
b75e9cd719d3bfa0474f1179e0ca6f4b5df714520bc4528fab43aaca37862e83bcca4a91ce51958e6b75edb3221a879f382c06ac195e694
c867c2b91c6b680598ce9d284522dab5e24d530859993230aba0c7f7509f6bb5ba8033d04bb808cbdae641a974d6d6e4756ce822b8115a7
c0cbbd394412a143c4886237ebba6399d5445954d99c27f3b956b31b0e5da3366cefeed07200287e083feb46980868b93c951e182378c66
0987bb3fc4857dcbaf20129ad7cb50459647464af9403bda41033a0fffc48240542d966897231c2483247c928644ccc7f5b3b83f5812b90
9a61d07abbef1cfcafde5e778cceb5181b65a94d4e8a47a2e7f64002c02edbeb6b9b2c2c764121b755d5b28a33a465fbf99339632ef29b5
124147d9577d354b468926174edfabbc9ae2c994582f63ea179ca92ec49a88a02678846ee1e929993fe24c2c182dd8478726b9a0333074f
5f969cc0786f9d08658cdd1c86c165c4caa7d71abc35a7518d022a06a8593fc70728ad3ce86b3791c7b6f708ef67801f65acbdcddfe4097
c3bc556ac769eba132ed39b7ff1d1b8973dba0aa07d38d613eea23583251d8dd643040d1e23e85d80a7881777229a8b667c9b885ccb36b2
107e1bea2527852a5c6c8e9b0ec08e6418e204b135ee499430030d7dc292416e8a4db0051ffd4abc2afb3407da84853cd3b6c03e9e6e8fe
cafc8b6cf086852ac0325cd30bbcc0a1e6ed76ba11678ecfead8b328e8e7a4b6ab11ea9849c4a797c1505e0d6927f0d490e2a6f23ad8ca7
c39bc70c47bd69520fbc0f4cafe6ce2841ff145ca372582b3d4b5b65022d268b50b22331acc749322b6d95a341d0de2a19d45029114ed82
999d71db0af22730ef4088b3ee49105a72d826276fabcf54a6f22c2e2f8d7e64a1a6f472eb00926503b456f20e503904cb17bfb2a5333eb
b94068dcc9a43717ae994d19c9159a3c913565d0570ce6a4f06aba86cd7a73658da93ba16181a4c3723bd9f013e7f87b7b939ab144a7832
91669b2264b5cdfe9aa350f37539f63a43a88620c40d6855c6868c5574072d99f05bdcdb77c5ec08894da6fd2d97ebecebaaacd67ecd426
c262320ae2d6ab5bfe16f419fcd9fc4cace7cf2d6d929eacf96532b47cdc9c033085d820f051fe4bda9617afd2f7de60c6ead253815d50e
e3d990f84676ed77e35e1afeebd575bb265c4425bae8338b5d8b487a546e2931706f17872a452d41bbe27488cc8cc9bbf0ec9b925398128
732f1e0ebd23478990bdb318a4aa4cfcb48e2bc8b80ea9d1f2e66b50beb048b4df661e75348d348229798426b2ab9a74e5e748e1e6b1e15
ce3d9a94cb9672e00b3221d2009a096b4aef8e665b239486a5e6129a679b274ffc15de7def66417265356c69e258874698356c7f623a8cd
c3fb390b75cd06dcaf085198020d7c1fb10be60fadb58268a6be902069dea0a0508434f5b6a8b4ea39dabb51957095af0e12e9a354d4448
c1000408f74ec7fd2e62cb00e2f5869ddc5cc9bc8a7ab717eb28ea1784692fede6ca9387497632c27bea3317623d17447d8632503014460
4754e471643d39c649a33933286d970efdc4fab1706498e675ac2e9d8656fed087151d9b9169bc1a90b7c95d7c736dd6f497ed15bacbfe0
9ea3680060a9ce4c6ddf834c9dab4896c371624390b8d7f08be51052eecb66dc1dab15fc6755f6e05d3ffef0d5adb01679cd7d9b0e22b1a
af62191ac6e569520325f8dbef37739ebc8150e5246ad7e38774731ce1f0f2e64c00b7a2b7a331a4576e364f6db0596bc1b31e6c4a061c4
8e85f18b00e256cbbe136637eaf5d56b523212875cb3d74f74ea54a213a5e10098a1973adbdd34d66003b31d685d0d0218e648ae33b8c76
c8ee454adc9d9589d4ef6ebe551fc3fb266225d9ca33dd11fabc977a43f0f9900f5ba841fcd4370f64f11ef5c42c041c9a4cd7d6153172b
3b9b3939057d115a577bb686278df0f5dc95df07aafd7da89789606534df4a7a37271b901309ee5e4fe79d2f87627fc73ce9e1965a13d27
579e8c6bd91f7e8019cfcbe40a2e7797453734442010a9afbaef3a3545641b6fba07385faf45b84402d166f7dd36f2be06473ff2eaeb873
c4e96ca9dec80ef2398970c643ab70d1f4e89db432128bce7547ea3b3a6c4d26c2e11e460c1d44880f799630e3b9db9e7ffae65deec50c2
8f7e2d9d8b2b810e3c4d8673c4f21db9af66cd69833b697e907b6c885ee9bace76ffb265694f3ff011ee3b8f60e19727e33ff30177367df
880c6ae2b5c35d4fc60eaa93864e2003c018cfbda08189e42f7831774c00a192abeb81fe921d66be0ac3db3814fb44770d6b10e475758b7
730a50c8f1b34b359c3828e0ac4adc39d6a2a21912b3e96f0e43e16bae7cd553d8a2ffc95e9421aeb75a274250b5856304179dda05de11e
9825e1bc54942b312269a7f944e3384bbc09657241c0c785358c145b5da2c249c9f51fa5e7f0eeb148bd567ad514675b64f73199627af2f
1bf7ecf45129987442b71f818ea760530553102dafede00f4b5218c83b7d4ac0f535b98bd76b084f271aefd510f093bc5b1fa49dab3b747
3d2d1aaa5a87dc1de18b9f04f8efc8e825b02e0b02870b5e957889e91f65d8d17a40c85ed30f4e256b248cda865b687d3886a5ac7f3c7f4
05bad5366a6f7ae635614df3cb20d1941574aa7033a8313cdaa4029112d670e71377092e70acc38e368f1616596c0220ca96c402f0b0c0d
ccd5b01dbe26e9ea41aad811b0d28e3aa02b45d72e7fa43e690708ba9503a1af4e1b6bf29951571f384f922737d7b9dadcd3da40550ac5c
ad2424da37382b7cef2529dde9340ae9fa26d5c9ed2c4c43c0f44d20b481f40515a9f8afacb83041528564a105024b49708ed701f23c900
8e444362bd2707f16c60096ab912a440658446b19f391a1a700bc1d2fa04716ab2db2e80e9cd41b604cd686f56ddb820236cfc08db0755b
4f123693bf0912d0bd79c399b2d303d75ab5de2d776ab363442554658ff105245dfae18677379f28d6053aa166f3785a47f386299f57ff9
4017e5136d6fd711ff5db82d20def1835798b1a533a43c21451e15ce063b0baeb29e2e7279d818ff6e8c339cd7daa24537b5aba6b764c4a
0dc826681d370cba556b9c0436d5bd8f4d5815cb4c29ac91391e1c35dc138445eb75072704ccc0bd5653dd4f02077e7f8743385d8f3cb68
989813943946c1a8d3745931473edb9bf06c45985de74fc33120d7eb641b6ad7171e4e92a66f996a52eff9cf7a76ef7f799cc30123372d4
3dbf46fdd64b3f69e402a210e4be1369f5fcccf8a207e50a47713995864d47a710d050d3bd664c2a8966ebe311eb604727f79380127d883
69ef5610867f586f5061f481f3743d6a2c9cad173ad8c1013fc136f24b36604df60aa264d7bebe5c798c1c1628f597fa2c9dcb6b37d816e
c3f543aa910ffcf20460f927ede990e0964f9d81bb13ca8bc4bc092aa24bb0458a34dcdacb625fbf4d03ecea9fac1e3f61be208d540726b
1794ab2e689fe99530e18e61359d87e1ef25534585088b5b05d7d6a776c2b3d1a5853e22c8c118489a72dbea10f24b7cc70ee7476f80aec
62f2760b99c9d83186cd863007c0b4c52a93dea6aaf76bfea9f8994890dd1e45b26bc3826fd069d2747f7adbfdb7b666cd940c917c8e52a
bd5a793e625ad28340323fa8ab8a7437797e947a77d7dcf5ee5a7cfb939de9b3b867ebd162ba16aa2dabba95ae7ee4c2f9570e558fd7796
297ccff9896145fba3fc83e68b75fa56dcc5d793e1e3400487fba3617a591bce456e11b9e9435a2c321ff93ebbf889eccc02a415f78335e
462a4e91b7d8a2319a0e3dc3fa2e513ef7694844bb26bf576db382d98109fcc233a806aa080fc660f90523c3924b6cd5a35133d3925aaa0
97fd2c06a5cb1a3309e81bae2dd52cb14d6a2d14af77cceeba07cc31a839b0c5d821f8ed1fb8cb3b308b86cffc72632d126c66ba473831a
2cbc6cfca70c111e3f4651a74eb9902e153de26a3aa426963ab630e47350f1c307a56da061ffec19e2b77a4b4338b11d9d6381c69c70682
091bf075897d812ad71e30f1b6512ff48113ab54be22e3466c68740df7c6cfdd1b2115d416d55a379d9b665086203e84701fcb428ce41b9
5b15caa4ff3d847c0ac4ee2f279bf7182d4943eb19e9d395d9b5e4d169b862b4fba45a82e3336a3c7af85feecde8fbda1ed5c829112207a
d7d77b451629ad188167ad09a1d9c4241002a451ac2804bffa6b833a19e62a3cec35b9f03bb4f850697ee86e75f9b5692da99503415820a
3297272189d4665df347bd3795a86c27739c6b03ac2de394e246a5f34cfe736b36457d14dcfd86f7a11c8b6739b6fb4e0f50c1a560ce621
9b073e2adbe80e47e20e54e52b72e2c2be7121285ccdb3426e1c092ca855035904d8ab66de6b3360ecac584ad910ef660d8d653ada772e6
2eb44d21868b3fe389868ce479044148aa3f81fd8f9c1d71ecedffcb6bb346ffcafcbe716af8928890b6832d29fe264353d7e8f53cf89ac
8a2c4dd9413ac460cf7500484edd1f939fadc85a59429a76f937e783e80696927bda906d00aa2ffef68799aac2b7164dc2e3c96ad2fe20a
d404255e4effd5e59ed9011b231e6f744b5c4e63d878356fc087e306ab5cc6772a582fe24e1b927b00b6f416187cc1b126286a3bbc4f5a4
33fca109725b2a9aa7f9b285dcb3ebc64e8dda311547c67eb029de40cf5168beabd1d17593056622a1e2c918add4987bc889debb5d1aa5c
91b5710511171de5d2de00658cc19605ed201a31c753d80b99b79c9a9f63bcabd9cd5e55ef5dcea3b733d696b24e3fa0bf0384b9d4a383b
7e39cb90bb9454595a31ea3958f0a43300d962e46796714bfe87ad58ff5de5b892e04ec0a8f32a739a8dc78f0b36dc17fc5fc7ee965c269
427292ffaff8e866094ebc62af4c37655467a1fee3b69b440817fee17f43af0b240b1d9d9539a5f529a571b6ec7043f408c7c752ea77014
bbbd95ff72c837d03761ab85239a48e39a8b1a4cd28a46c147fbc99cd5bfe105e8e5da35c2059930fe805d24845fbf21e2ac1fe3d5ecaf3
5b77a77d88bdbd2ec5e782d17228dc396a4a123cb0f6db56ba62618f8df73781f65e356031f91f31a5b94095394635b03dd3dc56b07e643
5c4ca2ed4b586a24a340ebb362071198cccbf4b902292db7708655578d3d1c5354a8610215d88480db926537ae1687d6156280b47c332b7
8c0f6d68008d48cfa2dbf8914914b01d69e3b6ffd5a6b22a2cdcaab2ae214dc58a39f15c857d53510c1889e4fe6ddb7d0fd30a7518dc6d6
c3ba2dd28864f405ba135571ea57e822ca5a2557c225e787b7033d41035dd6da69342e250c607c2bd0d4c7766a2f20bc71a8ab11e0897d9
99a8333e61cc86920a449ecfd50102c07d61eddd82fcdb5c57195c3e27f7fbd33fd901ca86e26f94ffc8bc8375449c3f3ecfba05f2041b8
a8f15ce898c4a66411a2a1e3ff7b5030f2a1214799d28b8dc9847da701d025766cbae248a355b60992d6961ab01fa2154b72763b24303df
a21d5f12b985c9211b0ef563f24789e497cd63ae77529bd62516b8fbfd263594ab16c7b8f7facc9f53c96afdb9637e9bd0eba01ea810e7d
2ec5e71f2911e5b25a85ec897bb249b4228dafe2246b459bb3b1ba82d4cd05deed5bb63b2cde0dd8132857da4048e53b8556bb91c6ed154
15894d02ee47b20d0fdc678b8864f00e692061003e7ff75120d9356c8f7ead19c664c353df64ef3be5d93e06f2dc12708163e153daac384
f4f917b6250352bff8533ef043e1d73c0ffaf4883f92795233f23cdab41ecb3f33a8a9433e464b084e17764d91dd4bc83bce1ffff994122
86bcbd43e50082c913bd2911f455214ca86e18ea8694ba7936ee3d52713493f730c3af7581db3df5635fbab0edb0b349e4577e8ba39fea3
82314ebbb02ac17c9b1633f6660e2c421193a46ba8c5df9726634be7072896fe35e8d17c8dffd892b6173725fcd4f87e63b9efa571f5c67
f4b8c46bd8b310fa8a4341d67b6aadca391c9dfff5c8ca69e539149da6feb232cc75d7b74d6fec0c584e7308f6d1674f83b3b1460d3eb60
89adb3d7e2644ebbd8c04b8b6b8a8e511b707c6e0bc0ccdffff3039fede5309eef44b1be5d5729a1f39d250e6969e9e5f53811996b77cdd
e372ec31cb86046195a704bc50c8293d778d20d783dd18e4354c2d196f3ac5cd4286e09f67e47a4e8f6027d558784267692fd8912c51593
af17397cc41a203177611e4994d1c20a4fa5d4918ee47f88c700d52862c246dfef25bb728ca52d20256c16ae30cb48da14a584aa5a68f4f
fcad4e9b30f306698bb1e39c1a1cd04639a25d81f6e2f9cc42dae50e2dc56e2963a12e3e9de0fb15371e689767f83561b5310d732fee963
4234575a14be0a1cd246a7bb311b129050a9f7159518571e5d718f4559fab261f5bff901ae5f26cea861dbe237de9a4f4a2b77d0b9939d1
24a5fbe1b0ac293644fd52d546d93e7d48d493688c798d829d03bd27104a05733449a426b779e2a69c5d7d352bb71f53f7f01c149ee006f
54e9d5f83bba2469d32da710b4557a20622e9196958b8e4d05a86a23c9899544ae63a51b51853bfb1bba2ef68cdea75c26a5cb0fd960851
c5e98fee2004db3a16d72ce9162b980d73b716097d7bdf31a84cd95530fa32dae0575325c32d8debfccfe15a8b6b4e20699e8eea861b9e0
304da2a5adbe11e7dcdbc14d85254859a61008c0a5bd30e4947376c1f785256186362885b06341f88c449b9d01ed813e8d14e6eec21a6af
c7e184980cddd38e1de762cf926edb8c9a7303b138094ecfda7c69a32c2f545c13c71a153a75d1cd910d28c85ba3f55cb8ed5f18325f40a
fdacc38046a0f5fc2d6a2148c3c8e745931903799f8c8cd6d0bbecd0532de6a7ee02ed33cbd62f454d323def4fcda5fa26e40ffb475f648
a82d404e713a595f76305dde2f767dd523880c2eb262bb3bbaaf246c867f9be3254f1a551badc2614a798680de0194c80778412db8b23f1
ec5311c448964e3de8e73b4394484fcb947b6bea3e7bf450503693eb4ebf3ec9a98fb2e6b75eeeb31aac5aa3d76d673f118f0027fdb7f32
a5b443ddca9f9b51734975e76754f6920688e0fbc57053c35a422ba6dc1c4957381fd7a869cb1e4530f8ce2b3bfd03683f7c4203f62e87f
60b42640d292f0525bd7ba6855a05a601b9c035a0411e6ce4bc151a69649fb205445cec3ab72baafe8cc7e352171f3b09ad39d2e9d01fdf
165a820b9bc855d6ed662760be3cefd10e4f759159c6585620009392760b52fe338df1ee124b841be30807513a86e43eccf43e6093e2377
2d7ffba037de8137ecb787ccdb4ef5cd77982ba2068d106078625450e6699dc816b787a4a2d928001d45971fd541fcdae5d5b149003bc14
c765fc0af7610e4546a2fb2448cabfcdedffaade35f37555d2e81ddeedd1aeffaf6406b4210afb99262a7013a3e906ce1fa108ab9c1112c
29767c04c7f75b085d5557954012076911f26a9d534a81fbdb42d8f91aa4eff22d650b0c7ec622c7d747656a71abf7a2b344ecb455a6295
8f2d81065b624785bd53b35d4bf5968110149ecf44badcc730c803cec835c27879ab7d4d2e7857bb48f81411eeed56414d37e957c1fe7a9
697837d7f68338dd4e7eb89c4cfe2e68043707a9b8fc9684ba11022f4d25589cccc8e0a57405a05677ec19197a57fee305351f32b3430b9
a6c3456c4ee3e20d71e212a67d2a8c8c90b545e02659882bf76f3afd2b7a06bc6f704bb6c13ab51a1a15cc10c7803f659cd793e14fff373
6183f441c8d572cc507d3a4aa4a484d3adf6d0e6321f67b1210441beb5402a711976a84d99244d03014fcc78e738b7fb2a25168dc88c3be
8cd809778b5ea8cf583e30868d54de93a5c4f7d77ce1c74152bb026038b6731f51ae7336e96acf73667c948c6cee32894821936bd329f0d
82c2b529e816e3cfd3dae745a69afdeaf24b1c3ee79cfc119c896472b6caf6582ba68db0062ffe0b01820b88856174242c44101b23f906f
c03be1c3db3b3cde1bf6eaaad8b02fac4cb477e93012d73d4cb318a77681a066d9dabc4aaf1d68c9aa96f613f9b1b8963880c7af3724df0
8ffcdb717d7fbede993e73b90b0684b05f82c8d79b6bbece07439c75ca3bc5cb523e93522950285c53087e5045294c89358de4b2b9de18e
b21486ad4acc5a1d1e5908175a1308ffc9e020c03b589a95ed6a66b7b3910ceaf367e368cb25c293112f45d04f489f5687f3a9876cc8063
1bda2c902f94fea1c465043e699ab7a99b8d03dbf23bd5eb8f6132c74465ff34ff070cf10b99e72909d8a0367bdeabf9867562ddf4392ae
a4fe1971b6bca3e1631d27b8b1ee3f50dd9f6f7722f34e1f1abb9574746b138826f100523b68c151e4037754317e63d93cf061de2d314fb
abe5aa19b5dc1347a8e2415099f0e7e3e242abe53c020ab02fab99e3ba821dc13edf9f3aece0a024a46d9157c2884e51884b9eef2fdd612
22f35f5c8d7ce3ea06dec31b9353826c0c6fce497670f644439072dfb59de45889ac5f410a385c0ce193b376d61777ba85f56cc0df5e088
898a2811ea2f52046f4e971f472511c8510b5340d9fe76e5d14fc897b9cd9e67d7063e400688c26758deeba47fd9bdf844839527ede9c36
3ba1b47e98506475833e6bedb5f6b7a5eaef6b815452d605e7f1b7d0b9c06d0285d86ea00e95609d11f4aebe41ed5fd19cbd555bcf43a8c
24a9cd747d532c409ed611415885f743c0b70e737e25d44c1f0ecbde8b346c21e4c4872bf4bf73ab6da3523958c0c611f4a0eace22e5243
1ac0977ce345aedfa88cc2ae1a403f77b4256aaefc650eb4f749e37777d81897c7dd01bf405018d7b706b88764d2ef6f99964e9bfab029c
0cee623b94fe06eefdceaf927f22e532e004cf1c2b82d9b66d50fb123931d1e01b9412d8599f4b8b9d91f371accc4c2374afc9b8a2dbe81
3473be99905565bb52784e79dc13ed78edaeb8bd4d8e27a824327c4a07d93967d18be2066e045abcc581948807e78a6ed447e7b1aeb79eb
1cbaf15c82469bd10c876cc19e74b29dd1922fba4d74f2fef987c5cea3c016fbb6e5ef289a4d7bd146476af6f88d8c9220b2212f4481d53
f3df0f40596ab6550ecad9ec7be3481613ae385d078d074d432da1a440b62f3fe60c46aa3947b22344bf648db6a545110352bf255cdd7fe
d7ee0b70b01630792e6fa50fe146152a5bb4502f08c8f3bbbf16d5ecece1d66c04f553b84700839138304f42eed49e2cb3179bbc4542a43
9d3079a917d93103eb74479463e6741d8ac184da156821dd81ae5819950feda921b5a018898fa6574fa4f59e825062f68d6c0989ddc4119
6ebbdaea7908cdac7aa12881951f1c945e9f5dddfbf6c7223b494272836f4089250fdbe42cfe216c7d70188c3db8a7351ef579167123a3a
461f9bad886e6dc7694b64a754dd091cc8777a74442f282ad59c1690f24ae20880ab2cebe1bdca36fd24a02b04052b32c94f056cf819555
36b1e6b6422ddfeac48ac4464c1220186093be0dfc6d29faf6154b25e72fad0dad90a29a5148ad0b59ef546a0540312d17346cce86d18a1
f41f78ac9e0b9197d6819b02632a1718c23b34fdb06c2e5e0f287006093ad77471782eb7fd43f60008ba604023719626e4a8eb60b73b0fc
ca105fa6fcff6f3ace245e6730c089891c40f4c02cb691fedff32a8f18636ef1354781f4a44ff38ec2767436356f5d626c8365516dd25b8
9c51345b9b55a0e80fd167f23ec21d3b415729003a979c316d02e3b0f039574fa3505c24bca5e7884a267315ffae85a78a4e55f801ba495
c2291dffb2feaa0b70ceaec801bba18fb38f7bebdb5daca99a808c80824027e6b9647a1cd02a162f3129db3c38b6ff02c88ca94c55a0c49
856102182a3cb26eff54fef90fa0f53fbb350a723bd2d72297770b67c75d3e01ecfd1f4612027e5a8d42de782b9b04663a495949918831f
7b200905028cb4e5322722cf56b1531269e5bf518a1d5cde1cf1a91072f537aad0f58b3ae4734a2e0504d2ba14084addfe06fdc6ca33e7f
9be0564be136fbc616f134982000047f585e3bbdee15c34f02619a04f1eb0d320fcb93890475711a4761cb1e67198b852b7b605037340ae
ca68ad6c1b6d81ee24b622e922ff6c16faf88ff75728ae344d1a50bd51f26c9cad3b5608b33c858e57feaf3ff882f4a40c7fe51a14dabe8
9f0f40253ecb7bd1d4d5291ed3c1b88b1da932a487d22b14181ffb770911bbebcfbf19a9843bffe3209785b02b23fe3a6176bf208f2ee25
f20332657d2ca6728b9865a6bfdc9a9055a6ab86cb782307c2c0255525884bf8060f8aed7ec034f3ea0489f0f1d61cdcbea1ff69835df7c
41b6a448c2e8274c99e9caa0339c052fbcbf6648c2780f119607c6355b8c37e83a11858b3f296815f2c60da62c68358653db2fbc738d4a9
20f8bb8d94784228934870046f60ac937af4957c9414bcc33895fcc183062be4e4bff56f9c89838ead8e433dc604536938
''')
db_write_enable = unhex('''
0602000001f48001074892b6c57deb7889b5ebf86bc3040f6d91ff1f68765f046591184be08cf36c154b7ec5368139d0f9532382214379a
ff3ffbfe4659e2f274e864bd0ad660f99e21da2bab677dbfa907a66ce110c180d2ddc5dfe40b8ed975cbedffc11631f12f8bd646a0ee82d
44d2a6c1ec9cfbd40f485cb3d9124376b97b4a3349b0a730adda626d8ac28ec20e886aab1b8851deee3431c4d89c8bb3e787eaa9c0323df
e583d5424d36436e4435043e04fd4ea46b1fb2507ca6f0eb03baf27c84b819cbc96cec31a78045eb648339e2aa4789e7672d933936005f4
720c8ffdc1ea23a4f30a1cdc8f6e87775c241b9ab1566f7771857cc4703d571f1106c526f95232925a6a93ec8e919022fbe303a515f9aaa
8ca215072069311dd3f97d9a4f56259bab3a1b7a8582d6dc2f92d49f023d6f25a05837e1536a633e252ef645225f4293955041a0d54dcb1
d1dd7e097b7839de5fde2a6ce999966d712b4cb2fd9d7830031da55d9faa99f866fbb7e520566efba43c2509286bf28e1a20c6a836db8a1
fa4cb9b8d193780aab592d41653839670129066ac56f1268e6f761337f768555e13c5d68137c60f83dba8dc3863e00e73fd3af21e23a566
daa67f3f14dd934e323651167021ca6b82a6103cb30be849446e2f54dde64a05377052b57332e9bf08a18cf52da2a13ebbd55e60333f8bc
319e1457f38ec5d4839ec0ecd034825bdeaf64926858c6e8c2df418717b5f67135abc038835d3e4e1aa809546fd0d7f01066a71537f96bd
1ecec3687583e1b511bf48c2776f4670158e56164c62da20f671764c785c352fc3cce22cefa20760acff8f45efb54a934f9834d54f9701d
edacd4d383ac01f8cca92562eec774a58da6f55da252c491ee2ab58ff769f89a9649d3956682ca7d06bbf33f9a935b781dfc21b123b1669
4424e72d6a3e6781dcf195effd36470a4eab0fdc74e87102879ec81fea6549920cce454ac7813997b82d51e7b8c1ee24fad389904478f84
7654ec3a63bc595b9a7dde798db5c0b6f244901f239e7674c98eebb42b66e8956a733c3796586280a19a1df8a69224acd2556f7ec2e27ca
e37c69b332b2c0ec85991ae48722f988935f658b9cf32f46dfc6d96a5a36f18b6bf9f657b59b3da42414e4d56c0a24485aa298d2d0d1b17
7e7d0dafe602a4fb4f423def4bdb010fdc626c947587e19e7e4b0e6f9f2da41c29a8f1903d0d2803365fe0a113abba19220141d1ac7cec6
83962030d3f6591f98ea3dd09162715e5c12f40332b47c53164532827e5596fb2cc0aa8f31683cc63ec14c034c6f3d2c70b8c47611b4c5c
b5348a2559fb162a780a2b403b0120a6846e27d6057a3ab9e1b18915ae2039e81cc6c50d2a14d5913617bacae78fe9b91e9e49d2e82def4
7565c12ff938b182f8ce941d2781b773479538c76ed9f7d4469f6fe5ba7f6e3ad98871b2866f0ef4f36277daa76c1042c83f77df0ff2e26
39540bb355ea84273411c453081bd1e1035c402c53190d0bd905e8d01fc3787c65b69172cca5b234e92e358463bbb8d23e38c74a3a8e273
5542b996ba5ec22c5095a777b6775a728df59c3560c7f36b83d55f819f196573f8fd356379fe9a5e7cecb376395e01309e2005b29e3b160
cb74c6a5856093480dd06aea5fb3fbe23e004f8d7a38fd07866cdf24161391cc756f6ff71ff072e308b35e259435111bee09ddf2b8df99d
0f2c2e8edaa4ecaabc6975a58f23bb6bfc94ebcbbba0d581f16be9d043c4e410b321c6df424ecaeea94edbe5801eb786199124222b091e5
b33bad6761445a8a6606d0e781c07a6f91cd5fe188ddb9f9e17f5e07b0cba319c52e5fb03f53df570f82ddb603d305b72a2406bc7c1a37f
920405f8f14d3ddf5d836ba68d83c1a8d7f1a41d148cc34b1ef996ddfb43ef19d2fbf0adcad301a473497739eaa10bbce85e15c32f1d90c
8ab8605d0ae941eb914086592d087a521fde33a676cdfb94a4247f60f51edd37294511e92ec71a9a54bab68a0edaabdcb2c1a3adea778f4
16e39200af4c517dd7152bb72476c5d1413f04704615d795300f3a091214f4e4ac2ef41969c81f8fc08610864907b2e6edfa5fdb0926b6f
064b2a1c3b8c7b631cc75663cedad5e71868abc9bac678e430144613cb0e51982b9e019099026b069bb7a4dc376cdd6a3c5956631797621
3672754fac87df85953cdc0de276fb8742f48ba218d4202fe6f865834152979d6da9b473e5d476c0aaa68491f545091b879c01986078d64
fa5f49f60e615cb865f154f48b45173a1dc85f2eb1128652290bd383cdedcd8f280117e60be034ce224f92673934ed9e0077d5f7899f4e0
eee097933a35e40f205d84a10733f492da619802ff70d9b949ca0c2bcb9ba68c290f2ef9a20a3bf496834c66956a8ec4179266999d9f87b
dfc14aea865f0487e2be10a64becba69547d01658935e63703986a56d6ce38fe66dbf61d754ba9a1a278353913422e4f2e4100c5962999a
3eaa3e1672bc73edcfcc7525a2d3dbe95683b4bf38f7444ac0f470f0e98079916e4e1fba3fcd5b082fc2772e63b5e0663f87638a163858f
590845240a8c22dacf6f7999c431a2ab5204a7da7839c9a932608c7f83a87d1d73d7d8b2fec65abb95221fada44365fe21061dbcde52cb8
4cbfe9f061c4dab3be86002e7683eed16c23c687ce61c5d923ffbab40bee6ae93ed7f857f304e5eb16ec6d088563524e90d916e41a3a8c7
777e29731f0f45c125082c423a5c02704c07c6fc19b1c4838ee3eabe12562829e67581d312c720b792a41744dec1e157426ab75136d31ee
2f208147039091453c0b0e3970c5624d7a53df8076e9d1625d2c8e693e0e9a81e23862dca78921b66ca4c3c5ed35b0b5ed2e24622eb216b
a0ba6e0c0eaf97c754eeb3db4a506d5854a3edc92d0111af3d2135a998729123f03d0f9366bb0d2c681cfc62c59bcd75c6b410d8e6997cc
a55c989f010393d6c242f7ce1ea71c6f262e498855584347b04ce26cce2e822b8c6b7b4937148a45c947073b300f7c72b6e78c4231078d8
0531b7f9317c1bb4d6070f299e9a97731b1befeeec2dae0a1a0364568acbebab069a4b90147776ff7e7f79c1cc98b2fe6214792501554f4
195783b0f9188ccfe96ad8cd29f5463409c2054e4a2496ee65eaa1fcda3d7764cd3e8431e44a2b05e64aa2f9fb0d13456bfea9c91ec2d90
d0099e7e395dce818650dcaf8bdfe23b4c6443f5c690b18ead221a6c2bcd34572ffb83b3332eafde6e25b37ff3ac6da0c3cc697b996265c
aa5a53ce44570303d7d111f44c635119595c247e86a32083f2865501752f93e3024b2e2b6d82d0c03b745bfd809af7e8e1349d1a79bed51
bba415064701a2a7890e8f39937c6d2f563b0747bd94f1b698624b4fd17dfdf68ffdc0450c26d771f8ff4fb01a26ff8f64eb5b6d9153f5c
e29d9dfcf84ca230a4c212401b437d1137f83a44f7a98a9fd1bc3d883e6227ce369ed32a960550aa863f3d014de7494dead34fced1d7b4e
ab651d499033589446fb5a1564557d63e724941e77ae3f46b79703d06277d87356999b51f61893d31c7231b0c635f1d83ab38a0dce544f5
f6803861d6e3d7e70d617ecc593920b1ab9006bdc7bff34a8b36a7601eb170a040156b4567ab37f55fdf2d466fca9374277322f21811d02
c7bc599c9ed5c2b1fe7b6baa19b1b0a30f79f8641b97bf66491dca0b4c0341367aa5acec1398bb37c037d81ac2368db49c5d5720bbfb746
6ba616c70c7d834286303047357da0e9a34fc14b00c17a0a02f6a62a5b52976b00ed67bb2d0aa1b4a8a93100b799e1839695bdae9b98e75
c8df5d8340d158be60379a6f626af052ad55c5cea01f806048e937f87e01e725e67620364e511afd288b25953e9ade343b596068608190f
a5c4df114c93d3c8deca929c066d8bae5ac2d607e3f94d68a5d3554827a64735a43c462bc3682cc1664411f592c9456f53da1026f51459a
0cf20cc171b9b6bede47ce57d845dffe1025c6eb240615da151106a5601b75c24c673d6ea818d60c31f414aeaa55597b40cc4f2ed2b3850
d366084a52513420b013695e2bfcb0dbfad0014975c67471a3807528d15730802a4428842c6368c72650b3161265d6b86007264cf093a31
7fee4ee388e7721a02434c514324cbf85cb57f709b53fdf69624adc29b85518f1a051f2473ad9384d7ac57c2a780ab72506ba925fa39992
dd2d0b00ffd8c38645d55c2ca2ae94cf4ffa372284a28a13797e25eb0d950c08371656a889e6189f83b9c0c8e06952b34fe13ccb5c3b2c8
2f2d988f6d9a233f1a9e64de97218be12ee7a8e8463d82131624ce167e744a3ca3915c78e6e76362d06090e2a7ed60ea7437c84838d8aa7
5f096c9a928e403e2428550c98de8c43bd25e24520aef6ee534ee1d4702175e6b25d030b87941845cefc1dc289cee33c72139f29839af81
cb6a097d114311a01736f479bdae32a59398fc4a7494d034fc8dc5f2ba8af93fc4c576b703967ae593780413b44b98f4baba9d3fd7b5571
5ad5e5c41f9361a42a757d9a6d7220a9467e19f739877076164c142d40bbae9501312c394dc0233dc5868814162bfc1f10bd4663b285dd2
d005f3bc3dad2ff023f7e81b799b1b323b37e82fc99dc81291cf93cc04a0e05aa674bcfd3bc0d930a10d0957ec7712b8cc78375dd904eb5
a468296015dab1babb076786f305c8ad90ca3947b150da79cb94037e970e9180437ea34c72771d673000826741fe759fcdc2b03558331fd
f5b89d6e3f25a05241f32f139e898126aec8b1715cac0208831fb1205f9efb75538755b2d83931c7ad9e252c88c8af3c5df62fb99653aff
99e6c6c051a9a1241381cd5ce1307261f866575caea0a3e847286ecc67d7d9aa18f48ef2a5e5f183286127f8b9aa2caa0869ec5e474a70e
5427dc2f0488b134d201241dae68ed3996869453247bb50d8bc3d3c909951e5a47b1e899610347ea8d7193346bfe754c289ad1ca454b9c9
2a07527b95a1fe508d0b7c8da5b9047d277508ff615c9dc9ab11596aa88d0c9734a45d81f0393219bead587d3a6f9d07c470f2abf8d7c69
92228bf0ab6ef79e46599bb0a60
''')
| 106.934783
| 111
| 0.989284
| 327
| 34,433
| 104.149847
| 0.981651
| 0.000763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.627577
| 0.009642
| 34,433
| 321
| 112
| 107.267913
| 0.371133
| 0
| 0
| 0.012618
| 0
| 0
| 0.995324
| 0.986263
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003155
| 0
| 0.003155
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18070e5e5294df43ad07237dbd5eb6d1bc6ac667
| 5,717
|
py
|
Python
|
spec/codecs/netflow_stress.py
|
andsel/logstash-codec-netflow
|
bce0308260ebcd72af825cd52f48b0af88dfe66d
|
[
"Apache-2.0"
] | 85
|
2015-04-12T01:22:25.000Z
|
2021-11-09T10:55:58.000Z
|
spec/codecs/netflow_stress.py
|
andsel/logstash-codec-netflow
|
bce0308260ebcd72af825cd52f48b0af88dfe66d
|
[
"Apache-2.0"
] | 186
|
2015-01-12T20:51:46.000Z
|
2022-01-27T13:31:33.000Z
|
spec/codecs/netflow_stress.py
|
andsel/logstash-codec-netflow
|
bce0308260ebcd72af825cd52f48b0af88dfe66d
|
[
"Apache-2.0"
] | 107
|
2015-01-12T19:42:20.000Z
|
2021-11-03T11:08:57.000Z
|
import socket
import sys
import time
import random
## Standalone Netflow v9 stressor
## Used to reproduce issue 91 https://github.com/logstash-plugins/logstash-codec-netflow/issues/91
host = 'host02'
port = 2055
tpl = '\x00\t\x00\x01e\x9c\xc0_XF\x8eU\x01u\xc7\x03\x00\x00\x08\x81\x00\x00\x00d\x01\x04\x00\x17\x00\x02\x00\x04\x00\x01\x00\x04\x00\x08\x00\x04\x00\x0c\x00\x04\x00\n\x00\x04\x00\x0e\x00\x04\x00\x15\x00\x04\x00\x16\x00\x04\x00\x07\x00\x02\x00\x0b\x00\x02\x00\x10\x00\x04\x00\x11\x00\x04\x00\x12\x00\x04\x00\t\x00\x01\x00\r\x00\x01\x00\x04\x00\x01\x00\x06\x00\x01\x00\x05\x00\x01\x00=\x00\x01\x00Y\x00\x01\x000\x00\x02\x00\xea\x00\x04\x00\xeb\x00\x04'
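# (Editorial note: the leading bytes of each packet form the NetFlow v9
# header: \x00\t is version 9, followed by the record count (\x00\x01 here,
# \x00\x15 = 21 in the data packet below), sysUptime, export timestamp,
# sequence number and source ID.)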
# 21 flows:
data = '\x00\t\x00\x15e\x9c\xbcqXF\x8eT\x01u\xc6\xa1\x00\x00\x08\x81\x01\x04\x05\\\x00\x00\x00\x01\x00\x00\x00(\n\x00\t\x92\n\x00\x1fQ\x00\x00\x00n\x00\x00\x00\x9ee\x9cG\x05e\x9cG\x05\xd3\x01\x01\xbb\x00\x00\x00\x00\x00\x00\xfb\xf0\n\x00\x0e!\x10\x14\x06\x10\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00h\n\x00\x11*\n\x00#\x04\x00\x00\x00W\x00\x00\x00\x9ee\x9cI\x88e\x9cG\x07\x8e\x84\x01\xbb\x00\x00\x00\x00\x00\x00\xfb\xf0\n\x00\x0e!\x15\x10\x06\x10\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x004\n\x00\x16o\n\x00"\x8d\x00\x00\x00h\x00\x00\x00\x9ee\x9cG\ne\x9cG\nA\xae\x01\xbb\x00\x00\x00\x00\x00\x00\xfb\xf0\n\x00\x0e!\x18\x10\x06\x11\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x01\xb3\n\x00\x17;\n\x00$\xaa\x00\x00\x00V\x00\x00\x00\x9ee\x9cG\x0ce\x9cG\x0c\x005\xfd,\x00\x00\x00\x00\x00\x00\xfb\xf1\n\x00\x0e\x1f\x19\x13\x11\x00\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x03\xc9\n\x00"G\n\x00\x14\xf2\x00\x00\x00\x9e\x00\x00\x00je\x9cG\re\x9cG\r\x01\xbb\x07\xdd\x00\x00\xfb\xf0\x00\x00\xff\xa2\n\x00\x12\x05\x10\x15\x06\x18\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00h\n\x00\n\x85\n\x00\x1ef\x00\x00\x00n\x00\x00\x00\x9ee\x9cG\re\x9cF\xba\x89\xc9\x00P\x00\x00\x00\x00\x00\x00\xfb\xf0\n\x00\x0e!\x10\x10\x06\x10\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x004\n\x00%\x1d\n\x00\x06\x18\x00\x00\x00f\x00\x00\x00\xa2e\x9cG\x10e\x9cG\x10\x00P\xdd\xc3\x00\x00;\x1d\x00\x00\xff\x97\n\x00\x00\xf2\x18\x10\x06\x10 \x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x02f\n\x00 \xb0\n\x00\x0bq\x00\x00\x00\x9e\x00\x00\x00.e\x9cG\x10e\x9cG\x10\x01\xbb\xdd\xfe\x00\x00\xfb\xf0\x00\x00\xff\x98\n\x00\x12i\x14\x10\x06\x18\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x03\x00\x00\x10\xfe\n\x00\x0c\x15\n\x00\x0f&\x00\x00\x00W\x00\x00\x00\x9ee\x9cG\x11e\x9c1\xe7\x01\xbb\x9c\x8e\x00\x00\x80\xa6\x00\x00\xfb\xf2\n\x00\x0e\x1b\x18\x18\x06\x10\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x02\x00\x00\x02\x15\n\x00\x04\xd4\n\x00\x03n\x00\x00\x00\xa2\x00\x00\x00fe\x9cT\x07e\x9cG\x12\xc6\x03\x01\xbb\x00\x00\xff\x97\x00\x00\x00F\n\x00\x10e\x10\x11\x06\x18\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x01E\x00\x005\\\n\x00!z\n\x00\x01\x88\x00\x00\x00\x9e\x00\x00\x00he\x9co\xd0e\x9c"\x1a\xe5\xbe\x00P\x00\x00\xfb\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x15\x1b\x06\x10\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00Y\n\x00\x14\xf2\n\x00"G\x00\x00\x00j\x00\x00\x00\x9ee\x9cG\x14e\x9cG\x14\x07\xdd\x01\xbb\x00\x00\xff\xa2\x00\x00\xfb\xf0\n\x00\x0e!\x15\x10\x06\x18`\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x03A\n\x00\r\x19\n\x00\x0f&\x00\x00\x00W\x00\x00\x00\x9ee\x9cG\x16e\x9cG\x16\x01\xbb\xc9\xa5\x00\x00\x80\xa6\x00\x00\xfb\xf2\n\x00\x0e\x1b\x18\x18\x06\x18\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x02\x00\x00\x06Y\n\x00\x19;\n\x00\x02\x12\x00\x00\x00\x9e\x00\x00\x00ne\x9cG\x18e\x9cF\xbf\x01\xbb\xf4\x00\x00\x00\xfb\xf0\x00\x00\xff\x9d\n\x00\x12~\x10\x10\x06\x18\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00a\x00\x02+h\n\x00\x07I\n\x00\x1b\xa8\x00\x00\x00V\x00\x00\x00\x9ee\x9cu\xabe\x9c1\xfe\xeb\x98\x01\xd1\x00\x00\xff\x9c\x00\x00\xfb\xf0\n\x00\x0e!\x10\x10\x06\x18\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00:\x00\x00\x0b\xc8\n\x00\x132\n\x00\x1b\xa9\x00\x00\x00j\x00\x00\x00\x9ee\x9cO\xcbe\x9cE:\x86\x94\x03\xe3\x00\x00\xff\xb7\x00\x00\xfb\xf0\n\x00\x0e!\x12\x10\x06\x10\x00\x01@\x00\x01`\x00\x00\x0
0`\x00\x00\x00\x00\x00\x00\x15\x00\x00{\x0c\n\x00\x1c\x96\n\x00\x18\r\x00\x00\x00\x9e\x00\x00\x00he\x9cHYe\x9cF\xf0\x01\xbb\xc2\xfd\x00\x00\xfb\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x10\x19\x06\x10\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x03\x00\x00\x0bg\n\x00\x1a\xbc\n\x00\x15\xc8\x00\x00\x00\x9e\x00\x00\x00We\x9cGfe\x9cE\xec\x03\xe1\xc4N\x00\x00\xfb\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x10\x19\x06\x18\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x05\x00\x00\x11\xa2\n\x00\x1d"\n\x00\x0f&\x00\x00\x00K\x00\x00\x00\x9ee\x9cm`e\x9cA\xfe\x01\xbb\x8c\x8f\x00\x00;A\x00\x00\xfb\xf2\n\x00\x0e\x1b\x18\x18\x06\x18\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x01\x00\x00\x01F\n\x00\x08\xc8\n\x00\x05\xe0\x00\x00\x00f\x00\x00\x00\xa2e\x9cG\x1de\x9cG\x1dZX\xc9\xd7\x00\x00\x03\x15\x00\x00\xff\x97\n\x00\x00\xf2\x10\x10\x06\x18\x00\x00@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00p\n\x00\x1d.\n\x00\x0f&\x00\x00\x00K\x00\x00\x00\x9ee\x9cG\x1de\x9c@\xea\x01\xbb\xcc\x8c\x00\x00;A\x00\x00\xfb\xf2\n\x00\x0e\x1b\x18\x18\x06\x12\x00\x01@\x00\x01`\x00\x00\x00`\x00\x00\x00\x00\x00\x00'
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print("NETFLOW v9: sending 1 template 1 data packet in an infinite loop")
duration = 0.0
while True:
for i in range(0,400):
sock.sendto(tpl, (host, port))
sock.sendto(data, (host, port))
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(random.random())
print  # Python 2 print statement: emits a newline after each burst of 400 packet pairs
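# Editorial sketch: a minimal Python 3 port of the same send loop, assuming
# `tpl` and `data` are redeclared as bytes literals (b'...'), since
# socket.sendto() requires bytes rather than str on Python 3.
#
# while True:
#     for _ in range(400):
#         sock.sendto(tpl, (host, port))   # template packet
#         sock.sendto(data, (host, port))  # 21-flow data packet
#         sys.stdout.write('.')
#         sys.stdout.flush()
#         time.sleep(random.random())
#     print()  # newline after each burst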
| 184.419355
| 4,680
| 0.729404
| 1,347
| 5,717
| 3.093541
| 0.16778
| 0.5054
| 0.468683
| 0.449244
| 0.564195
| 0.533237
| 0.515959
| 0.447564
| 0.411087
| 0.403888
| 0
| 0.369154
| 0.020115
| 5,717
| 30
| 4,681
| 190.566667
| 0.374688
| 0.023789
| 0
| 0
| 0
| 0.105263
| 0.929865
| 0.916771
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.210526
| 0
| 0.210526
| 0.105263
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
180da8e8d342ff97d31fc39f55105a5257216d9a
| 4,202
|
py
|
Python
|
map/migrations/0002_auto_20190426_2013.py
|
KushalVijay/DSCSolution-Step-to-Cure-India
|
d452e46b3e2879bdd7f80461cb42814795584783
|
[
"MIT"
] | 1
|
2020-04-26T06:35:08.000Z
|
2020-04-26T06:35:08.000Z
|
map/migrations/0002_auto_20190426_2013.py
|
KushalVijay/DSCSolution-Step-to-Cure-India
|
d452e46b3e2879bdd7f80461cb42814795584783
|
[
"MIT"
] | null | null | null |
map/migrations/0002_auto_20190426_2013.py
|
KushalVijay/DSCSolution-Step-to-Cure-India
|
d452e46b3e2879bdd7f80461cb42814795584783
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2019-04-26 14:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('map', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='city',
name='accuracy',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='admin_name1',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='city',
name='admin_name2',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='city',
name='admin_name3',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='city',
name='country_code',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='latitude',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='longitude',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malaria_cause',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malaria_deaths',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malaria_risk_factor',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malaria_risk_index',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malnutrition_cause',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malnutrition_deaths',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malnutrition_risk_factor',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='malnutrition_risk_index',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='pincode',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='place_name',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='city',
name='population',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='tuberculosis_cause',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='tuberculosis_deaths',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='tuberculosis_risk_factor',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='city',
name='tuberculosis_risk_index',
field=models.FloatField(blank=True, null=True),
),
]
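# (Editorial note: every AlterField above relaxes a City model field to
# blank=True, null=True; the file is an auto-generated Django migration.)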
| 33.349206
| 74
| 0.535697
| 378
| 4,202
| 5.809524
| 0.169312
| 0.200364
| 0.250455
| 0.290528
| 0.882969
| 0.882969
| 0.86612
| 0.86612
| 0.86612
| 0.844262
| 0
| 0.01207
| 0.349357
| 4,202
| 125
| 75
| 33.616
| 0.791149
| 0.016421
| 0
| 0.745763
| 1
| 0
| 0.107865
| 0.023471
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016949
| 0
| 0.042373
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
1816a03089b5de39cd8afc1b0bdbe2730768077a
| 22,557
|
py
|
Python
|
bombbomb/apis/files_api.py
|
bombbomb/bombbomb-python-openapi
|
d1623cb06e58fdc83b04603a589e9d30e7eb3fdf
|
[
"Apache-2.0"
] | null | null | null |
bombbomb/apis/files_api.py
|
bombbomb/bombbomb-python-openapi
|
d1623cb06e58fdc83b04603a589e9d30e7eb3fdf
|
[
"Apache-2.0"
] | null | null | null |
bombbomb/apis/files_api.py
|
bombbomb/bombbomb-python-openapi
|
d1623cb06e58fdc83b04603a589e9d30e7eb3fdf
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
BombBomb
We make it easy to build relationships using simple videos.
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class FilesApi(object):
"""
NOTE: This class is auto-generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def doc_host_delete(self, doc_id, **kwargs):
"""
Delete file
Deletes a user's file
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_delete(doc_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str doc_id: Id of document (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.doc_host_delete_with_http_info(doc_id, **kwargs)
else:
(data) = self.doc_host_delete_with_http_info(doc_id, **kwargs)
return data
def doc_host_delete_with_http_info(self, doc_id, **kwargs):
"""
Delete file
Deletes a user's file
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_delete_with_http_info(doc_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str doc_id: Id of document (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['doc_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method doc_host_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'doc_id' is set
if ('doc_id' not in params) or (params['doc_id'] is None):
raise ValueError("Missing the required parameter `doc_id` when calling `doc_host_delete`")
resource_path = '/files/{docId}'.replace('{format}', 'json')
path_params = {}
if 'doc_id' in params:
path_params['docId'] = params['doc_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['BBOAuth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def doc_host_get(self, doc_id, **kwargs):
"""
Get file
Get a single file by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_get(doc_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str doc_id: Id of document (required)
:return: HostedDoc
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.doc_host_get_with_http_info(doc_id, **kwargs)
else:
(data) = self.doc_host_get_with_http_info(doc_id, **kwargs)
return data
def doc_host_get_with_http_info(self, doc_id, **kwargs):
"""
Get file
Get a single file by id
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_get_with_http_info(doc_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str doc_id: Id of document (required)
:return: HostedDoc
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['doc_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method doc_host_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'doc_id' is set
if ('doc_id' not in params) or (params['doc_id'] is None):
raise ValueError("Missing the required parameter `doc_id` when calling `doc_host_get`")
resource_path = '/files/{docId}'.replace('{format}', 'json')
path_params = {}
if 'doc_id' in params:
path_params['docId'] = params['doc_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['BBOAuth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HostedDoc',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def doc_host_list(self, **kwargs):
"""
List all files
List all uploaded user files
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_list(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[HostedDoc]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.doc_host_list_with_http_info(**kwargs)
else:
(data) = self.doc_host_list_with_http_info(**kwargs)
return data
def doc_host_list_with_http_info(self, **kwargs):
"""
List all files
List all uploaded user files
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_list_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[HostedDoc]
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method doc_host_list" % key
)
params[key] = val
del params['kwargs']
resource_path = '/files'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['BBOAuth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[HostedDoc]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def doc_host_upload_v2(self, file, **kwargs):
"""
Upload a file
Upload a new file
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_upload_v2(file, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str file: The file being uploaded (required)
:return: list[HostedDoc]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.doc_host_upload_v2_with_http_info(file, **kwargs)
else:
(data) = self.doc_host_upload_v2_with_http_info(file, **kwargs)
return data
def doc_host_upload_v2_with_http_info(self, file, **kwargs):
"""
Upload a file
Upload a new file
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.doc_host_upload_v2_with_http_info(file, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str file: The file being uploaded (required)
:return: list[HostedDoc]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method doc_host_upload_v2" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params) or (params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `doc_host_upload_v2`")
resource_path = '/files'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
form_params.append(('file', params['file']))
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['BBOAuth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[HostedDoc]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_hosted_images_paged(self, page_size, page, **kwargs):
"""
Get paged hosted images
Get a specific page of uploaded images available to the user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_hosted_images_paged(page_size, page, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str page_size: The number of items to retrieve in a single db query. (required)
:param str page: Zero-based index of the page of data to retrieve from the db. (required)
:param str search: Filter results with names that match the search term.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_hosted_images_paged_with_http_info(page_size, page, **kwargs)
else:
(data) = self.get_hosted_images_paged_with_http_info(page_size, page, **kwargs)
return data
def get_hosted_images_paged_with_http_info(self, page_size, page, **kwargs):
"""
Get paged hosted images
Get a specific page of uploaded images available to the user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_hosted_images_paged_with_http_info(page_size, page, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str page_size: The number of items to retrieve in a single db query. (required)
:param str page: Zero-based index of the page of data to retrieve from the db. (required)
:param str search: Filter results with names that match the search term.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'search']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_hosted_images_paged" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params) or (params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_hosted_images_paged`")
# verify the required parameter 'page' is set
if ('page' not in params) or (params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_hosted_images_paged`")
resource_path = '/files/images/paged'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page' in params:
query_params['page'] = params['page']
if 'search' in params:
query_params['search'] = params['search']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = ['BBOAuth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
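# Usage sketch (editorial addition; assumes a Configuration already carrying
# valid BBOAuth2 credentials, per the auth_settings above):
#
# api = FilesApi()
# docs = api.doc_host_list()  # synchronous call, returns list[HostedDoc]
# thread = api.doc_host_list(callback=lambda resp: print(resp))  # async call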
| 39.093588
| 113
| 0.56488
| 2,426
| 22,557
| 5.039571
| 0.095218
| 0.065434
| 0.022902
| 0.029445
| 0.892361
| 0.88091
| 0.865042
| 0.846475
| 0.837069
| 0.828562
| 0
| 0.00158
| 0.354524
| 22,557
| 576
| 114
| 39.161458
| 0.838118
| 0.34885
| 0
| 0.741445
| 1
| 0
| 0.152338
| 0.042985
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041825
| false
| 0
| 0.026616
| 0
| 0.129278
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
186089ac079fe93691c5ed9c3d718790ce31b13d
| 4,021
|
py
|
Python
|
interpreter/definitions/truth_table.py
|
dragonoverlord3000/PropLogic
|
583ef0e4beef8500d29ba54c1a4c7071d2a4fddc
|
[
"MIT"
] | 2
|
2021-09-12T08:46:12.000Z
|
2022-01-14T09:07:46.000Z
|
interpreter/definitions/truth_table.py
|
dragonoverlord3000/PropLogic
|
583ef0e4beef8500d29ba54c1a4c7071d2a4fddc
|
[
"MIT"
] | null | null | null |
interpreter/definitions/truth_table.py
|
dragonoverlord3000/PropLogic
|
583ef0e4beef8500d29ba54c1a4c7071d2a4fddc
|
[
"MIT"
] | null | null | null |
from interpreter.definitions.tokens import Token, TokenType
# The important part is the `truth_table_dict` at the bottom
"""
These functions take Tokens of type TokenType.TRUE or TokenType.FALSE as input and return a new
Token of either type TRUE or type FALSE, depending on the input.
"""
# Negation '~'
def negation_truth_table(a: Token) -> Token:
if a.type == TokenType.TRUE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE:
return Token(TokenType.TRUE)
else:
print(f"Error, token type ('{a.type}') is not boolean")
# Conjunction '&'
def conjunction_truth_table(a: Token, b: Token) -> Token:
if a.type == TokenType.TRUE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.TRUE and b.type == TokenType.FALSE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.TRUE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.FALSE:
return Token(TokenType.FALSE)
else:
print(f"Error, either token type ('{a.type}') or ;) token type ('{b.type}') is not boolean")
# Disjunction '|'
def disjunction_truth_table(a: Token, b: Token) -> Token:
if a.type == TokenType.TRUE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.TRUE and b.type == TokenType.FALSE:
return Token(TokenType.TRUE)
elif a.type == TokenType.FALSE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.FALSE and b.type == TokenType.FALSE:
return Token(TokenType.FALSE)
else:
print(f"Error, either token type ('{a.type}') or ;) token type ('{b.type}') is not boolean")
# Implication '>'
def implication_truth_table(a: Token, b: Token) -> Token:
if a.type == TokenType.TRUE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.TRUE and b.type == TokenType.FALSE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.FALSE and b.type == TokenType.FALSE:
return Token(TokenType.TRUE)
else:
print(f"Error, either token type ('{a.type}') or ;) token type ('{b.type}') is not boolean")
# Converse Implication '<'
def converse_implication_truth_table(a: Token, b: Token) -> Token:
if a.type == TokenType.TRUE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.TRUE and b.type == TokenType.FALSE:
return Token(TokenType.TRUE)
elif a.type == TokenType.FALSE and b.type == TokenType.TRUE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.FALSE:
return Token(TokenType.TRUE)
else:
print(f"Error, either token type ('{a.type}') or ;) token type ('{b.type}') is not boolean")
# Biconditional '<>'
def biconditional_truth_table(a: Token, b: Token) -> Token:
if a.type == TokenType.TRUE and b.type == TokenType.TRUE:
return Token(TokenType.TRUE)
elif a.type == TokenType.TRUE and b.type == TokenType.FALSE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.TRUE:
return Token(TokenType.FALSE)
elif a.type == TokenType.FALSE and b.type == TokenType.FALSE:
return Token(TokenType.TRUE)
else:
print(f"Error, either token type ('{a.type}') or ;) token type ('{b.type}') is not boolean")
# Relate operator type to their truth table
truth_table_dict = {
TokenType.NEGATION: negation_truth_table,
TokenType.CONJUNCTION: conjunction_truth_table,
TokenType.DISJUNCTION: disjunction_truth_table,
TokenType.IMPLICATION: implication_truth_table,
TokenType.CONVERSEIMPLICATION: converse_implication_truth_table,
TokenType.BICONDITIONAL: biconditional_truth_table
}
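# Usage sketch (editorial addition, relying only on the Token/TokenType API
# used above):
#
# t, f = Token(TokenType.TRUE), Token(TokenType.FALSE)
# implies = truth_table_dict[TokenType.IMPLICATION]
# print(implies(f, t).type)  # TokenType.TRUE, since F > T evaluates to true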
| 38.663462
| 100
| 0.684158
| 539
| 4,021
| 5.048237
| 0.102041
| 0.200662
| 0.113194
| 0.124954
| 0.740537
| 0.740537
| 0.740537
| 0.732819
| 0.732819
| 0.732819
| 0
| 0
| 0.194728
| 4,021
| 103
| 101
| 39.038835
| 0.840334
| 0.050982
| 0
| 0.746479
| 0
| 0.070423
| 0.124897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084507
| false
| 0
| 0.014085
| 0
| 0.408451
| 0.084507
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
187261d20befd8b3efc31210e376c8d3c5a80ec1
| 12,233
|
py
|
Python
|
tests/test_encryption.py
|
00mjk/pypsrp
|
540bad3ad2a9c2c12408ccf787db67ba76377566
|
[
"MIT"
] | 1
|
2022-03-11T20:07:11.000Z
|
2022-03-11T20:07:11.000Z
|
tests/test_encryption.py
|
00mjk/pypsrp
|
540bad3ad2a9c2c12408ccf787db67ba76377566
|
[
"MIT"
] | null | null | null |
tests/test_encryption.py
|
00mjk/pypsrp
|
540bad3ad2a9c2c12408ccf787db67ba76377566
|
[
"MIT"
] | 1
|
2022-03-11T20:07:12.000Z
|
2022-03-11T20:07:12.000Z
|
import collections
import pytest
from pypsrp.encryption import WinRMEncryption
from pypsrp.exceptions import WinRMError
WrapIOVResult = collections.namedtuple("WrapIOVResult", ["buffers"])
WrapResult = collections.namedtuple("WrapResult", ["data"])
class MockAuth:
def __init__(self, padding=False):
self.padding = padding
def wrap_winrm(self, data):
return b"reallylongheader", data + b"-encrypted", 1 if self.padding else 0
def unwrap_winrm(self, header, data):
return data[: len(data) - 10]
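# (Editorial note: in the expected payloads below, \x10\x00\x00\x00 is the
# length of the 16-byte mock header b"reallylongheader" encoded as a
# little-endian 32-bit integer, matching the signature-length prefix WinRM
# places before each encrypted block.)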
class TestWinRMEncryption(object):
def test_wrap_small_spnego(self):
plaintext = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_spnego_padded(self):
plaintext = b"plaintext"
encryption = WinRMEncryption(MockAuth(padding=True), WinRMEncryption.SPNEGO)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=10\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_small_kerberos(self):
plaintext = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.KERBEROS)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-Kerberos-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_small_credsp(self):
plaintext = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.CREDSSP)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-CredSSP-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_large_spnego(self):
plaintext = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=20000"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader" + plaintext + b"-encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_large_kerberos(self):
plaintext = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.KERBEROS)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-Kerberos-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=20000"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader" + plaintext + b"-encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/encrypted" == actual_type
assert expected == actual
def test_wrap_large_credsp(self):
plaintext = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.CREDSSP)
expected = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-CredSSP-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=16384"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader"
+ b"a" * 16384
+ b"-encrypted--Encrypted Boundary\r\n\tContent-Type: "
b"application/HTTP-CredSSP-session-encrypted\r\n"
b"\tOriginalContent: type=application/soap+xml;"
b"charset=UTF-8;Length=3616\r\n--Encrypted Boundary\r\n"
b"\tContent-Type: application/octet-stream\r\n"
b"\x10\x00\x00\x00reallylongheader" + b"a" * 3616 + b"-encrypted--Encrypted Boundary--\r\n"
)
actual_type, actual = encryption.wrap_message(plaintext)
assert "multipart/x-multi-encrypted" == actual_type
assert expected == actual
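# (Editorial note on the CredSSP cases: CredSSP caps each encrypted block at
# 16384 bytes, so the 20000-byte payload is split into 16384- and 3616-byte
# chunks, each in its own MIME part, and the content type changes from
# multipart/encrypted to multipart/x-multi-encrypted.)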
def test_unwrap_small_spnego(self):
expected = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_small_spnego_without_end_hyphens(self):
expected = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted--Encrypted Boundary\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_small_kerberos(self):
expected = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.KERBEROS)
# The spaces after -- on each boundary are on purpose; some MS implementations do this.
bwrapped = (
b"-- Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-Kerberos-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"-- Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-"
b"encrypted-- Encrypted Boundary--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_small_credsp(self):
expected = b"plaintext"
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.CREDSSP)
bwrapped = (
b"--Encrypted Boundary2\r\n\tContent-Type: application"
b"/HTTP-CredSSP-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary2\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplaintext-encrypted"
b"--Encrypted Boundary2--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary2")
assert expected == actual
def test_unwrap_large_spnego(self):
expected = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=20000"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader" + expected + b"-encrypted--Encrypted Boundary--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_large_kerberos(self):
expected = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.KERBEROS)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-Kerberos-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=20000"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader" + expected + b"-encrypted--Encrypted Boundary--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_large_credsp(self):
expected = b"a" * 20000
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.CREDSSP)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-CredSSP-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=16384"
b"\r\n--Encrypted Boundary\r\n\tContent-Type: application"
b"/octet-stream\r\n\x10\x00\x00\x00reallylongheader"
+ b"a" * 16384
+ b"-encrypted--Encrypted Boundary\r\n\tContent-Type: "
b"application/HTTP-CredSSP-session-encrypted\r\n"
b"\tOriginalContent: type=application/soap+xml;"
b"charset=UTF-8;Length=3616\r\n--Encrypted Boundary\r\n"
b"\tContent-Type: application/octet-stream\r\n"
b"\x10\x00\x00\x00reallylongheader" + b"a" * 3616 + b"-encrypted--Encrypted Boundary--\r\n"
)
actual = encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert expected == actual
def test_unwrap_length_mismatch(self):
encryption = WinRMEncryption(MockAuth(), WinRMEncryption.SPNEGO)
bwrapped = (
b"--Encrypted Boundary\r\n\tContent-Type: application"
b"/HTTP-SPNEGO-session-encrypted\r\n\tOriginalContent: "
b"type=application/soap+xml;charset=UTF-8;Length=9\r\n"
b"--Encrypted Boundary\r\n\tContent-Type: application/"
b"octet-stream\r\n\x10\x00\x00\x00reallylongheaderplain-"
b"encrypted--Encrypted Boundary--\r\n"
)
with pytest.raises(WinRMError) as err:
encryption.unwrap_message(bwrapped, "Encrypted Boundary")
assert (
str(err.value) == "The encrypted length from the server does not match the "
"expected length, decryption failed, actual: 5 != expected: 9"
)
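All of these fixtures share one wire format: for each chunk, a MIME part declares the plaintext size via `OriginalContent: ...;Length=N`, then an `application/octet-stream` part carries a 4-byte little-endian header length (`\x10\x00\x00\x00` == 16 == `len(b"reallylongheader")`), the signature header, and the encrypted bytes; large CredSSP responses split into several 16 KiB pairs. The sketch below is a hypothetical unwrapper built only from that fixture data; the default `decrypt` mimics what `MockAuth` appears to do here (strip the `b"-encrypted"` suffix), and a real implementation would call the auth context's GSSAPI/CredSSP unwrap instead.

```python
import re
import struct

def unwrap_message_sketch(wrapped: bytes, boundary: str, decrypt=None) -> bytes:
    """Hypothetical unwrapper for the framing exercised by the fixtures above;
    only the wire format is taken from the test data, the rest is a sketch."""
    if decrypt is None:
        # Stand-in for the auth context's unwrap; mimics these fixtures, which
        # append b"-encrypted" to each plaintext chunk.
        decrypt = lambda header, data: data[: data.rindex(b"-encrypted")]
    # Some servers emit "-- boundary" with a space (see the Kerberos test), so
    # accept both spellings as well as the closing "--" terminator.
    pattern = br"--\s?" + re.escape(boundary.encode()) + br"(?:--)?\r\n"
    parts = [p for p in re.split(pattern, wrapped) if p]
    message = b""
    # Parts arrive as (OriginalContent headers, octet-stream chunk) pairs; a
    # large CredSSP response carries several pairs (one per 16 KiB block).
    for meta, chunk in zip(parts[0::2], parts[1::2]):
        expected_len = int(meta.split(b"Length=")[1].split(b"\r\n")[0])
        payload = chunk.split(b"application/octet-stream\r\n", 1)[1]
        # The body starts with a 4-byte little-endian header length, e.g.
        # \x10\x00\x00\x00 == 16 == len(b"reallylongheader") in the fixtures.
        header_len = struct.unpack("<i", payload[:4])[0]
        header, data = payload[4:4 + header_len], payload[4 + header_len:]
        plaintext = decrypt(header, data)
        if len(plaintext) != expected_len:
            raise ValueError(
                "The encrypted length from the server does not match the "
                "expected length, decryption failed, actual: %d != expected: %d"
                % (len(plaintext), expected_len))
        message += plaintext
    return message
```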
| 45.988722
| 118
| 0.64702
| 1,398
| 12,233
| 5.602289
| 0.080114
| 0.025536
| 0.10572
| 0.111593
| 0.896578
| 0.894152
| 0.890449
| 0.870531
| 0.825077
| 0.794305
| 0
| 0.027506
| 0.230279
| 12,233
| 265
| 119
| 46.162264
| 0.804269
| 0.006867
| 0
| 0.735683
| 0
| 0.039648
| 0.459537
| 0.312752
| 0
| 0
| 0
| 0
| 0.096916
| 1
| 0.079295
| false
| 0
| 0.017621
| 0.008811
| 0.114537
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43e67ce55c58b7e5112e786b6b49b1c852d329f1
| 95
|
py
|
Python
|
contrib/wydget/wydget/dialogs/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | 15
|
2015-01-21T12:29:01.000Z
|
2018-12-09T09:17:33.000Z
|
contrib/wydget/wydget/dialogs/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | null | null | null |
contrib/wydget/wydget/dialogs/__init__.py
|
bitcraft/pyglet
|
144257c365ca85528c6a4c5bed8141e683d7a9b6
|
[
"BSD-3-Clause"
] | 9
|
2015-12-12T09:12:46.000Z
|
2021-12-26T13:29:14.000Z
|
from wydget.dialogs.question import Question, Message
from wydget.dialogs.file import FileOpen
| 31.666667
| 53
| 0.852632
| 13
| 95
| 6.230769
| 0.615385
| 0.246914
| 0.419753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094737
| 95
| 2
| 54
| 47.5
| 0.94186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43fa6a88b8f50a6b63b608fb0c534077b53f0078
| 1,420
|
py
|
Python
|
utility_calculation/value_iterator.py
|
veeral-agarwal/value-iteration
|
e33b01a62616972e0202f3c1557c55999cd723ec
|
[
"MIT"
] | null | null | null |
utility_calculation/value_iterator.py
|
veeral-agarwal/value-iteration
|
e33b01a62616972e0202f3c1557c55999cd723ec
|
[
"MIT"
] | null | null | null |
utility_calculation/value_iterator.py
|
veeral-agarwal/value-iteration
|
e33b01a62616972e0202f3c1557c55999cd723ec
|
[
"MIT"
] | null | null | null |
# Hand-rolled value iteration over three states A, B, C; V_0 = 0 for each.
A = [0]
B = [0]
C = [0]
# change your R here (value of the reward state R used in the γ(R) terms)
R = [16.6]
# gamma = 0.2 is hard-coded as the literal 0.2 inside every term below
for i in range(1,4):
    # For A
    print("V"+str(i)+"(A) = max(0.8(-1+γ("+str(B[i-1])+")) + 0.2(-1+γ("+str(A[i-1])+"))," + "0.8(-1+γ("+str(C[i-1])+")) + 0.2(-1+γ("+str(A[i-1])+"))")
    print("= max(" + str( (0.8*(-1+(0.2*B[i-1]))) + 0.2*(-1+(0.2*A[i-1])) ) +" , "+ str((0.8*(-1+(0.2*C[i-1]))) + 0.2*(-1+(0.2*A[i-1]))) + ")")
    A.append(max((0.8*(-1+(0.2*B[i-1]))) + 0.2*(-1+(0.2*A[i-1])) , (0.8*(-1+(0.2*C[i-1]))) + 0.2*(-1+(0.2*A[i-1]))))
    print("A = "+str(A[i]))
    # For B
    print("V"+str(i)+"(B) = max(0.8(-1+γ("+str(A[i-1])+")) + 0.2(-1+γ("+str(B[i-1])+"))," + "0.8(-4+γ("+str(R[0])+")) + 0.2(-1+γ("+str(B[i-1])+"))")
    print("= max(" + str( (0.8*(-1+(0.2*A[i-1]))) + 0.2*(-1+(0.2*B[i-1])) ) +" , "+ str((0.8*(-4+(0.2*R[0]))) + 0.2*(-1+(0.2*B[i-1])))+ ")")
    B.append( max((0.8*(-1+(0.2*A[i-1]))) + 0.2*(-1+(0.2*B[i-1])) , (0.8*(-4+(0.2*R[0]))) + 0.2*(-1+(0.2*B[i-1]))) )
    print("B = "+str(B[i]))
    # For C
    print("V"+str(i)+"(C) = max(0.8(-1+γ("+str(A[i-1])+")) + 0.2(-1+γ("+str(C[i-1])+"))," + "0.25(-3+γ("+str(R[0])+")) + 0.75(-1+γ("+str(C[i-1])+"))")
    print("= max(" + str( (0.8*(-1+(0.2*A[i-1]))) + 0.2*(-1+(0.2*C[i-1])) ) +" , "+ str((0.25*(-3+(0.2*R[0]))) + 0.75*(-1+(0.2*C[i-1])))+ ")")
    C.append( max((0.8*(-1+(0.2*A[i-1]))) + 0.2*(-1+(0.2*C[i-1])) , (0.25*(-3+(0.2*R[0]))) + 0.75*(-1+(0.2*C[i-1]))) )
    print("C = "+str(C[i]))
| 61.73913
| 150
| 0.352817
| 357
| 1,420
| 1.403361
| 0.07563
| 0.155689
| 0.191617
| 0.095808
| 0.816367
| 0.736527
| 0.696607
| 0.642715
| 0.620758
| 0.620758
| 0
| 0.168576
| 0.139437
| 1,420
| 22
| 151
| 64.545455
| 0.241408
| 0.016901
| 0
| 0
| 0
| 0
| 0.165111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.529412
| 0
| 0
| 1
| null | 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a1047b92f74f6c074da4af1b0ec1142a0104b41b
| 239
|
py
|
Python
|
clay/utils/__init__.py
|
lucuma/Clay
|
07923ddb5b710cdea21fd7c4667e4189ebc0640e
|
[
"Apache-2.0"
] | 35
|
2015-02-12T15:34:47.000Z
|
2021-09-27T17:16:04.000Z
|
clay/utils/__init__.py
|
lucuma/Clay
|
07923ddb5b710cdea21fd7c4667e4189ebc0640e
|
[
"Apache-2.0"
] | 11
|
2015-01-11T20:48:49.000Z
|
2020-12-23T18:52:22.000Z
|
clay/utils/__init__.py
|
lucuma/Clay
|
07923ddb5b710cdea21fd7c4667e4189ebc0640e
|
[
"Apache-2.0"
] | 10
|
2015-02-27T19:48:46.000Z
|
2018-06-24T09:39:27.000Z
|
from .active import * # noqa
from .jinja_render import * # noqa
from .jinja_includewith import * # noqa
from .load_config import * # noqa
from .make_matcher import * # noqa
from .multidict import * # noqa
from .urls import * # noqa
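This `__init__.py` flattens every submodule into the package namespace; the `# noqa` markers silence flake8's star-import warnings (F401/F403). The pattern only stays tidy if each submodule pins `__all__`, otherwise its own imports leak out too. A minimal illustration (the module and function below are hypothetical, not Clay's actual code):

```python
# Hypothetical clay/utils/make_matcher.py: without __all__, "re" would also be
# re-exported through "from .make_matcher import *" in the __init__ above.
import re

__all__ = ["make_matcher"]

def make_matcher(pattern):
    """Return a predicate testing whether a string matches `pattern`."""
    rx = re.compile(pattern)
    return lambda value: rx.match(value) is not None
```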
| 29.875
| 40
| 0.707113
| 32
| 239
| 5.15625
| 0.40625
| 0.424242
| 0.509091
| 0.230303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205021
| 239
| 7
| 41
| 34.142857
| 0.868421
| 0.142259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a13704fd85c90d0c743e4cf5aa349240d8b903a2
| 26,837
|
py
|
Python
|
sdk/python/pulumi_azure/datashare/dataset_data_lake_gen2.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/datashare/dataset_data_lake_gen2.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/datashare/dataset_data_lake_gen2.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DatasetDataLakeGen2Args', 'DatasetDataLakeGen2']
@pulumi.input_type
class DatasetDataLakeGen2Args:
def __init__(__self__, *,
file_system_name: pulumi.Input[str],
share_id: pulumi.Input[str],
storage_account_id: pulumi.Input[str],
file_path: Optional[pulumi.Input[str]] = None,
folder_path: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DatasetDataLakeGen2 resource.
:param pulumi.Input[str] file_system_name: The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] share_id: The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] storage_account_id: The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] file_path: The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] folder_path: The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] name: The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
pulumi.set(__self__, "file_system_name", file_system_name)
pulumi.set(__self__, "share_id", share_id)
pulumi.set(__self__, "storage_account_id", storage_account_id)
if file_path is not None:
pulumi.set(__self__, "file_path", file_path)
if folder_path is not None:
pulumi.set(__self__, "folder_path", folder_path)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="fileSystemName")
def file_system_name(self) -> pulumi.Input[str]:
"""
The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_system_name")
@file_system_name.setter
def file_system_name(self, value: pulumi.Input[str]):
pulumi.set(self, "file_system_name", value)
@property
@pulumi.getter(name="shareId")
def share_id(self) -> pulumi.Input[str]:
"""
The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "share_id")
@share_id.setter
def share_id(self, value: pulumi.Input[str]):
pulumi.set(self, "share_id", value)
@property
@pulumi.getter(name="storageAccountId")
def storage_account_id(self) -> pulumi.Input[str]:
"""
The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "storage_account_id")
@storage_account_id.setter
def storage_account_id(self, value: pulumi.Input[str]):
pulumi.set(self, "storage_account_id", value)
@property
@pulumi.getter(name="filePath")
def file_path(self) -> Optional[pulumi.Input[str]]:
"""
The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_path")
@file_path.setter
def file_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_path", value)
@property
@pulumi.getter(name="folderPath")
def folder_path(self) -> Optional[pulumi.Input[str]]:
"""
The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "folder_path")
@folder_path.setter
def folder_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_path", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _DatasetDataLakeGen2State:
def __init__(__self__, *,
display_name: Optional[pulumi.Input[str]] = None,
file_path: Optional[pulumi.Input[str]] = None,
file_system_name: Optional[pulumi.Input[str]] = None,
folder_path: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
share_id: Optional[pulumi.Input[str]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering DatasetDataLakeGen2 resources.
:param pulumi.Input[str] display_name: The name of the Data Share Dataset.
:param pulumi.Input[str] file_path: The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] file_system_name: The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] folder_path: The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] name: The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] share_id: The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] storage_account_id: The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if file_path is not None:
pulumi.set(__self__, "file_path", file_path)
if file_system_name is not None:
pulumi.set(__self__, "file_system_name", file_system_name)
if folder_path is not None:
pulumi.set(__self__, "folder_path", folder_path)
if name is not None:
pulumi.set(__self__, "name", name)
if share_id is not None:
pulumi.set(__self__, "share_id", share_id)
if storage_account_id is not None:
pulumi.set(__self__, "storage_account_id", storage_account_id)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Data Share Dataset.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="filePath")
def file_path(self) -> Optional[pulumi.Input[str]]:
"""
The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_path")
@file_path.setter
def file_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_path", value)
@property
@pulumi.getter(name="fileSystemName")
def file_system_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_system_name")
@file_system_name.setter
def file_system_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_system_name", value)
@property
@pulumi.getter(name="folderPath")
def folder_path(self) -> Optional[pulumi.Input[str]]:
"""
The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "folder_path")
@folder_path.setter
def folder_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "folder_path", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="shareId")
def share_id(self) -> Optional[pulumi.Input[str]]:
"""
The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "share_id")
@share_id.setter
def share_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "share_id", value)
@property
@pulumi.getter(name="storageAccountId")
def storage_account_id(self) -> Optional[pulumi.Input[str]]:
"""
The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "storage_account_id")
@storage_account_id.setter
def storage_account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_account_id", value)
class DatasetDataLakeGen2(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
file_path: Optional[pulumi.Input[str]] = None,
file_system_name: Optional[pulumi.Input[str]] = None,
folder_path: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
share_id: Optional[pulumi.Input[str]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Data Share Data Lake Gen2 Dataset.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
import pulumi_azuread as azuread
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_account = azure.datashare.Account("exampleAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
identity=azure.datashare.AccountIdentityArgs(
type="SystemAssigned",
))
example_share = azure.datashare.Share("exampleShare",
account_id=example_account.id,
kind="CopyBased")
        example_storage_account = azure.storage.Account("exampleStorageAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_kind="BlobStorage",
            account_tier="Standard",
            account_replication_type="LRS")
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_storage_account.id)
        example_service_principal = azuread.get_service_principal_output(display_name=example_account.name)
        example_assignment = azure.authorization.Assignment("exampleAssignment",
            scope=example_storage_account.id,
            role_definition_name="Storage Blob Data Reader",
            principal_id=example_service_principal.object_id)
        example_dataset_data_lake_gen2 = azure.datashare.DatasetDataLakeGen2("exampleDatasetDataLakeGen2",
            share_id=example_share.id,
            storage_account_id=example_storage_account.id,
            file_system_name=example_data_lake_gen2_filesystem.name,
            file_path="myfile.txt",
            opts=pulumi.ResourceOptions(depends_on=[example_assignment]))
```
## Import
Data Share Data Lake Gen2 Datasets can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:datashare/datasetDataLakeGen2:DatasetDataLakeGen2 example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] file_path: The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] file_system_name: The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] folder_path: The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] name: The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] share_id: The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] storage_account_id: The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatasetDataLakeGen2Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Data Share Data Lake Gen2 Dataset.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
import pulumi_azuread as azuread
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_account = azure.datashare.Account("exampleAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
identity=azure.datashare.AccountIdentityArgs(
type="SystemAssigned",
))
example_share = azure.datashare.Share("exampleShare",
account_id=example_account.id,
kind="CopyBased")
        example_storage_account = azure.storage.Account("exampleStorageAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_kind="BlobStorage",
            account_tier="Standard",
            account_replication_type="LRS")
        example_data_lake_gen2_filesystem = azure.storage.DataLakeGen2Filesystem("exampleDataLakeGen2Filesystem", storage_account_id=example_storage_account.id)
        example_service_principal = azuread.get_service_principal_output(display_name=example_account.name)
        example_assignment = azure.authorization.Assignment("exampleAssignment",
            scope=example_storage_account.id,
            role_definition_name="Storage Blob Data Reader",
            principal_id=example_service_principal.object_id)
        example_dataset_data_lake_gen2 = azure.datashare.DatasetDataLakeGen2("exampleDatasetDataLakeGen2",
            share_id=example_share.id,
            storage_account_id=example_storage_account.id,
            file_system_name=example_data_lake_gen2_filesystem.name,
            file_path="myfile.txt",
            opts=pulumi.ResourceOptions(depends_on=[example_assignment]))
```
## Import
Data Share Data Lake Gen2 Datasets can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:datashare/datasetDataLakeGen2:DatasetDataLakeGen2 example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataShare/accounts/account1/shares/share1/dataSets/dataSet1
```
:param str resource_name: The name of the resource.
:param DatasetDataLakeGen2Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatasetDataLakeGen2Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
file_path: Optional[pulumi.Input[str]] = None,
file_system_name: Optional[pulumi.Input[str]] = None,
folder_path: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
share_id: Optional[pulumi.Input[str]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatasetDataLakeGen2Args.__new__(DatasetDataLakeGen2Args)
__props__.__dict__["file_path"] = file_path
if file_system_name is None and not opts.urn:
raise TypeError("Missing required property 'file_system_name'")
__props__.__dict__["file_system_name"] = file_system_name
__props__.__dict__["folder_path"] = folder_path
__props__.__dict__["name"] = name
if share_id is None and not opts.urn:
raise TypeError("Missing required property 'share_id'")
__props__.__dict__["share_id"] = share_id
if storage_account_id is None and not opts.urn:
raise TypeError("Missing required property 'storage_account_id'")
__props__.__dict__["storage_account_id"] = storage_account_id
__props__.__dict__["display_name"] = None
super(DatasetDataLakeGen2, __self__).__init__(
'azure:datashare/datasetDataLakeGen2:DatasetDataLakeGen2',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
display_name: Optional[pulumi.Input[str]] = None,
file_path: Optional[pulumi.Input[str]] = None,
file_system_name: Optional[pulumi.Input[str]] = None,
folder_path: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
share_id: Optional[pulumi.Input[str]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None) -> 'DatasetDataLakeGen2':
"""
Get an existing DatasetDataLakeGen2 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] display_name: The name of the Data Share Dataset.
:param pulumi.Input[str] file_path: The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] file_system_name: The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] folder_path: The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] name: The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] share_id: The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
:param pulumi.Input[str] storage_account_id: The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DatasetDataLakeGen2State.__new__(_DatasetDataLakeGen2State)
__props__.__dict__["display_name"] = display_name
__props__.__dict__["file_path"] = file_path
__props__.__dict__["file_system_name"] = file_system_name
__props__.__dict__["folder_path"] = folder_path
__props__.__dict__["name"] = name
__props__.__dict__["share_id"] = share_id
__props__.__dict__["storage_account_id"] = storage_account_id
return DatasetDataLakeGen2(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
The name of the Data Share Dataset.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="filePath")
def file_path(self) -> pulumi.Output[Optional[str]]:
"""
The path of the file in the data lake file system to be shared with the receiver. Conflicts with `folder_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_path")
@property
@pulumi.getter(name="fileSystemName")
def file_system_name(self) -> pulumi.Output[str]:
"""
The name of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "file_system_name")
@property
@pulumi.getter(name="folderPath")
def folder_path(self) -> pulumi.Output[Optional[str]]:
"""
The folder path in the data lake file system to be shared with the receiver. Conflicts with `file_path` Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "folder_path")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this Data Share Data Lake Gen2 Dataset. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="shareId")
def share_id(self) -> pulumi.Output[str]:
"""
The resource ID of the Data Share where this Data Share Data Lake Gen2 Dataset should be created. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "share_id")
@property
@pulumi.getter(name="storageAccountId")
def storage_account_id(self) -> pulumi.Output[str]:
"""
The resource id of the storage account of the data lake file system to be shared with the receiver. Changing this forces a new Data Share Data Lake Gen2 Dataset to be created.
"""
return pulumi.get(self, "storage_account_id")
| 53.248016
| 242
| 0.682118
| 3,511
| 26,837
| 5.013956
| 0.061521
| 0.042718
| 0.068394
| 0.057941
| 0.896842
| 0.882584
| 0.872188
| 0.858668
| 0.855771
| 0.824074
| 0
| 0.008341
| 0.236129
| 26,837
| 503
| 243
| 53.353877
| 0.85039
| 0.499013
| 0
| 0.677291
| 1
| 0
| 0.104665
| 0.006521
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159363
| false
| 0.003984
| 0.01992
| 0
| 0.2749
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a15c2e7ccc904deef243667fd0f9632cd183ce42
| 17,312
|
py
|
Python
|
networkapi/api_network/tests/v3/sanity/networkipv4/sync/test_put.py
|
treviza153/GloboNetworkAPI
|
9ca8ba7febdef0eb716514196b1bda1afdccd851
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/api_network/tests/v3/sanity/networkipv4/sync/test_put.py
|
treviza153/GloboNetworkAPI
|
9ca8ba7febdef0eb716514196b1bda1afdccd851
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/api_network/tests/v3/sanity/networkipv4/sync/test_put.py
|
treviza153/GloboNetworkAPI
|
9ca8ba7febdef0eb716514196b1bda1afdccd851
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
import json
from django.test.client import Client
from networkapi.test.test_case import NetworkApiTestCase
class NetworkIPv4PutSuccessTestCase(NetworkApiTestCase):
fixtures = [
'networkapi/system/fixtures/initial_variables.json',
'networkapi/usuario/fixtures/initial_usuario.json',
'networkapi/grupo/fixtures/initial_ugrupo.json',
'networkapi/usuario/fixtures/initial_usuariogrupo.json',
'networkapi/api_ogp/fixtures/initial_objecttype.json',
'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',
'networkapi/grupo/fixtures/initial_permissions.json',
'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
'networkapi/vlan/fixtures/initial_tipo_rede.json',
'networkapi/filter/fixtures/initial_filter.json',
'networkapi/filterequiptype/fixtures/initial_filterequiptype.json',
'networkapi/equipamento/fixtures/initial_tipo_equip.json',
'networkapi/api_network/fixtures/sanity/initial_config_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment_dc.json',
'networkapi/api_network/fixtures/sanity/initial_environment_envlog.json',
'networkapi/api_network/fixtures/sanity/initial_environment_gl3.json',
'networkapi/api_network/fixtures/sanity/initial_ipconfig.json',
'networkapi/api_network/fixtures/sanity/initial_networkipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vlan.json',
'networkapi/api_network/fixtures/sanity/initial_vrf.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vip_request_v4.json',
'networkapi/api_network/fixtures/sanity/initial_environment_vip.json',
'networkapi/api_network/fixtures/sanity/initial_env_env_vip.json',
'networkapi/api_network/fixtures/sanity/initial_equipments.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_env.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_group.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4_eqpt.json',
'networkapi/api_network/fixtures/sanity/initial_roteiros.json',
'networkapi/api_network/fixtures/sanity/initial_equip_marca_model.json',
'networkapi/api_network/fixtures/sanity/initial_cidr.json',
]
json_path = 'api_network/tests/v3/sanity/networkipv4/json/%s'
def setUp(self):
self.client = Client()
self.authorization = self.get_http_authorization('test')
def tearDown(self):
pass
def test_try_update_inactive_netipv4(self):
"""Test of success to update inactive Network IPv4 changing cluster unit,
network type and environment vip.
"""
name_file = self.json_path % 'put/net_inactive.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/3/',
data=json.dumps(self.load_json_file(name_file)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active'
name_file = self.json_path % 'get/basic/net_inactive.json'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
self.compare_json_lists(name_file, response.data['networks'])
def test_try_update_active_netipv4(self):
"""Test of success to update active Network IPv4 changing cluster unit,
network type and environment vip.
"""
name_file = self.json_path % 'put/net_active.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/1/',
data=json.dumps(self.load_json_file(name_file)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/1/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_active.json'
self.compare_json_lists(name_file, response.data['networks'])
def test_try_update_inactive_netipv4_changing_octets(self):
"""Test of success to update inactive Network IPv4 changing octets.
Octets will not be changed.
"""
name_file_put = self.json_path % 'put/net_inactive_changing_octets.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/3/',
data=json.dumps(self.load_json_file(name_file_put)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_inactive_changing_octets.json'
self.compare_json_lists(name_file, response.data['networks'])
def test_try_update_netipv4_ignore_change_active_flag_from_false_to_true(self):
"""Test of success to update NetworkIPv4 changing active flag from False
to True. Active flag cannot be changed.
"""
name_file_put = 'api_network/tests/v3/sanity/networkipv4/json/put/' \
'net_changing_active_from_false_to_true.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/3/',
data=json.dumps(self.load_json_file(name_file_put)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_inactive.json'
self.compare_json_lists(name_file, response.data['networks'])
def test_try_update_netipv4_ignore_change_active_flag_from_true_to_false(self):
"""Test of success to update NetworkIPv4 changing active flag from True.
to False. Active flag cannot be changed.
"""
name_file_put = 'api_network/tests/v3/sanity/networkipv4/json/put/' \
'net_changing_active_from_true_to_false.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/1/',
data=json.dumps(self.load_json_file(name_file_put)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/1/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_active.json'
self.compare_json_lists(name_file, response.data['networks'])
class NetworkIPv4PutErrorTestCase(NetworkApiTestCase):
fixtures = [
'networkapi/system/fixtures/initial_variables.json',
'networkapi/usuario/fixtures/initial_usuario.json',
'networkapi/grupo/fixtures/initial_ugrupo.json',
'networkapi/usuario/fixtures/initial_usuariogrupo.json',
'networkapi/api_ogp/fixtures/initial_objecttype.json',
'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',
'networkapi/grupo/fixtures/initial_permissions.json',
'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
'networkapi/vlan/fixtures/initial_tipo_rede.json',
'networkapi/filter/fixtures/initial_filter.json',
'networkapi/filterequiptype/fixtures/initial_filterequiptype.json',
'networkapi/equipamento/fixtures/initial_tipo_equip.json',
'networkapi/api_network/fixtures/sanity/initial_config_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment_dc.json',
'networkapi/api_network/fixtures/sanity/initial_environment_envlog.json',
'networkapi/api_network/fixtures/sanity/initial_environment_gl3.json',
'networkapi/api_network/fixtures/sanity/initial_ipconfig.json',
'networkapi/api_network/fixtures/sanity/initial_networkipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vlan.json',
'networkapi/api_network/fixtures/sanity/initial_vrf.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vip_request_v4.json',
'networkapi/api_network/fixtures/sanity/initial_environment_vip.json',
'networkapi/api_network/fixtures/sanity/initial_env_env_vip.json',
'networkapi/api_network/fixtures/sanity/initial_equipments.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_env.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_group.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4_eqpt.json',
'networkapi/api_network/fixtures/sanity/initial_roteiros.json',
'networkapi/api_network/fixtures/sanity/initial_equip_marca_model.json'
]
json_path = 'api_network/tests/v3/sanity/networkipv4/json/%s'
def setUp(self):
self.client = Client()
self.authorization = self.get_http_authorization('test')
def tearDown(self):
pass
def test_try_update_nonexistent_netipv4(self):
"""Test of error to update inexistent Network IPv4."""
name_file = self.json_path % 'put/net_nonexistent.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/1000/',
data=json.dumps(self.load_json_file(name_file)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(404, response.status_code)
self.compare_values(
'There is no NetworkIPv4 with pk = 1000.', response.data['detail'])
def test_try_update_inactive_netipv4_changing_nettype_to_none(self):
"""Test of error to update inactive Network IPv4 changing network type
to None.
"""
name_file = self.json_path % 'put/net_inactive_changing_net_type.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/3/',
data=json.dumps(self.load_json_file(name_file)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(400, response.status_code)
self.compare_values(
['Wrong type'],
response.data['detail']['errors'][0]['error_reasons'])
get_url = '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active'
name_file = self.json_path % 'get/basic/net_inactive_changing_net_type.json'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
self.compare_json_lists(name_file, response.data['networks'])
class NetworkIPv4ForcePutSuccessTestCase(NetworkApiTestCase):
fixtures = [
'networkapi/system/fixtures/initial_variables.json',
'networkapi/usuario/fixtures/initial_usuario.json',
'networkapi/grupo/fixtures/initial_ugrupo.json',
'networkapi/usuario/fixtures/initial_usuariogrupo.json',
'networkapi/api_ogp/fixtures/initial_objecttype.json',
'networkapi/api_ogp/fixtures/initial_objectgrouppermissiongeneral.json',
'networkapi/grupo/fixtures/initial_permissions.json',
'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
'networkapi/vlan/fixtures/initial_tipo_rede.json',
'networkapi/filter/fixtures/initial_filter.json',
'networkapi/filterequiptype/fixtures/initial_filterequiptype.json',
'networkapi/equipamento/fixtures/initial_tipo_equip.json',
'networkapi/api_network/fixtures/sanity/initial_config_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment.json',
'networkapi/api_network/fixtures/sanity/initial_environment_dc.json',
'networkapi/api_network/fixtures/sanity/initial_environment_envlog.json',
'networkapi/api_network/fixtures/sanity/initial_environment_gl3.json',
'networkapi/api_network/fixtures/sanity/initial_ipconfig.json',
'networkapi/api_network/fixtures/sanity/initial_networkipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vlan.json',
'networkapi/api_network/fixtures/sanity/initial_vrf.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4.json',
'networkapi/api_network/fixtures/sanity/initial_vip_request_v4.json',
'networkapi/api_network/fixtures/sanity/initial_environment_vip.json',
'networkapi/api_network/fixtures/sanity/initial_env_env_vip.json',
'networkapi/api_network/fixtures/sanity/initial_equipments.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_env.json',
'networkapi/api_network/fixtures/sanity/initial_equipments_group.json',
'networkapi/api_network/fixtures/sanity/initial_ipv4_eqpt.json',
'networkapi/api_network/fixtures/sanity/initial_roteiros.json',
'networkapi/api_network/fixtures/sanity/initial_equip_marca_model.json',
'networkapi/api_network/fixtures/sanity/initial_cidr.json'
]
json_path = 'api_network/tests/v3/sanity/networkipv4/json/%s'
def setUp(self):
self.client = Client()
self.authorization = self.get_http_authorization('test_admin')
def tearDown(self):
pass
def test_try_update_netipv4_changing_active_flag_from_false_to_true(self):
"""Test of success of changing NetworkIPv4 active flag from false
to true without really deploy the Network.
"""
name_file_put = 'api_network/tests/v3/sanity/networkipv4/json/put/' \
'net_changing_active_from_false_to_true.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/force/3/',
data=json.dumps(self.load_json_file(name_file_put)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_changed_active_from_false_to_true.json'
self.compare_json_lists(name_file, response.data['networks'])
def test_try_update_netipv4_changing_active_flag_from_true_to_false(self):
"""Test of success of changing NetworkIPv4 active flag from true
to false without really undeploy the Network.
"""
name_file_put = 'api_network/tests/v3/sanity/networkipv4/json/put/' \
'net_changing_active_from_true_to_false.json'
# Does PUT request
response = self.client.put(
'/api/v3/networkv4/force/1/',
data=json.dumps(self.load_json_file(name_file_put)),
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
get_url = '/api/v3/networkv4/1/?kind=basic&include=cluster_unit,active'
response = self.client.get(
get_url,
content_type='application/json',
HTTP_AUTHORIZATION=self.get_http_authorization('test'))
self.compare_status(200, response.status_code)
name_file = self.json_path % 'get/basic/net_changed_active_from_true_to_false.json'
self.compare_json_lists(name_file, response.data['networks'])
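Every success test above repeats the same four-step flow: PUT a fixture, expect 200, GET the resource back with `kind=basic&include=cluster_unit,active`, and diff the response against an expected fixture. A hedged sketch of a helper the three test classes could share (the method name is mine; everything it calls appears in the tests above, and it assumes `json_path`-relative fixture names):

```python
# Hypothetical mixin method for NetworkApiTestCase subclasses; reuses only
# helpers already exercised by the tests above.
def put_then_verify(self, put_url, put_file, get_url, expected_file):
    response = self.client.put(
        put_url,
        data=json.dumps(self.load_json_file(self.json_path % put_file)),
        content_type='application/json',
        HTTP_AUTHORIZATION=self.authorization)
    self.compare_status(200, response.status_code)
    response = self.client.get(
        get_url,
        content_type='application/json',
        HTTP_AUTHORIZATION=self.get_http_authorization('test'))
    self.compare_status(200, response.status_code)
    self.compare_json_lists(self.json_path % expected_file,
                            response.data['networks'])

# e.g. test_try_update_inactive_netipv4 would reduce to:
# self.put_then_verify(
#     '/api/v3/networkv4/3/', 'put/net_inactive.json',
#     '/api/v3/networkv4/3/?kind=basic&include=cluster_unit,active',
#     'get/basic/net_inactive.json')
```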
| 42.957816
| 91
| 0.701941
| 2,019
| 17,312
| 5.743437
| 0.072313
| 0.111073
| 0.095291
| 0.122111
| 0.959555
| 0.956795
| 0.938772
| 0.922301
| 0.91704
| 0.913677
| 0
| 0.011627
| 0.195183
| 17,312
| 402
| 92
| 43.064677
| 0.820642
| 0.059901
| 0
| 0.855556
| 0
| 0
| 0.481943
| 0.444271
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.011111
| 0.011111
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a193e96bb4198b60322ca46c6e10fb184a154ef6
| 18,878
|
py
|
Python
|
discovery-provider/alembic/versions/6b5186e7d28f_replace_aggregate_plays_mat_view_with_.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | 1
|
2022-03-27T21:40:36.000Z
|
2022-03-27T21:40:36.000Z
|
discovery-provider/alembic/versions/6b5186e7d28f_replace_aggregate_plays_mat_view_with_.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | null | null | null |
discovery-provider/alembic/versions/6b5186e7d28f_replace_aggregate_plays_mat_view_with_.py
|
Tenderize/audius-protocol
|
aa15844e3f12812fe8aaa81e2cb6e5c5fa89ff51
|
[
"Apache-2.0"
] | null | null | null |
"""Replace aggregate_plays mat view with table
Revision ID: 6b5186e7d28f
Revises: 36eac5ed00bf
Create Date: 2021-11-19 20:54:17.596441
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '6b5186e7d28f'
down_revision = '36eac5ed00bf'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
query = """
DO
$do$
BEGIN
-- only run migration if the aggregate_plays table does not already exist
-- trying to drop the mat view aggregate_plays will fail if the table aggregate_plays exists
-- also don't want to risk losing data
IF NOT EXISTS (
SELECT FROM information_schema.tables
WHERE table_name = 'aggregate_plays'
) THEN
-- update indexing checkpoints based on current plays
WITH latest_play_id AS (
SELECT MAX(id) AS id FROM plays
)
INSERT INTO indexing_checkpoints (tablename, last_checkpoint)
VALUES(
'aggregate_plays',
(COALESCE((SELECT id FROM latest_play_id), 0))
)
ON CONFLICT (tablename)
DO UPDATE SET last_checkpoint = EXCLUDED.last_checkpoint;
-- create aggregate_plays table
DROP TABLE IF EXISTS aggregate_plays_table;
CREATE TABLE aggregate_plays_table AS (
SELECT
plays.play_item_id as play_item_id,
count(*) as count
FROM
plays
WHERE plays.id <= (
SELECT last_checkpoint
FROM indexing_checkpoints
WHERE tablename = 'aggregate_plays'
)
GROUP BY plays.play_item_id
);
-- drop existing aggregate_plays mat view and its dependencies
DROP MATERIALIZED VIEW IF EXISTS trending_params;
DROP MATERIALIZED VIEW IF EXISTS aggregate_plays; -- will complain if aggregate_plays table exists
-- rename table to replace mat view
ALTER TABLE aggregate_plays_table RENAME TO aggregate_plays;
CREATE UNIQUE INDEX IF NOT EXISTS play_item_id_idx ON aggregate_plays (play_item_id);
-- recreate mat views
-- trending_params from 92571f94989a
CREATE MATERIALIZED VIEW trending_params as
SELECT
t.track_id as track_id,
t.genre as genre,
t.owner_id as owner_id,
ap.play_count as play_count,
au.follower_count as owner_follower_count,
COALESCE (aggregate_track.repost_count, 0) as repost_count,
COALESCE (aggregate_track.save_count, 0) as save_count,
COALESCE (repost_week.repost_count, 0) as repost_week_count,
COALESCE (repost_month.repost_count, 0) as repost_month_count,
COALESCE (repost_year.repost_count, 0) as repost_year_count,
COALESCE (save_week.repost_count, 0) as save_week_count,
COALESCE (save_month.repost_count, 0) as save_month_count,
COALESCE (save_year.repost_count, 0) as save_year_count,
COALESCE (karma.karma, 0) as karma
FROM
tracks t
-- join on subquery for aggregate play count
LEFT OUTER JOIN (
SELECT
ap.count as play_count,
ap.play_item_id as play_item_id
FROM
aggregate_plays ap
) as ap ON ap.play_item_id = t.track_id
-- join on subquery for aggregate user
LEFT OUTER JOIN (
SELECT
au.user_id as user_id,
au.follower_count as follower_count
FROM
aggregate_user au
) as au ON au.user_id = t.owner_id
-- join on subquery for aggregate track
LEFT OUTER JOIN (
SELECT
aggregate_track.track_id as track_id,
aggregate_track.repost_count as repost_count,
aggregate_track.save_count as save_count
FROM
aggregate_track
) as aggregate_track ON aggregate_track.track_id = t.track_id
-- -- join on subquery for reposts by year
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 year')
GROUP BY r.repost_item_id
) repost_year ON repost_year.track_id = t.track_id
-- -- join on subquery for reposts by month
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 month')
GROUP BY r.repost_item_id
) repost_month ON repost_month.track_id = t.track_id
-- -- join on subquery for reposts by week
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 week')
GROUP BY r.repost_item_id
) repost_week ON repost_week.track_id = t.track_id
-- -- join on subquery for saves by year
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 year')
GROUP BY r.save_item_id
) save_year ON save_year.track_id = t.track_id
-- -- join on subquery for saves by month
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 month')
GROUP BY r.save_item_id
) save_month ON save_month.track_id = t.track_id
-- -- join on subquery for saves by week
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 week')
GROUP BY r.save_item_id
) save_week ON save_week.track_id = t.track_id
LEFT OUTER JOIN (
SELECT
save_and_reposts.item_id as track_id,
sum(au.follower_count) as karma
FROM
(
select
r_and_s.user_id,
r_and_s.item_id
from
(select
user_id,
repost_item_id as item_id
from
reposts
where
is_delete is false AND
is_current is true AND
repost_type = 'track'
union all
select
user_id,
save_item_id as item_id
from
saves
where
is_delete is false AND
is_current is true AND
save_type = 'track'
) r_and_s
join
users
on r_and_s.user_id = users.user_id
where
users.cover_photo is not null AND
users.profile_picture is not null AND
users.bio is not null
) save_and_reposts
JOIN
aggregate_user au
ON
save_and_reposts.user_id = au.user_id
GROUP BY save_and_reposts.item_id
) karma ON karma.track_id = t.track_id
WHERE
t.is_current is True AND
t.is_delete is False AND
t.is_unlisted is False AND
t.stem_of is Null;
CREATE INDEX trending_params_track_id_idx ON trending_params (track_id);
END IF;
END
$do$
"""
conn.execute(query)
def downgrade():
conn = op.get_bind()
query = """
begin;
-- drop dependent mat views and aggregate_plays table
DROP MATERIALIZED VIEW IF EXISTS trending_params;
DROP TABLE IF EXISTS aggregate_plays;
--- ======================= AGGREGATE PLAYS ======================= from version 776ca72b16db
DROP MATERIALIZED VIEW IF EXISTS aggregate_plays;
DROP INDEX IF EXISTS play_item_id_idx;
CREATE MATERIALIZED VIEW aggregate_plays as
SELECT
plays.play_item_id as play_item_id,
count(*) as count
FROM
plays
GROUP BY plays.play_item_id;
-- add index on above materialized view
CREATE INDEX play_item_id_idx ON aggregate_plays (play_item_id);
-- trending_params from 92571f94989a
CREATE MATERIALIZED VIEW trending_params as
SELECT
t.track_id as track_id,
t.genre as genre,
t.owner_id as owner_id,
ap.play_count as play_count,
au.follower_count as owner_follower_count,
COALESCE (aggregate_track.repost_count, 0) as repost_count,
COALESCE (aggregate_track.save_count, 0) as save_count,
COALESCE (repost_week.repost_count, 0) as repost_week_count,
COALESCE (repost_month.repost_count, 0) as repost_month_count,
COALESCE (repost_year.repost_count, 0) as repost_year_count,
COALESCE (save_week.repost_count, 0) as save_week_count,
COALESCE (save_month.repost_count, 0) as save_month_count,
COALESCE (save_year.repost_count, 0) as save_year_count,
COALESCE (karma.karma, 0) as karma
FROM
tracks t
-- join on subquery for aggregate play count
LEFT OUTER JOIN (
SELECT
ap.count as play_count,
ap.play_item_id as play_item_id
FROM
aggregate_plays ap
) as ap ON ap.play_item_id = t.track_id
-- join on subquery for aggregate user
LEFT OUTER JOIN (
SELECT
au.user_id as user_id,
au.follower_count as follower_count
FROM
aggregate_user au
) as au ON au.user_id = t.owner_id
-- join on subquery for aggregate track
LEFT OUTER JOIN (
SELECT
aggregate_track.track_id as track_id,
aggregate_track.repost_count as repost_count,
aggregate_track.save_count as save_count
FROM
aggregate_track
) as aggregate_track ON aggregate_track.track_id = t.track_id
-- -- join on subquery for reposts by year
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 year')
GROUP BY r.repost_item_id
) repost_year ON repost_year.track_id = t.track_id
-- -- join on subquery for reposts by month
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 month')
GROUP BY r.repost_item_id
) repost_month ON repost_month.track_id = t.track_id
-- -- join on subquery for reposts by week
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 week')
GROUP BY r.repost_item_id
) repost_week ON repost_week.track_id = t.track_id
-- -- join on subquery for saves by year
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 year')
GROUP BY r.save_item_id
) save_year ON save_year.track_id = t.track_id
-- -- join on subquery for saves by month
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 month')
GROUP BY r.save_item_id
) save_month ON save_month.track_id = t.track_id
-- -- join on subquery for saves by week
LEFT OUTER JOIN (
SELECT
r.save_item_id as track_id,
count(r.save_item_id) as repost_count
FROM
saves r
WHERE
r.is_current is True AND
r.save_type = 'track' AND
r.is_delete is False AND
r.created_at > (now() - interval '1 week')
GROUP BY r.save_item_id
) save_week ON save_week.track_id = t.track_id
LEFT OUTER JOIN (
SELECT
save_and_reposts.item_id as track_id,
sum(au.follower_count) as karma
FROM
(
select
r_and_s.user_id,
r_and_s.item_id
from
(select
user_id,
repost_item_id as item_id
from
reposts
where
is_delete is false AND
is_current is true AND
repost_type = 'track'
union all
select
user_id,
save_item_id as item_id
from
saves
where
is_delete is false AND
is_current is true AND
save_type = 'track'
) r_and_s
join
users
on r_and_s.user_id = users.user_id
where
users.cover_photo is not null AND
users.profile_picture is not null AND
users.bio is not null
) save_and_reposts
JOIN
aggregate_user au
ON
save_and_reposts.user_id = au.user_id
GROUP BY save_and_reposts.item_id
) karma ON karma.track_id = t.track_id
WHERE
t.is_current is True AND
t.is_delete is False AND
t.is_unlisted is False AND
t.stem_of is Null;
CREATE INDEX trending_params_track_id_idx ON trending_params (track_id);
commit;
"""
conn.execute(query)
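# Hedged usage sketch (not part of the original migration): once the
# trending_params table built above is populated, downstream code can rank
# tracks by its precomputed columns. Column names are taken from the SELECT
# list above; `conn` is assumed to be the same connection used for the DDL.
#
# top_tracks = conn.execute(
#     """
#     SELECT track_id, save_count, repost_week_count, karma
#     FROM trending_params
#     ORDER BY karma DESC
#     LIMIT 10;
#     """
# )
# for row in top_tracks:
#     print(row)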
| 40.51073
| 114
| 0.471448
| 2,065
| 18,878
| 4.061985
| 0.079903
| 0.047926
| 0.032427
| 0.045303
| 0.841202
| 0.833929
| 0.827015
| 0.811636
| 0.800668
| 0.800668
| 0
| 0.010691
| 0.484691
| 18,878
| 465
| 115
| 40.597849
| 0.851563
| 0.009111
| 0
| 0.864055
| 0
| 0
| 0.985453
| 0.075837
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004608
| false
| 0
| 0.002304
| 0
| 0.006912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1b20ae37f1f0b68baa57075525d760225dacef3
| 8,905
|
py
|
Python
|
api/resources.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 121
|
2015-01-01T23:31:36.000Z
|
2021-05-27T04:24:44.000Z
|
api/resources.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 1
|
2021-06-10T23:44:37.000Z
|
2021-06-10T23:44:37.000Z
|
api/resources.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 31
|
2015-01-13T00:23:33.000Z
|
2017-05-13T21:50:29.000Z
|
from django.core.urlresolvers import reverse
from djangorestframework.compat import View
from djangorestframework.mixins import ResponseMixin
from djangorestframework.renderers import JSONRenderer
from django.contrib.auth.models import User
from djangorestframework.resources import ModelResource
from djangorestframework.response import Response
from accounts.models import UserProfile
from videos.models import Video, Channel
def is_allowed(user):
"""
"""
return (user.userprofile.is_paid) or (user.userprofile.is_using_trial) or (user.userprofile.api_key)
class VideoResource(ResponseMixin, View):
"""
Returns all videos under the account associated with the api_key provided.
The response is formatted as JSON.
"""
renderers = [JSONRenderer,]
csrf_exempt = True
def get(self, request):
key = request.GET.get('api_key', None)
account_id = -1
if key:
userprofile = None
try:
userprofile = UserProfile.objects.get(api_key=key)
account_id = userprofile.user_id
except UserProfile.DoesNotExist:
pass
if userprofile:
user = None
try:
user = User.objects.get(pk=account_id)
except User.DoesNotExist:
pass
if user:
if is_allowed(user):
videos = Video.objects.filter(uploader__id=account_id)
json_videos = []
for video in videos:
channel_name = None
if video.channel:
channel_name = video.channel.name
json_videos.append(
{
'id':video.id,
'channel':channel_name,
'url':video.get_absolute_url(),
'title':video.title,
'embed_code':video.get_embed_code()
}
)
response = Response(200, {'success':True,'videos':json_videos} )
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(400)
return self.render(response)
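# Example request against this resource (hypothetical URL routing; the actual
# urlconf is not part of this file):
#
#   GET /api/videos/?api_key=<your-api-key>
#
# A 200 response carries {'success': True, 'videos': [...]}; a missing key
# yields 400, while an invalid or unauthorized key yields 401.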
class ChannelVideoResource(ResponseMixin, View):
"""
Returns all videos under the given channel of the account associated with the api_key provided.
The response is formatted as JSON.
"""
renderers = [JSONRenderer,]
csrf_exempt = True
def get(self, request):
channel_link = request.GET.get('channel_link', None)
key = request.GET.get('api_key', None)
account_id = -1
if key and channel_link:
userprofile = None
try:
userprofile = UserProfile.objects.get(api_key=key)
account_id = userprofile.user_id
except UserProfile.DoesNotExist:
pass
channel = None
try:
channel = Channel.objects.get(api_link=channel_link)
except Channel.DoesNotExist:
pass
if channel:
if (channel.owner.id == account_id) and is_allowed(channel.owner):
videos = Video.objects.filter(channel=channel)
json_videos = []
for video in videos:
channel_name = None
if video.channel:
channel_name = video.channel.name
json_videos.append(
{
'id':video.id,
'channel':channel_name,
'url':video.get_absolute_url(),
'title':video.title,
'embed_code':video.get_embed_code()
}
)
response = Response(200, {'success':True,'videos':json_videos} )
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(401)
return self.render(response)
class LatestVideoResource(ResponseMixin, View):
"""
Returns the latest video under the account associated with the api_key provided.
The response is formatted as JSON.
"""
renderers = [JSONRenderer,]
csrf_exempt = True
def get(self, request):
key = request.GET.get('api_key', None)
account_id = -1
if key:
userprofile = None
try:
userprofile = UserProfile.objects.get(api_key=key)
account_id = userprofile.user_id
except UserProfile.DoesNotExist:
pass
if userprofile:
user = None
try:
user = User.objects.get(pk=account_id)
except User.DoesNotExist:
pass
if user:
if is_allowed(user):
videos = Video.objects.filter(uploader__id=account_id).order_by('-created')[:1]
json_videos = []
for video in videos:
channel_name = None
if video.channel:
channel_name = video.channel.name
json_videos.append(
{
'id':video.id,
'channel':channel_name,
'url':video.get_absolute_url(),
'title':video.title,
'embed_code':video.get_embed_code()
}
)
response = Response(200, {'success':True,'videos':json_videos} )
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(400)
return self.render(response)
class LatestChannelVideoResource(ResponseMixin, View):
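"""
Returns the latest video under the given channel of the account associated with the api_key provided.
The response is formatted as JSON.
"""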
renderers = [JSONRenderer,]
csrf_exempt = True
def get(self, request):
key = request.GET.get('api_key', None)
channel_link = request.GET.get('channel_link', None)
account_id = -1
if key and channel_link:
userprofile = None
try:
userprofile = UserProfile.objects.get(api_key=key)
account_id = userprofile.user_id
except UserProfile.DoesNotExist:
pass
channel = None
try:
channel = Channel.objects.get(api_link=channel_link)
except Channel.DoesNotExist:
pass
if channel:
if (channel.owner.id == account_id) and is_allowed(channel.owner):
videos = Video.objects.filter(channel=channel).order_by('-created')[:1]
json_videos = []
for video in videos:
channel_name = None
if video.channel:
channel_name = video.channel.name
json_videos.append(
{
'id':video.id,
'channel':channel_name,
'url':video.get_absolute_url(),
'title':video.title,
'embed_code':video.get_embed_code()
}
)
response = Response(200, {'success':True,'videos':json_videos} )
else:
response = Response(401)
else:
response = Response(401)
else:
response = Response(401)
return self.render(response)
| 39.229075
| 104
| 0.428636
| 712
| 8,905
| 5.217697
| 0.125
| 0.077524
| 0.07537
| 0.074293
| 0.832571
| 0.831225
| 0.831225
| 0.811306
| 0.791386
| 0.791386
| 0
| 0.013541
| 0.502414
| 8,905
| 226
| 105
| 39.402655
| 0.82487
| 0.036159
| 0
| 0.837696
| 0
| 0
| 0.026779
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026178
| false
| 0.041885
| 0.04712
| 0
| 0.162304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a1d654cdbf9f078019f0d284acb1af615b604380
| 1,239
|
py
|
Python
|
tests/test_core_parser_base.py
|
ejoerns/flamingo
|
1d61c99c9ad34dd0a2a652f80783226051e07238
|
[
"Apache-2.0"
] | null | null | null |
tests/test_core_parser_base.py
|
ejoerns/flamingo
|
1d61c99c9ad34dd0a2a652f80783226051e07238
|
[
"Apache-2.0"
] | null | null | null |
tests/test_core_parser_base.py
|
ejoerns/flamingo
|
1d61c99c9ad34dd0a2a652f80783226051e07238
|
[
"Apache-2.0"
] | null | null | null |
def test_base_parser_import():
from flamingo.core.parser import ContentParser # NOQA
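# test_meta_data_parsing below exercises the content format: "key: value"
# header lines (a value may continue on following indented lines), followed
# by the body, which the parser stores under content['content_body'].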
def test_meta_data_parsing():
from io import StringIO
from flamingo.core.parser import ContentParser
from flamingo.core.data_model import Content
raw_content = StringIO("""
a: content of a
b: content of b
c:
a, b, c
content
""")
parser = ContentParser()
content = Content()
parser.parse(raw_content, content)
assert sorted(list(content.data.keys())) == ['a', 'b', 'c', 'content_body']
assert content['a'] == 'content of a'
assert content['b'] == 'content of b'
assert content['c'].strip() == 'a, b, c'
assert content['content_body'] == 'content'
# test with whitespaces
raw_content = StringIO("""
a: content of a
b: content of b
c:
a, b, c
content
""") # NOQA
parser = ContentParser()
content = Content()
parser.parse(raw_content, content)
assert sorted(list(content.data.keys())) == ['a', 'b', 'c', 'content_body']
assert content['a'] == 'content of a'
assert content['b'] == 'content of b'
assert content['c'].strip() == 'a, b, c'
assert content['content_body'] == 'content'
| 21.736842
| 79
| 0.599677
| 158
| 1,239
| 4.607595
| 0.196203
| 0.098901
| 0.024725
| 0.06044
| 0.821429
| 0.821429
| 0.708791
| 0.708791
| 0.708791
| 0.708791
| 0
| 0
| 0.252623
| 1,239
| 56
| 80
| 22.125
| 0.786177
| 0.02502
| 0
| 0.888889
| 0
| 0
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 1
| 0.055556
| false
| 0
| 0.138889
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1f58e5f63b2bbee8b0c6e6daf3639345ffeee47
| 167
|
py
|
Python
|
python/testData/surround/CustomFoldingRegionSeveralMethods_after.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/surround/CustomFoldingRegionSeveralMethods_after.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/surround/CustomFoldingRegionSeveralMethods_after.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 1
|
2020-11-27T10:36:50.000Z
|
2020-11-27T10:36:50.000Z
|
class C:
def m1(self):
pass
# <editor-fold desc="Description">
def m2(self):
pass
def m3(self):
pass
# </editor-fold>
| 15.181818
| 38
| 0.479042
| 20
| 167
| 4
| 0.6
| 0.3
| 0.35
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.389222
| 167
| 11
| 39
| 15.181818
| 0.754902
| 0.281437
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
62983c086dd7e849d4f05d580b280f039ed2b429
| 18,686
|
py
|
Python
|
tests/test_decompressor_stream_reader.py
|
odidev/python-zstandard
|
477776e6019478ca1c0b5777b073afbec70975f5
|
[
"BSD-3-Clause"
] | 316
|
2016-09-04T19:00:27.000Z
|
2022-03-30T09:52:10.000Z
|
tests/test_decompressor_stream_reader.py
|
odidev/python-zstandard
|
477776e6019478ca1c0b5777b073afbec70975f5
|
[
"BSD-3-Clause"
] | 154
|
2016-09-30T03:12:16.000Z
|
2022-03-02T21:08:55.000Z
|
tests/test_decompressor_stream_reader.py
|
odidev/python-zstandard
|
477776e6019478ca1c0b5777b073afbec70975f5
|
[
"BSD-3-Clause"
] | 67
|
2016-11-04T13:03:20.000Z
|
2022-03-28T22:19:51.000Z
|
import io
import os
import unittest
import zstandard as zstd
from .common import (
CustomBytesIO,
)
class TestDecompressor_stream_reader(unittest.TestCase):
def test_context_manager(self):
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(b"foo") as reader:
with self.assertRaisesRegex(
ValueError, "cannot __enter__ multiple times"
):
with reader as reader2:
pass
def test_not_implemented(self):
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(b"foo") as reader:
with self.assertRaises(io.UnsupportedOperation):
reader.readline()
with self.assertRaises(io.UnsupportedOperation):
reader.readlines()
with self.assertRaises(io.UnsupportedOperation):
iter(reader)
with self.assertRaises(io.UnsupportedOperation):
next(reader)
with self.assertRaises(io.UnsupportedOperation):
reader.write(b"foo")
with self.assertRaises(io.UnsupportedOperation):
reader.writelines([])
def test_constant_methods(self):
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(b"foo") as reader:
self.assertFalse(reader.closed)
self.assertTrue(reader.readable())
self.assertFalse(reader.writable())
self.assertFalse(reader.seekable())
self.assertFalse(reader.isatty())
self.assertFalse(reader.closed)
self.assertIsNone(reader.flush())
self.assertFalse(reader.closed)
self.assertTrue(reader.closed)
def test_read_closed(self):
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(b"foo") as reader:
reader.close()
self.assertTrue(reader.closed)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.read(1)
def test_read_sizes(self):
cctx = zstd.ZstdCompressor()
foo = cctx.compress(b"foo")
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(foo) as reader:
with self.assertRaisesRegex(
ValueError, "cannot read negative amounts less than -1"
):
reader.read(-2)
self.assertEqual(reader.read(0), b"")
self.assertEqual(reader.read(), b"foo")
def test_read_buffer(self):
cctx = zstd.ZstdCompressor()
source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame) as reader:
self.assertEqual(reader.tell(), 0)
# We should get entire frame in one read.
result = reader.read(8192)
self.assertEqual(result, source)
self.assertEqual(reader.tell(), len(source))
# Read after EOF should return empty bytes.
self.assertEqual(reader.read(1), b"")
self.assertEqual(reader.tell(), len(result))
self.assertTrue(reader.closed)
def test_read_buffer_small_chunks(self):
cctx = zstd.ZstdCompressor()
source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(frame, read_size=1) as reader:
while True:
chunk = reader.read(1)
if not chunk:
break
chunks.append(chunk)
self.assertEqual(reader.tell(), sum(map(len, chunks)))
self.assertEqual(b"".join(chunks), source)
def test_read_stream(self):
cctx = zstd.ZstdCompressor()
source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(io.BytesIO(frame)) as reader:
self.assertEqual(reader.tell(), 0)
chunk = reader.read(8192)
self.assertEqual(chunk, source)
self.assertEqual(reader.tell(), len(source))
self.assertEqual(reader.read(1), b"")
self.assertEqual(reader.tell(), len(source))
self.assertFalse(reader.closed)
self.assertTrue(reader.closed)
def test_read_stream_small_chunks(self):
cctx = zstd.ZstdCompressor()
source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
chunks = []
with dctx.stream_reader(io.BytesIO(frame), read_size=1) as reader:
while True:
chunk = reader.read(1)
if not chunk:
break
chunks.append(chunk)
self.assertEqual(reader.tell(), sum(map(len, chunks)))
self.assertEqual(b"".join(chunks), source)
def test_close(self):
foo = zstd.ZstdCompressor().compress(b"foo" * 1024)
buffer = io.BytesIO(foo)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(buffer)
reader.read(3)
self.assertFalse(reader.closed)
self.assertFalse(buffer.closed)
reader.close()
self.assertTrue(reader.closed)
self.assertTrue(buffer.closed)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.read()
with self.assertRaisesRegex(ValueError, "stream is closed"):
with reader:
pass
# Context manager exit should not close stream.
buffer = io.BytesIO(foo)
reader = dctx.stream_reader(buffer)
with reader:
reader.read(3)
self.assertTrue(reader.closed)
self.assertTrue(buffer.closed)
# Context manager exit should close stream if an exception raised.
buffer = io.BytesIO(foo)
reader = dctx.stream_reader(buffer)
with self.assertRaisesRegex(Exception, "ignore"):
with reader:
reader.read(3)
raise Exception("ignore")
self.assertTrue(reader.closed)
self.assertTrue(buffer.closed)
# Test with non-file source variant.
with dctx.stream_reader(foo) as reader:
reader.read(3)
self.assertFalse(reader.closed)
self.assertTrue(reader.closed)
def test_close_closefd_false(self):
foo = zstd.ZstdCompressor().compress(b"foo" * 1024)
buffer = io.BytesIO(foo)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(buffer, closefd=False)
reader.read(3)
self.assertFalse(reader.closed)
self.assertFalse(buffer.closed)
reader.close()
self.assertTrue(reader.closed)
self.assertFalse(buffer.closed)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.read()
with self.assertRaisesRegex(ValueError, "stream is closed"):
with reader:
pass
# Context manager exit should not close stream.
buffer = io.BytesIO(foo)
reader = dctx.stream_reader(buffer, closefd=False)
with reader:
reader.read(3)
self.assertTrue(reader.closed)
self.assertFalse(buffer.closed)
# Context manager exit should close stream if an exception raised.
buffer = io.BytesIO(foo)
reader = dctx.stream_reader(buffer, closefd=False)
with self.assertRaisesRegex(Exception, "ignore"):
with reader:
reader.read(3)
raise Exception("ignore")
self.assertTrue(reader.closed)
self.assertFalse(buffer.closed)
# Test with non-file source variant.
with dctx.stream_reader(foo, closefd=False) as reader:
reader.read(3)
self.assertFalse(reader.closed)
self.assertTrue(reader.closed)
def test_read_after_exit(self):
cctx = zstd.ZstdCompressor()
frame = cctx.compress(b"foo" * 60)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame) as reader:
while reader.read(16):
pass
self.assertTrue(reader.closed)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.read(10)
def test_illegal_seeks(self):
cctx = zstd.ZstdCompressor()
frame = cctx.compress(b"foo" * 60)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame) as reader:
with self.assertRaisesRegex(
OSError, "cannot seek to negative position"
):
reader.seek(-1, os.SEEK_SET)
reader.read(1)
with self.assertRaisesRegex(
OSError, "cannot seek zstd decompression stream backwards"
):
reader.seek(0, os.SEEK_SET)
with self.assertRaisesRegex(
OSError, "cannot seek zstd decompression stream backwards"
):
reader.seek(-1, os.SEEK_CUR)
with self.assertRaisesRegex(
OSError,
"zstd decompression streams cannot be seeked with SEEK_END",
):
reader.seek(0, os.SEEK_END)
reader.close()
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.seek(4, os.SEEK_SET)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.seek(0)
def test_seek(self):
source = b"foobar" * 60
cctx = zstd.ZstdCompressor()
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
with dctx.stream_reader(frame) as reader:
reader.seek(3)
self.assertEqual(reader.read(3), b"bar")
reader.seek(4, os.SEEK_CUR)
self.assertEqual(reader.read(2), b"ar")
def test_no_context_manager(self):
source = b"foobar" * 60
cctx = zstd.ZstdCompressor()
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(frame)
self.assertEqual(reader.read(6), b"foobar")
self.assertEqual(reader.read(18), b"foobar" * 3)
self.assertFalse(reader.closed)
# Calling close prevents subsequent use.
reader.close()
self.assertTrue(reader.closed)
with self.assertRaisesRegex(ValueError, "stream is closed"):
reader.read(6)
def test_read_after_error(self):
source = io.BytesIO(b"")
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(source)
with reader:
reader.read(0)
with self.assertRaisesRegex(ValueError, "stream is closed"):
with reader:
pass
def test_partial_read(self):
# Inspired by https://github.com/indygreg/python-zstandard/issues/71.
buffer = io.BytesIO()
cctx = zstd.ZstdCompressor()
writer = cctx.stream_writer(buffer)
writer.write(bytearray(os.urandom(1000000)))
writer.flush(zstd.FLUSH_FRAME)
buffer.seek(0)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(buffer)
while True:
chunk = reader.read(8192)
if not chunk:
break
def test_read_multiple_frames(self):
cctx = zstd.ZstdCompressor()
source = io.BytesIO()
writer = cctx.stream_writer(source)
writer.write(b"foo")
writer.flush(zstd.FLUSH_FRAME)
writer.write(b"bar")
writer.flush(zstd.FLUSH_FRAME)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(source.getvalue())
self.assertEqual(reader.read(2), b"fo")
self.assertEqual(reader.read(2), b"o")
self.assertEqual(reader.read(2), b"ba")
self.assertEqual(reader.read(2), b"r")
source.seek(0)
reader = dctx.stream_reader(source)
self.assertEqual(reader.read(2), b"fo")
self.assertEqual(reader.read(2), b"o")
self.assertEqual(reader.read(2), b"ba")
self.assertEqual(reader.read(2), b"r")
reader = dctx.stream_reader(source.getvalue())
self.assertEqual(reader.read(3), b"foo")
self.assertEqual(reader.read(3), b"bar")
source.seek(0)
reader = dctx.stream_reader(source)
self.assertEqual(reader.read(3), b"foo")
self.assertEqual(reader.read(3), b"bar")
reader = dctx.stream_reader(source.getvalue())
self.assertEqual(reader.read(4), b"foo")
self.assertEqual(reader.read(4), b"bar")
source.seek(0)
reader = dctx.stream_reader(source)
self.assertEqual(reader.read(4), b"foo")
self.assertEqual(reader.read(4), b"bar")
reader = dctx.stream_reader(source.getvalue())
self.assertEqual(reader.read(128), b"foo")
self.assertEqual(reader.read(128), b"bar")
source.seek(0)
reader = dctx.stream_reader(source)
self.assertEqual(reader.read(128), b"foo")
self.assertEqual(reader.read(128), b"bar")
# Now tests for reads spanning frames.
reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
self.assertEqual(reader.read(3), b"foo")
self.assertEqual(reader.read(3), b"bar")
source.seek(0)
reader = dctx.stream_reader(source, read_across_frames=True)
self.assertEqual(reader.read(3), b"foo")
self.assertEqual(reader.read(3), b"bar")
reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
self.assertEqual(reader.read(6), b"foobar")
source.seek(0)
reader = dctx.stream_reader(source, read_across_frames=True)
self.assertEqual(reader.read(6), b"foobar")
reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
self.assertEqual(reader.read(7), b"foobar")
source.seek(0)
reader = dctx.stream_reader(source, read_across_frames=True)
self.assertEqual(reader.read(7), b"foobar")
reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
self.assertEqual(reader.read(128), b"foobar")
source.seek(0)
reader = dctx.stream_reader(source, read_across_frames=True)
self.assertEqual(reader.read(128), b"foobar")
def test_readinto(self):
cctx = zstd.ZstdCompressor()
foo = cctx.compress(b"foo")
dctx = zstd.ZstdDecompressor()
# Attempting to readinto() a non-writable buffer fails.
# The exact exception varies based on the backend.
reader = dctx.stream_reader(foo)
with self.assertRaises(Exception):
reader.readinto(b"foobar")
# readinto() with sufficiently large destination.
b = bytearray(1024)
reader = dctx.stream_reader(foo)
self.assertEqual(reader.readinto(b), 3)
self.assertEqual(b[0:3], b"foo")
self.assertEqual(reader.readinto(b), 0)
self.assertEqual(b[0:3], b"foo")
# readinto() with small reads.
b = bytearray(1024)
reader = dctx.stream_reader(foo, read_size=1)
self.assertEqual(reader.readinto(b), 3)
self.assertEqual(b[0:3], b"foo")
# Too small destination buffer.
b = bytearray(2)
reader = dctx.stream_reader(foo)
self.assertEqual(reader.readinto(b), 2)
self.assertEqual(b[:], b"fo")
def test_readinto1(self):
cctx = zstd.ZstdCompressor()
foo = cctx.compress(b"foo")
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(foo)
with self.assertRaises(Exception):
reader.readinto1(b"foobar")
# Sufficiently large destination.
b = bytearray(1024)
reader = dctx.stream_reader(foo)
self.assertEqual(reader.readinto1(b), 3)
self.assertEqual(b[0:3], b"foo")
self.assertEqual(reader.readinto1(b), 0)
self.assertEqual(b[0:3], b"foo")
# readinto() with small reads.
b = bytearray(1024)
reader = dctx.stream_reader(foo, read_size=1)
self.assertEqual(reader.readinto1(b), 3)
self.assertEqual(b[0:3], b"foo")
# Too small destination buffer.
b = bytearray(2)
reader = dctx.stream_reader(foo)
self.assertEqual(reader.readinto1(b), 2)
self.assertEqual(b[:], b"fo")
def test_readall(self):
cctx = zstd.ZstdCompressor()
foo = cctx.compress(b"foo")
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(foo)
self.assertEqual(reader.readall(), b"foo")
def test_read1(self):
cctx = zstd.ZstdCompressor()
foo = cctx.compress(b"foo")
dctx = zstd.ZstdDecompressor()
b = CustomBytesIO(foo)
reader = dctx.stream_reader(b)
self.assertEqual(reader.read1(), b"foo")
self.assertEqual(b._read_count, 1)
b = CustomBytesIO(foo)
reader = dctx.stream_reader(b)
self.assertEqual(reader.read1(0), b"")
self.assertEqual(reader.read1(2), b"fo")
self.assertEqual(b._read_count, 1)
self.assertEqual(reader.read1(1), b"o")
self.assertEqual(b._read_count, 1)
self.assertEqual(reader.read1(1), b"")
self.assertEqual(b._read_count, 2)
def test_read_lines(self):
cctx = zstd.ZstdCompressor()
source = b"\n".join(
("line %d" % i).encode("ascii") for i in range(1024)
)
frame = cctx.compress(source)
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(frame)
tr = io.TextIOWrapper(reader, encoding="utf-8")
lines = []
for line in tr:
lines.append(line.encode("utf-8"))
self.assertEqual(len(lines), 1024)
self.assertEqual(b"".join(lines), source)
reader = dctx.stream_reader(frame)
tr = io.TextIOWrapper(reader, encoding="utf-8")
lines = tr.readlines()
self.assertEqual(len(lines), 1024)
self.assertEqual("".join(lines).encode("utf-8"), source)
reader = dctx.stream_reader(frame)
tr = io.TextIOWrapper(reader, encoding="utf-8")
lines = []
while True:
line = tr.readline()
if not line:
break
lines.append(line.encode("utf-8"))
self.assertEqual(len(lines), 1024)
self.assertEqual(b"".join(lines), source)
| 31.510961
| 79
| 0.595205
| 2,116
| 18,686
| 5.1862
| 0.095463
| 0.112083
| 0.114817
| 0.078185
| 0.848369
| 0.809732
| 0.781757
| 0.746309
| 0.710042
| 0.694369
| 0
| 0.017762
| 0.288933
| 18,686
| 592
| 80
| 31.564189
| 0.808158
| 0.043776
| 0
| 0.757075
| 0
| 0
| 0.041349
| 0
| 0
| 0
| 0
| 0
| 0.339623
| 1
| 0.054245
| false
| 0.011792
| 0.011792
| 0
| 0.068396
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62aa2c2465ec06df3dd23e72da9632b91edb5253
| 5,358
|
py
|
Python
|
Freeze/setup_basic.py
|
wert23239/Meta-MarIO-2
|
54e19f4ba166180e3ede026d87824e8ee8877520
|
[
"MIT"
] | 17
|
2017-07-05T15:00:33.000Z
|
2020-08-14T01:14:57.000Z
|
Freeze/setup_basic.py
|
wert23239/Meta-MarIO-2
|
54e19f4ba166180e3ede026d87824e8ee8877520
|
[
"MIT"
] | null | null | null |
Freeze/setup_basic.py
|
wert23239/Meta-MarIO-2
|
54e19f4ba166180e3ede026d87824e8ee8877520
|
[
"MIT"
] | 7
|
2017-07-18T03:22:32.000Z
|
2021-11-26T00:54:18.000Z
|
# -*- coding: utf-8 -*-
# A very simple setup script to create a single executable
#
# hello.py is a very simple 'Hello, world' type script which also displays the
# environment in which the script runs
# Run the build process by running the command 'python setup.py build'
#
# If everything works well you should find a subdirectory in the build
# subdirectory that contains the files needed to run the script without Python
from cx_Freeze import setup, Executable
import os
os.environ['TCL_LIBRARY'] = "C:\\ProgramData\\Miniconda3\\tcl\\tcl8.6"
os.environ['TK_LIBRARY'] = "C:\\ProgramData\\Miniconda3\\tcl\\tk8.6"
additional_mods = []
include_files=[
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\cilkrts20.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\ifdlg100.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libchkp.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libicaf.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libifcoremd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libifcoremdd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libifcorert.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libifcorertd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libifportmd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libimalloc.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libiomp5md.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libiompstubs5md.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libmmd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libmmdd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\libmpx.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\liboffload.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_avx.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_avx2.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_avx512.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_avx512_mic.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_core.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_def.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_intel_thread.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_mc.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_mc3.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_msg.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_rt.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_sequential.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_tbb_thread.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_avx.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_avx2.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_avx512.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_avx512_mic.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_cmpt.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_def.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_mc.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_mc2.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\mkl_vml_mc3.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\Library\\bin\\svml_dispmd.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\DLL\\tcl86t.dll",
"C:\\Users\\Jonny\\AppData\\Local\\conda\\conda\\envs\\deeplearning\\DLL\\tk86t.dll"
]
disclude_mods=['Tkinter']
package_mods = ['numpy', 'matplotlib', 'tkinter']
executables = [
Executable('test.py')
]
setup(name='tet',
version='0.1',
description='Sample cx_Freeze script',
options = {'build_exe': {'includes': additional_mods, 'include_files': include_files, 'packages': package_mods, 'excludes': disclude_mods}},
executables=executables
)
| 71.44
| 143
| 0.688503
| 709
| 5,358
| 5.132581
| 0.180536
| 0.067601
| 0.123935
| 0.202803
| 0.756801
| 0.739214
| 0.739214
| 0.739214
| 0.739214
| 0.739214
| 0
| 0.007517
| 0.081374
| 5,358
| 74
| 144
| 72.405405
| 0.731816
| 0.075961
| 0
| 0
| 0
| 0.694915
| 0.847781
| 0.820666
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.033898
| 0
| 0.033898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
62b247dfe10ae2876917d349f541300f80deeedb
| 6,902
|
py
|
Python
|
smtbx/refinement/restraints/tests/test_adp_restraints.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
smtbx/refinement/restraints/tests/test_adp_restraints.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
smtbx/refinement/restraints/tests/test_adp_restraints.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from __future__ import absolute_import, division, print_function
from libtbx.test_utils import approx_equal
from smtbx.refinement.restraints import adp_restraints
from smtbx import development
from cctbx.array_family import flex
from cctbx import crystal
def get_pair_sym_table(xray_structure):
asu_mappings = xray_structure.asu_mappings(buffer_thickness=3.5)
pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
scattering_types = xray_structure.scatterers().extract_scattering_types()
pair_asu_table.add_covalent_pairs(
scattering_types, exclude_scattering_types=flex.std_string(("H","D")))
return pair_asu_table.extract_pair_sym_table()
def test_adp_similarity():
xray_structure = development.sucrose()
pair_sym_table = get_pair_sym_table(xray_structure)
for table in (None,pair_sym_table):
if table is None: xs = xray_structure
else: xs = None
restraints = \
adp_restraints.adp_similarity_restraints(
xray_structure=xs,
pair_sym_table=table)
assert restraints.proxies.size() == 24
i_seqs = (9,14,28,32,36,38)
restraints = \
adp_restraints.adp_similarity_restraints(
xray_structure=xs,
pair_sym_table=table,
i_seqs=i_seqs)
expected_i_seqs = ((9,32),(14,36),(32,36),(36,38))
expected_weights = (625,156.25,625,625)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
# add more restraints to same shared proxy
i_seqs = (3,23,40,42)
restraints = \
adp_restraints.adp_similarity_restraints(
xray_structure=xs,
pair_sym_table=table,
proxies=proxies,
i_seqs=i_seqs)
expected_i_seqs = (
(9,32),(14,36),(32,36),(36,38),(3,23),(40,42))
expected_weights = (625,156.25,625,625,156.25,625)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
def test_rigid_bond():
xray_structure = development.sucrose()
pair_sym_table = get_pair_sym_table(xray_structure)
for table in (None,pair_sym_table):
if table is None: xs = xray_structure
else: xs = None
restraints = \
adp_restraints.rigid_bond_restraints(
xray_structure=xs,
pair_sym_table=table)
assert restraints.proxies.size() == 60
i_seqs = (9,14,28,32,36,38)
restraints = \
adp_restraints.rigid_bond_restraints(
xray_structure=xs,
pair_sym_table=table,
i_seqs=i_seqs)
expected_i_seqs = (
(9,32),(9,36),(14,36),(14,32),(14,38),(32,36),(32,38),(36,38))
expected_weights = [10000]*len(expected_i_seqs)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
# add more restraints to same shared proxy
i_seqs = (10,40,42)
restraints = \
adp_restraints.rigid_bond_restraints(
xray_structure=xs,
pair_sym_table=table,
proxies=proxies,
i_seqs=i_seqs)
expected_i_seqs = (
(9,32),(9,36),(14,36),(14,32),(14,38),(32,36),
(32,38),(36,38),(10,42),(10,40),(40,42))
expected_weights = [10000]*len(expected_i_seqs)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
def test_isotropic_adp():
xray_structure = development.sucrose()
xray_structure.scatterers()[10].set_use_u_iso_only()
pair_sym_table = get_pair_sym_table(xray_structure)
for table in (None,pair_sym_table):
restraints = \
adp_restraints.isotropic_adp_restraints(
xray_structure=xray_structure,
pair_sym_table=table)
assert restraints.proxies.size() == 22
i_seqs = (9,14,28,32,36,38)
expected_weights = (100,25,100,100,100,100)
restraints = \
adp_restraints.isotropic_adp_restraints(
xray_structure=xray_structure,
pair_sym_table=table,
i_seqs=i_seqs)
proxies = restraints.proxies
assert proxies.size() == len(i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs[0], i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
# add more restraints to same shared proxy
i_seqs = (3,5,42)
restraints = \
adp_restraints.isotropic_adp_restraints(
xray_structure=xray_structure,
pair_sym_table=table,
proxies=proxies,
i_seqs=i_seqs)
expected_i_seqs = (9,14,28,32,36,38,3,5,42)
expected_weights = (100,25,100,100,100,100,25,25,100)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs[0], expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
def test_rigu():
xray_structure = development.sucrose()
pair_sym_table = get_pair_sym_table(xray_structure)
for table in (None,pair_sym_table):
if table is None: xs = xray_structure
else: xs = None
restraints = \
adp_restraints.rigu_restraints(
xray_structure=xs,
pair_sym_table=table)
assert restraints.proxies.size() == 60
i_seqs = (9,14,28,32,36,38)
restraints = \
adp_restraints.rigu_restraints(
xray_structure=xs,
pair_sym_table=table,
i_seqs=i_seqs)
expected_i_seqs = (
(9,32),(9,36),(14,36),(14,32),(14,38),(32,36),(32,38),(36,38))
expected_weights = [62500]*len(expected_i_seqs)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
# add more restraints to same shared proxy
i_seqs = (10,40,42)
restraints = \
adp_restraints.rigu_restraints(
xray_structure=xs,
pair_sym_table=table,
proxies=proxies,
i_seqs=i_seqs)
expected_i_seqs = (
(9,32),(9,36),(14,36),(14,32),(14,38),(32,36),
(32,38),(36,38),(10,42),(10,40),(40,42))
expected_weights = [62500]*len(expected_i_seqs)
proxies = restraints.proxies
assert proxies.size() == len(expected_i_seqs)
for i in range(proxies.size()):
assert approx_equal(proxies[i].i_seqs, expected_i_seqs[i])
assert approx_equal(proxies[i].weight, expected_weights[i])
| 38.132597
| 75
| 0.688061
| 1,005
| 6,902
| 4.446766
| 0.100498
| 0.06601
| 0.069814
| 0.085925
| 0.855672
| 0.854777
| 0.848512
| 0.836205
| 0.808906
| 0.797941
| 0
| 0.06681
| 0.188931
| 6,902
| 180
| 76
| 38.344444
| 0.731511
| 0.023616
| 0
| 0.811765
| 0
| 0
| 0.000297
| 0
| 0
| 0
| 0
| 0
| 0.164706
| 1
| 0.029412
| false
| 0
| 0.035294
| 0
| 0.070588
| 0.005882
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62f272a3adb4038b3f712a93a6fe7ba468ed8288
| 5,786
|
py
|
Python
|
deeprob/spn/learning/splitting/cluster.py
|
fedelux3/deeprob-kit
|
3bae5ab1a36b13ee1317f650e68e2d310dbf403c
|
[
"MIT"
] | null | null | null |
deeprob/spn/learning/splitting/cluster.py
|
fedelux3/deeprob-kit
|
3bae5ab1a36b13ee1317f650e68e2d310dbf403c
|
[
"MIT"
] | null | null | null |
deeprob/spn/learning/splitting/cluster.py
|
fedelux3/deeprob-kit
|
3bae5ab1a36b13ee1317f650e68e2d310dbf403c
|
[
"MIT"
] | null | null | null |
import warnings
import numpy as np
from typing import Union, Type, List
from sklearn import mixture, cluster
from sklearn.exceptions import ConvergenceWarning
from deeprob.spn.structure.leaf import Leaf, LeafType
from deeprob.utils.data import mixed_ohe_data
def gmm(
data: np.ndarray,
distributions: List[Type[Leaf]],
domains: List[Union[list, tuple]],
random_state: np.random.RandomState,
n: int = 2
) -> np.ndarray:
"""
Execute GMM clustering on some data.
:param data: The data.
:param distributions: The data distributions.
:param domains: The data domains.
:param random_state: The random state.
:param n: The number of clusters.
:return: An array where each element is the cluster where the corresponding data belong.
"""
# Convert the data using One Hot Encoding, in case of non-binary discrete features
if any([len(d) > 2 for d in domains]):
data = mixed_ohe_data(data, domains)
# Apply GMM
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=ConvergenceWarning) # Ignore convergence warnings
return mixture.GaussianMixture(n, n_init=3, random_state=random_state).fit_predict(data)
def kmeans(
data: np.ndarray,
distributions: List[Type[Leaf]],
domains: List[Union[list, tuple]],
random_state: np.random.RandomState,
n: int = 2
) -> np.ndarray:
"""
Execute K-Means clustering on some data.
:param data: The data.
:param distributions: The data distributions.
:param domains: The data domains.
:param random_state: The random state.
:param n: The number of clusters.
:return: An array where each element is the cluster where the corresponding data belong.
"""
# Convert the data using One Hot Encoding, in case of non-binary discrete features
if any([len(d) > 2 for d in domains]):
data = mixed_ohe_data(data, domains)
# Apply K-Means
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=ConvergenceWarning) # Ignore convergence warnings
return cluster.KMeans(n, n_init=5, random_state=random_state).fit_predict(data)
def kmeans_mb(
data: np.ndarray,
distributions: List[Type[Leaf]],
domains: List[Union[list, tuple]],
random_state: np.random.RandomState,
n: int = 2
) -> np.ndarray:
"""
Execute MiniBatch K-Means clustering on some data.
:param data: The data.
:param distributions: The data distributions.
:param domains: The data domains.
:param random_state: The random state.
:param n: The number of clusters.
:return: An array where each element is the cluster where the corresponding data belong.
"""
# Convert the data using One Hot Encoding, in case of non-binary discrete features
if any([len(d) > 2 for d in domains]):
data = mixed_ohe_data(data, domains)
# Apply K-Means MiniBatch
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=ConvergenceWarning) # Ignore convergence warnings
warnings.simplefilter(action='ignore', category=UserWarning) # Ignore user warnings
return cluster.MiniBatchKMeans(n, n_init=5, random_state=random_state).fit_predict(data)
def dbscan(
data: np.ndarray,
distributions: List[Type[Leaf]],
domains: List[Union[list, tuple]],
random_state: np.random.RandomState,
n: int = 2
) -> np.ndarray:
"""
Execute DBSCAN clustering on some data (only on discrete data).
:param data: The data.
:param distributions: The data distributions.
:param domains: The data domains.
:param random_state: The random state.
:param n: The number of clusters.
:return: An array where each element is the cluster where the corresponding data belong.
:raises ValueError: If the leaf distributions are NOT discrete.
"""
# Control if distribution are binary
if not all([x.LEAF_TYPE == LeafType.DISCRETE for x in distributions]):
raise ValueError('DBScan clustering can be applied only on discrete attributes')
# Convert the data using One Hot Encoding, in case of non-binary discrete features
if any([len(d) > 2 for d in domains]):
data = mixed_ohe_data(data, domains)
# Apply DBSCAN
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=ConvergenceWarning) # Ignore convergence warnings
return cluster.DBSCAN(eps=0.25, n_jobs=-1).fit_predict(data)
def wald(
data: np.ndarray,
distributions: List[Type[Leaf]],
domains: List[Union[list, tuple]],
random_state: np.random.RandomState,
n: int = 2
) -> np.ndarray:
"""
Execute Ward (Hierarchical) clustering on some data (only discrete data).
:param data: The data.
:param distributions: The data distributions.
:param domains: The data domains.
:param random_state: The random state.
:param n: The number of clusters.
:return: An array where each element is the cluster where the corresponding data belong.
:raises ValueError: If the leaf distributions are NOT discrete.
"""
# Control if distribution are binary
if not all([x.LEAF_TYPE == LeafType.DISCRETE for x in distributions]):
raise ValueError('Ward clustering can be applied only on discrete attributes')
# Convert the data using One Hot Encoding, in case of non-binary discrete features
if any([len(d) > 2 for d in domains]):
data = mixed_ohe_data(data, domains)
# Apply Ward
with warnings.catch_warnings():
warnings.simplefilter(action='ignore', category=ConvergenceWarning) # Ignore convergence warnings
return cluster.AgglomerativeClustering(n, linkage='ward').fit_predict(data)
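# Hedged usage sketch (hypothetical data, not part of the original module).
# All splitting functions above share one signature, so they are
# interchangeable. Domains of length 2 skip the one-hot-encoding branch.
if __name__ == '__main__':
    _rs = np.random.RandomState(42)
    _data = _rs.randn(100, 4)
    _labels = kmeans(_data, distributions=[], domains=[(0.0, 1.0)] * 4, random_state=_rs, n=2)
    print(_labels[:10])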
| 36.620253
| 106
| 0.699965
| 769
| 5,786
| 5.208062
| 0.152146
| 0.057678
| 0.017978
| 0.050936
| 0.882896
| 0.870911
| 0.860924
| 0.860924
| 0.860924
| 0.84819
| 0
| 0.003717
| 0.209471
| 5,786
| 157
| 107
| 36.853503
| 0.871885
| 0.420498
| 0
| 0.75
| 0
| 0
| 0.051118
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069444
| false
| 0
| 0.097222
| 0
| 0.236111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a019fe1d5c6c3295069517b9ad8eac7282f6fa9
| 1,241
|
py
|
Python
|
src/Problem8.py
|
BrunoMNDantas/ProjectEuler
|
d7548931c8c450fc8c53a939df349575b9994702
|
[
"MIT"
] | null | null | null |
src/Problem8.py
|
BrunoMNDantas/ProjectEuler
|
d7548931c8c450fc8c53a939df349575b9994702
|
[
"MIT"
] | null | null | null |
src/Problem8.py
|
BrunoMNDantas/ProjectEuler
|
d7548931c8c450fc8c53a939df349575b9994702
|
[
"MIT"
] | null | null | null |
from functools import reduce
num = """7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"""
largest = 0
for i in range(len(num) - 12):
window = map(int, num[i:i+13])
product = reduce(lambda a,b: a*b, window)
if product > largest:
largest = product
print(largest)
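# An equivalent formulation with math.prod (Python 3.8+), shown as a
# commented alternative rather than part of the original solution:
#
# import math
# largest = max(math.prod(map(int, num[i:i + 13])) for i in range(len(num) - 12))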
| 73
| 1,012
| 0.923449
| 38
| 1,241
| 30.157895
| 0.605263
| 0.00349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.85182
| 0.048348
| 1,241
| 16
| 1,013
| 77.5625
| 0.118544
| 0
| 0
| 0
| 0
| 0
| 0.805802
| 0.805802
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c52c15da86e136ba16cf05ec84de30ca4ebdcc4d
| 8,634
|
py
|
Python
|
changelog_generator/tests/test_generator.py
|
nghialt/gitlab-changelog-generator
|
d9af4baa1d76ab9436548e47842eb80935f9a8bd
|
[
"MIT"
] | 3
|
2019-11-01T15:13:31.000Z
|
2020-02-03T06:27:16.000Z
|
changelog_generator/tests/test_generator.py
|
nghialt/gitlab-changelog-generator
|
d9af4baa1d76ab9436548e47842eb80935f9a8bd
|
[
"MIT"
] | 19
|
2018-06-17T21:07:32.000Z
|
2020-06-04T07:07:41.000Z
|
changelog_generator/tests/test_generator.py
|
nghialt/gitlab-changelog-generator
|
d9af4baa1d76ab9436548e47842eb80935f9a8bd
|
[
"MIT"
] | 5
|
2018-11-24T08:04:39.000Z
|
2020-10-28T19:54:29.000Z
|
import datetime
import mock
import unittest
from changelog_generator.generator import (
generate_changelog,
get_closed_issues_since_last_tag,
)
class TestGenerator(unittest.TestCase):
@mock.patch(
"changelog_generator.generator.get_closed_issues_since_last_tag"
)
@mock.patch("os.path.isfile")
@mock.patch("changelog_generator.generator.get_commits_since_date")
@mock.patch("changelog_generator.generator.get_last_commit_date")
def test_generate_changelog_existing(
self,
mock_get_commit_date,
mock_get_commits,
mock_is_file,
mock_closed_issues,
):
mock_is_file.return_value = True
mock_get_commit_date.return_value = "2018-06-10T14:01:45.000000+00:00"
mock_get_commits.return_value = [
{
"parent_ids": ["06f7e730ff5edcc5a955d939c1e39ac363ad3e41"],
"committed_date": "2018-06-10T14:01:44.000+00:00",
"message": "Test commit message",
}
]
mock_closed_issues.return_value = [
{
"title": "A Closed Issue",
"closed_at": "2018-06-10T14:01:44.000+00:00",
}
]
cli_args = {
"ip_address": "localhost",
"api_version": "4",
"project_group": "test-group",
"project": "test-project",
"branch_one": "release",
"branch_two": "master",
"version": "1",
"changelog": "N",
}
with mock.patch("builtins.open", mock.mock_open()) as mock_file:
result = generate_changelog(cli_args)
mock_file.assert_called_once_with(mock.ANY, "w")
mock_file_write_calls = [
mock.call(
f"## v1 ({datetime.datetime.now().strftime('%Y-%m-%d')})\n"
),
mock.call("* 2018-06-10 - Test commit message \n"),
mock.call("\n### Closed Issues\n"),
mock.call("* A Closed Issue"),
]
mock_file().write.assert_has_calls(mock_file_write_calls)
self.assertEqual(
result, "CHANGELOG_generated.md written successfully"
)
@mock.patch(
"changelog_generator.generator.get_closed_issues_since_last_tag"
)
@mock.patch("changelog_generator.generator.datetime")
@mock.patch("os.path.isfile")
@mock.patch("changelog_generator.generator.get_commits_since_date")
@mock.patch("changelog_generator.generator.get_last_commit_date")
def test_generate_changelog_update(
self,
mock_get_commit_date,
mock_get_commits,
mock_is_file,
mock_datetime,
mock_closed_issues,
):
mock_datetime.datetime.now.return_value = datetime.date(2018, 6, 18)
mock_is_file.return_value = True
mock_get_commit_date.return_value = "2018-06-10T14:01:45.000000+00:00"
mock_get_commits.return_value = [
{
"parent_ids": ["06f7e730ff5edcc5a955d939c1e39ac363ad3e41"],
"committed_date": "2018-06-10T14:01:44.000+00:00",
"message": "Test commit message",
}
]
mock_closed_issues.return_value = [
{
"title": "A Closed Issue",
"closed_at": "2018-06-10T14:01:44.000+00:00",
}
]
cli_args = {
"ip_address": "localhost",
"api_version": "4",
"project_group": "test-group",
"project": "test-project",
"branch_one": "release",
"branch_two": "master",
"version": "1",
"changelog": "Y",
}
with mock.patch(
"builtins.open", mock.mock_open(read_data="Existing data")
) as mock_file:
result = generate_changelog(cli_args)
mock_file.assert_called_with(mock.ANY, "w")
mock_file().write.assert_has_calls(
[
mock.call("## v1 (2018-06-18)\n"),
mock.call("* 2018-06-10 - Test commit message \n"),
mock.call("\n### Closed Issues\n"),
mock.call("* A Closed Issue"),
mock.call("\n"),
mock.call("Existing data"),
]
)
self.assertEqual(result, "CHANGELOG.md updated successfully")
@mock.patch(
"changelog_generator.generator.get_closed_issues_since_last_tag"
)
@mock.patch("os.path.isfile")
@mock.patch("changelog_generator.generator.get_commits_since_date")
@mock.patch("changelog_generator.generator.get_last_commit_date")
def test_generate_changelog_new(
self,
mock_get_commit_date,
mock_get_commits,
mock_is_file,
mock_closed_issues,
):
mock_is_file.return_value = False
mock_get_commit_date.return_value = "2018-06-10T14:01:45.000000+00:00"
mock_get_commits.return_value = [
{
"parent_ids": ["06f7e730ff5edcc5a955d939c1e39ac363ad3e41"],
"committed_date": "2018-06-10T14:01:44.000+00:00",
"message": "Test commit message",
}
]
mock_closed_issues.return_value = [
{
"title": "A Closed Issue",
"closed_at": "2018-06-10T14:01:44.000+00:00",
}
]
cli_args = {
"ip_address": "localhost",
"api_version": "4",
"project_group": "test-group",
"project": "test-project",
"branch_one": "release",
"branch_two": "master",
"version": "1",
"changelog": "N",
}
with mock.patch("builtins.open", mock.mock_open()) as mock_file:
result = generate_changelog(cli_args)
mock_file.assert_called_once_with(mock.ANY, "w")
mock_file_write_calls = [
mock.call(
f"## v1 ({datetime.datetime.now().strftime('%Y-%m-%d')})\n"
),
mock.call("* 2018-06-10 - Test commit message \n"),
mock.call("\n### Closed Issues\n"),
mock.call("* A Closed Issue"),
]
mock_file().write.assert_has_calls(mock_file_write_calls)
self.assertEqual(
result, "New CHANGELOG.md file written successfully"
)
@mock.patch("changelog_generator.generator.get_closed_issues_for_project")
@mock.patch("changelog_generator.generator.get_last_tagged_release_date")
def test_get_closed_issues_since_last_tag(
self, mock_release_date, mock_closed_project_issues
):
mock_release_date.return_value = "2018-06-10T14:01:44.000+00:00"
mock_closed_project_issues.return_value = [
{
"closed_at": "2050-06-10T14:01:44.000+00:00",
"title": "A Closed Issue",
"assignee": 1,
}
]
cli_args = {
"ip_address": "localhost",
"api_version": "4",
"project_group": "test-group",
"project": "test-project",
"branch_one": "release",
"branch_two": "master",
"version": "1",
"changelog": "N",
}
self.assertEqual(
get_closed_issues_since_last_tag(cli_args),
[
{
"closed_at": "2050-06-10T14:01:44.000+00:00",
"title": "A Closed Issue",
}
],
)
@mock.patch("changelog_generator.generator.get_closed_issues_for_project")
@mock.patch("changelog_generator.generator.get_last_tagged_release_date")
def test_get_closed_issues_since_last_tag_no_issues(
self, mock_release_date, mock_closed_project_issues
):
mock_release_date.return_value = "2018-06-10T14:01:44.000+00:00"
mock_closed_project_issues.return_value = [
{
"closed_at": "1990-06-10T14:01:44.000+00:00",
"title": "A Closed Issue",
"assignee": 1,
}
]
cli_args = {
"ip_address": "localhost",
"api_version": "4",
"project_group": "test-group",
"project": "test-project",
"branch_one": "release",
"branch_two": "master",
"version": "1",
"changelog": "N",
}
self.assertEqual(get_closed_issues_since_last_tag(cli_args), [])
| 35.240816
| 79
| 0.548066
| 921
| 8,634
| 4.827362
| 0.11835
| 0.040486
| 0.091093
| 0.08502
| 0.91408
| 0.908007
| 0.901934
| 0.892263
| 0.883941
| 0.883941
| 0
| 0.073486
| 0.328585
| 8,634
| 244
| 80
| 35.385246
| 0.693462
| 0
| 0
| 0.665198
| 1
| 0
| 0.328585
| 0.164582
| 0
| 0
| 0
| 0
| 0.048458
| 1
| 0.022026
| false
| 0
| 0.017621
| 0
| 0.044053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c53fc1094ee2b1d2deb48f45954b6913c7d8c311
| 1,977
|
py
|
Python
|
portfolio/migrations/0001_initial.py
|
aru456/Personal-Portfolio-Website
|
32979eb1b006285c93ac6381ca4e557e189ef0e3
|
[
"MIT"
] | null | null | null |
portfolio/migrations/0001_initial.py
|
aru456/Personal-Portfolio-Website
|
32979eb1b006285c93ac6381ca4e557e189ef0e3
|
[
"MIT"
] | null | null | null |
portfolio/migrations/0001_initial.py
|
aru456/Personal-Portfolio-Website
|
32979eb1b006285c93ac6381ca4e557e189ef0e3
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-05-28 18:48
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='about',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='achievements',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='experience',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='home',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('image', models.ImageField(blank=True, upload_to='portfolio/images/')),
],
),
]
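For orientation, a plausible portfolio/models.py that would generate this migration, reconstructed from the CreateModel operations above (the lowercase class names mirror the migration, though Django convention is CapWords; ImageField additionally requires Pillow):

from django.db import models

class about(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()

class achievements(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()

class contact(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()

class experience(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()

class home(models.Model):
    title = models.CharField(max_length=100)
    description = models.TextField()
    # blank=True makes the image optional in forms and admin.
    image = models.ImageField(blank=True, upload_to='portfolio/images/')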
| 35.303571 | 114 | 0.534143 | 175 | 1,977 | 5.914286 | 0.308571 | 0.101449 | 0.120773 | 0.111111 | 0.758454 | 0.758454 | 0.758454 | 0.758454 | 0.758454 | 0.758454 | 0 | 0.022455 | 0.324229 | 1,977 | 55 | 115 | 35.945455 | 0.752246 | 0.022762 | 0 | 0.729167 | 1 | 0 | 0.082902 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.020833 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3d9cf3fda194225eabfc1173613f5513f5c3043d | 192 | py | Python | Utils/__init__.py | Silvrash/codeForMe | b520d99c33b85df7ada14ccd2215bcd2df0d8a0c | ["Apache-2.0"] | null | null | null | Utils/__init__.py | Silvrash/codeForMe | b520d99c33b85df7ada14ccd2215bcd2df0d8a0c | ["Apache-2.0"] | null | null | null | Utils/__init__.py | Silvrash/codeForMe | b520d99c33b85df7ada14ccd2215bcd2df0d8a0c | ["Apache-2.0"] | null | null | null |
import os
def is_in_development_environment():
    '''
    Return True if the code is running in a development environment.
    '''
    # A DATABASE_URI in the environment signals production; its absence
    # means development, so the check inverts the lookup.
    return os.environ.get('DATABASE_URI') is None
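A short usage sketch (the import path is assumed from the file location):

from Utils import is_in_development_environment

# Hypothetical call site: pick a settings profile from the detected environment.
config_name = 'dev' if is_in_development_environment() else 'prod'
print(f"Loading {config_name} settings")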
| 21.333333 | 63 | 0.692708 | 27 | 192 | 4.777778 | 0.740741 | 0.341085 | 0.434109 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 192 | 8 | 64 | 24 | 0.848684 | 0.307292 | 0 | 0 | 0 | 0 | 0.162393 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
3da3d0b693e3db9d790a6c4318bdc60f34e0a606 | 24,159 | py | Python | aw_nas/weights_manager/ofa_backbone.py | A-LinCui/Discriminator-Guiding-Knowledge-Distillation-MAR | e8caad8de2a559b9c9532448bdcdedd566cb2cfa | ["MIT"] | null | null | null | aw_nas/weights_manager/ofa_backbone.py | A-LinCui/Discriminator-Guiding-Knowledge-Distillation-MAR | e8caad8de2a559b9c9532448bdcdedd566cb2cfa | ["MIT"] | null | null | null | aw_nas/weights_manager/ofa_backbone.py | A-LinCui/Discriminator-Guiding-Knowledge-Distillation-MAR | e8caad8de2a559b9c9532448bdcdedd566cb2cfa | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""Base class definition of OFA Backbone."""
import abc
import copy
import torch
from torch import nn
import torch.nn.functional as F  # F.adaptive_avg_pool2d is used in MobileNetV2Arch below
from aw_nas import Component
from aw_nas.ops import *
from aw_nas.ops.baseline_ops import MobileNetV2Block, MobileNetV3Block
from aw_nas.utils import make_divisible, feature_level_to_stage_index, expect
from aw_nas.utils.common_utils import _get_channel_mask
class FlexibleBlock(Component, nn.Module):
REGISTRY = "ofa_block"
def __init__(self, schedule_cfg=None):
super(FlexibleBlock, self).__init__(schedule_cfg)
nn.Module.__init__(self)
def reset_mask(self):
for m in self.modules():
if isinstance(m, FlexibleLayer):
m.reset_mask()
class FlexibleMobileNetV2Block(MobileNetV2Block, FlexibleBlock):
NAME = "mbv2_block"
def __init__(
self,
expansion,
C,
C_out,
stride,
kernel_sizes=(3, 5, 7),
do_kernel_transform=True,
affine=True,
activation="relu",
schedule_cfg=None,
):
FlexibleBlock.__init__(self, schedule_cfg)
self.activation = activation
C_inner = make_divisible(C * expansion, 8)
self.kernel_sizes = sorted(kernel_sizes)
self.kernel_size = self.kernel_sizes[-1]
self.do_kernel_transform = do_kernel_transform
self.affine = affine
inv_bottleneck = None
if expansion != 1:
inv_bottleneck = nn.Sequential(
FlexiblePointLinear(C, C_inner, 1, 1, 0),
FlexibleBatchNorm2d(C_inner, affine=affine),
get_op(activation)(),
)
depth_wise = nn.Sequential(
FlexibleDepthWiseConv(
C_inner,
self.kernel_sizes,
stride,
do_kernel_transform=do_kernel_transform,
),
FlexibleBatchNorm2d(C_inner, affine=affine),
get_op(activation)(),
)
point_linear = nn.Sequential(
FlexiblePointLinear(C_inner, C_out, 1, 1, 0),
FlexibleBatchNorm2d(C_out, affine=affine),
)
super(FlexibleMobileNetV2Block, self).__init__(
expansion,
C,
C_out,
stride,
self.kernel_size,
affine,
activation,
inv_bottleneck,
depth_wise,
point_linear,
)
self.reset_mask()
def set_mask(self, expansion, kernel_size):
mask = None
if expansion is not None and expansion != self.expansion:
filters = self.point_linear[0].weight.data
mask = _get_channel_mask(filters, make_divisible(self.C *
expansion, 8))
if self.inv_bottleneck:
self.inv_bottleneck[0].set_mask(None, mask)
self.inv_bottleneck[1].set_mask(mask)
self.depth_wise[0].set_mask(mask, kernel_size)
self.depth_wise[1].set_mask(mask)
self.point_linear[0].set_mask(mask, None)
def forward_rollout(self, inputs, expansion, kernel_size):
self.set_mask(expansion, kernel_size)
out = self.forward(inputs)
self.reset_mask()
return out
def finalize(self):
inv_bottleneck = None
if self.inv_bottleneck:
inv_bottleneck = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.inv_bottleneck
]
)
depth_wise = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.depth_wise
]
)
point_linear = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.point_linear
]
)
return MobileNetV2Block(
self.expansion,
self.C,
self.C_out,
self.stride,
self.kernel_size,
self.affine,
self.activation,
inv_bottleneck,
depth_wise,
point_linear,
)
class FlexibleMobileNetV3Block(MobileNetV3Block, FlexibleBlock):
NAME = "mbv3_block"
def __init__(self,
expansion,
C,
C_out,
stride,
kernel_sizes=(3, 5, 7),
do_kernel_transform=True,
affine=True,
activation="relu",
use_se=False,
schedule_cfg=None
):
FlexibleBlock.__init__(self, schedule_cfg)
self.expansion = expansion
self.activation = activation
self.C = C
self.C_out = C_out
self.C_inner = make_divisible(C * expansion, 8)
self.stride = stride
self.kernel_sizes = sorted(kernel_sizes)
self.kernel_size = self.kernel_sizes[-1]
self.do_kernel_transform = do_kernel_transform
self.use_se = use_se
self.affine = affine
inv_bottleneck = None
if expansion != 1:
inv_bottleneck = nn.Sequential(
FlexiblePointLinear(C, self.C_inner, 1, 1, 0),
FlexibleBatchNorm2d(self.C_inner, affine=affine),
get_op(activation)(),
)
depth_wise = nn.Sequential(
FlexibleDepthWiseConv(
self.C_inner,
self.kernel_sizes,
stride,
do_kernel_transform=do_kernel_transform,
),
FlexibleBatchNorm2d(self.C_inner, affine=affine),
get_op(activation)(),
)
point_linear = nn.Sequential(
FlexiblePointLinear(self.C_inner, C_out, 1, 1, 0),
FlexibleBatchNorm2d(C_out, affine=affine),
)
se = None
if self.use_se:
se = FlexibleSEModule(self.C_inner)
super(FlexibleMobileNetV3Block, self).__init__(
expansion,
C,
C_out,
stride,
self.kernel_size,
affine,
activation,
use_se,
inv_bottleneck,
depth_wise,
point_linear,
se,
)
self.reset_mask()
def set_mask(self, expansion, kernel_size):
mask = None
        if expansion is not None and expansion != self.expansion:
filters = self.point_linear[0].weight.data
mask = _get_channel_mask(filters, make_divisible(self.C *
expansion, 8))
if self.inv_bottleneck:
self.inv_bottleneck[0].set_mask(None, mask)
self.inv_bottleneck[1].set_mask(mask)
self.depth_wise[0].set_mask(mask, kernel_size)
self.depth_wise[1].set_mask(mask)
self.point_linear[0].set_mask(mask, None)
if self.se:
self.se.set_mask(mask)
def forward_rollout(self, inputs, expansion, kernel_size, drop_connect_rate=0.0):
self.set_mask(expansion, kernel_size)
out = self.forward(inputs, drop_connect_rate)
self.reset_mask()
return out
def finalize(self):
inv_bottleneck = None
if self.inv_bottleneck:
inv_bottleneck = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.inv_bottleneck
]
)
depth_wise = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.depth_wise
]
)
point_linear = nn.Sequential(
*[
m.finalize() if isinstance(m, FlexibleLayer) else m
for m in self.point_linear
]
)
se = None
if self.se:
se = self.se.finalize()
return MobileNetV3Block(
self.expansion,
self.C,
self.C_out,
self.stride,
self.kernel_size,
self.affine,
self.activation,
self.use_se,
inv_bottleneck,
depth_wise,
point_linear,
se,
)
class BaseBackboneArch(Component, nn.Module):
REGISTRY = "ofa_backbone"
def __init__(
self,
device,
blocks=[1, 4, 4, 4, 4, 4],
strides=[1, 2, 2, 1, 2, 1],
expansions=[1, 6, 6, 6, 6, 6],
layer_channels=[16, 24, 40, 80, 96, 192, 320],
mult_ratio=1.0,
kernel_sizes=[3, 5, 7],
do_kernel_transform=True,
num_classes=10,
cell_type="mbv2_cell",
pretrained_path=None,
schedule_cfg=None,
):
super(BaseBackboneArch, self).__init__(schedule_cfg)
nn.Module.__init__(self)
self.device = device
self.blocks = blocks
self.strides = strides
self.expansions = expansions
self.channels = layer_channels
self.mult_ratio = mult_ratio
self.kernel_sizes = kernel_sizes
self.do_kernel_transform = do_kernel_transform
self.num_classes = num_classes
self.pretrained_path = pretrained_path
@abc.abstractmethod
def make_stage(
self, C_in, C_out, depth, stride, expansion, kernel_size, mult_ratio=1.0
):
"""
        Make a series of blocks as a stage.
"""
class MobileNetV2Arch(BaseBackboneArch):
NAME = "mbv2_backbone"
"""
According to the original papar MobileNet-V2, the standard architecture is:
| input_ch | operator | t | c | n | s |
| 3 | conv2d | - | 32 | 1 | 2 |
| 32 | bottleneck | 1 | 16 | 1 | 1 |
| 16 | bottleneck | 6 | 24 | 2 | 2 |
| 24 | bottleneck | 6 | 32 | 3 | 2 |
| 32 | bottleneck | 6 | 64 | 4 | 2 |
| 64 | bottleneck | 6 | 96 | 3 | 1 |
| 96 | bottleneck | 6 | 160 | 3 | 2 |
| 160 | bottleneck | 6 | 320 | 1 | 1 |
| 320 | conv2d 1x1 | - | 1280 | 1 | 1 |
| 1280 | avgpool7x7 | - | - | 1 | - |
    | 1280 | conv2d 1x1 | - | k | - | - |
The first `conv2d` is called stem, and the last two `conv2d` are called
"conv_final" and "classifier" respectively.
    However, in order to stay compatible with MobileNet-V3, which has only 6
    rather than 7 bottleneck stages, we fix the last bottleneck (160 -> 320) at
    t=6, n=1, k=3.
"""
def __init__(
self,
device,
blocks=[1, 4, 4, 4, 4, 4],
strides=[1, 2, 2, 2, 1, 2],
expansions=[1, 6, 6, 6, 6, 6],
layer_channels=[32, 16, 24, 32, 64, 96, 160, 320, 1280],
mult_ratio=1.0,
kernel_sizes=[3, 5, 7],
do_kernel_transform=True,
num_classes=10,
block_type="mbv2_block",
pretrained_path=None,
stem_stride=2,
schedule_cfg=None,
):
super(MobileNetV2Arch, self).__init__(
device,
blocks,
strides,
expansions,
layer_channels,
mult_ratio,
kernel_sizes,
do_kernel_transform,
num_classes,
block_type,
pretrained_path,
schedule_cfg,
)
self.block_initializer = FlexibleBlock.get_class_(block_type)
self.stem_stride = stem_stride
self.channels = [make_divisible(c * mult_ratio, 8)
for c in layer_channels]
self.stem = nn.Sequential(
nn.Conv2d(
3, self.channels[0], kernel_size=3, stride=self.stem_stride, padding=1, bias=False
),
nn.BatchNorm2d(self.channels[0]),
get_op("relu")(),
)
expect(
blocks[0] == expansions[0] == 1,
"The first conv layer should have single block and no expansion.",
ValueError,
)
self.mult_ratio = mult_ratio
self.cells = [
self.make_stage(
self.channels[0],
self.channels[1],
self.blocks[0],
self.strides[0],
self.expansions[0],
[3],
)
]
for i, depth in enumerate(self.blocks[1:], 1):
self.cells.append(
self.make_stage(
self.channels[i],
self.channels[i + 1],
depth,
self.strides[i],
self.expansions[i],
self.kernel_sizes,
)
)
self.cells = nn.ModuleList(self.cells)
self.conv_head = self.block_initializer(
6,
self.channels[-3],
self.channels[-2],
1,
self.kernel_sizes,
self.do_kernel_transform,
activation="relu",
affine=True,
)
self.conv_final = nn.Sequential(
FlexiblePointLinear(self.channels[-2], self.channels[-1], 1, 1, 0),
nn.BatchNorm2d(self.channels[-1]),
)
self.classifier = nn.Conv2d(self.channels[-1], num_classes, 1, 1, 0)
if self.pretrained_path:
state_dict = torch.load(self.pretrained_path, "cpu")
if state_dict["classifier.weight"].shape[0] != self.num_classes:
del state_dict["classifier.weight"]
del state_dict["classifier.bias"]
self.logger.info(
f"loading pretrained model from path {self.pretrained_path}...")
self.logger.info(self.load_state_dict(state_dict, strict=False))
self.to(self.device)
def make_stage(self, C_in, C_out, block_num, stride, expansion, kernel_sizes):
cell = []
for i in range(block_num):
if i == 0:
s = stride
else:
s = 1
C_in = C_out
cell.append(
self.block_initializer(
expansion,
C_in,
C_out,
s,
kernel_sizes,
self.do_kernel_transform,
activation="relu",
affine=True,
)
)
return nn.ModuleList(cell)
def forward(self, inputs):
return self.forward_rollout(inputs)
def forward_rollout(self, inputs, rollout=None):
out = self.stem(inputs)
for i, cell in enumerate(self.cells):
for j, block in enumerate(cell):
if rollout is None:
out = block(out)
else:
if j >= rollout.depth[i]:
break
out = block.forward_rollout(
out, rollout.width[i][j], rollout.kernel[i][j]
)
out = self.conv_head(out)
out = self.conv_final(out)
out = F.adaptive_avg_pool2d(out, 1)
return self.classifier(out).flatten(1)
def finalize(self, blocks, expansions, kernel_sizes):
cells = []
finalized_model = copy.deepcopy(self)
for i, cell in enumerate(self.cells):
cells.append([])
for j, block in enumerate(cell):
if j >= blocks[i]:
break
block.set_mask(expansions[i][j], kernel_sizes[i][j])
cells[-1].append(block.finalize())
cells[-1] = nn.ModuleList(cells[-1])
finalized_model.cells = nn.ModuleList(cells)
return finalized_model
def extract_features(self, inputs, p_levels, rollout=None, drop_connect_rate=0.0):
out = self.stem(inputs)
level_indexes = feature_level_to_stage_index(self.strides)
features = []
for i, cell in enumerate(self.cells):
for j, block in enumerate(cell):
if rollout is None:
out = block(out, drop_connect_rate)
else:
if j >= rollout.depth[i]:
break
out = block.forward_rollout(
out, rollout.width[i][j], rollout.kernel[i][j], drop_connect_rate
)
features.append(out)
out = self.conv_head(out)
features[-1] = out
return [features[level_indexes[p]] for p in p_levels], out
def get_feature_channel_num(self, p_levels):
level_indexes = feature_level_to_stage_index(self.strides)
return [self.channels[level_indexes[p]] for p in p_levels]
def get_features(self, inputs, p_levels, rollout=None):
out = self.stem(inputs)
level_indexes = feature_level_to_stage_index(self.strides)
features = []
for i, cell in enumerate(self.cells):
for j, block in enumerate(cell):
if rollout is None:
out = block(out)
else:
if j >= rollout.depth[i]:
break
out = block.forward_rollout(
out, rollout.width[i][j], rollout.kernel[i][j]
)
features.append(out)
out = self.conv_head(out)
features[-1] = out
return [features[level_indexes[p]] for p in p_levels], out
class MobileNetV3Arch(BaseBackboneArch):
NAME = "mbv3_backbone"
def __init__(
self,
device,
blocks=[1, 4, 4, 4, 4, 4],
strides=[1, 2, 2, 2, 1, 2],
expansions=[1, 6, 6, 6, 6, 6],
layer_channels=[16, 16, 24, 40, 80, 112, 160, 960, 1280],
mult_ratio=1.0,
kernel_sizes=[3, 5, 7],
do_kernel_transform=True,
use_ses=[False, False, True, False, True, True],
acts=["relu", "relu", "relu", "h_swish", "h_swish", "h_swish"],
num_classes=10,
block_type="mbv3_block",
pretrained_path=None,
stem_stride=2,
schedule_cfg=None,
):
super(MobileNetV3Arch, self).__init__(
device,
blocks,
strides,
expansions,
layer_channels,
mult_ratio,
kernel_sizes,
do_kernel_transform,
num_classes,
block_type,
pretrained_path,
schedule_cfg,
)
self.block_initializer = FlexibleBlock.get_class_(block_type)
self.channels = [make_divisible(c * mult_ratio, 8)
for c in layer_channels]
self.stem_stride = stem_stride
self.stem = nn.Sequential(
nn.Conv2d(
3, self.channels[0], kernel_size=3, stride=self.stem_stride, padding=1, bias=False
),
nn.BatchNorm2d(self.channels[0]),
get_op("h_swish")(),
)
expect(
blocks[0] == expansions[0] == 1,
"The first conv layer should have single block and no expansion.",
ValueError,
)
self.mult_ratio = mult_ratio
self.use_ses = use_ses
self.acts = acts
self.cells = [
self.make_stage(
self.channels[0],
self.channels[1],
self.blocks[0],
self.strides[0],
self.expansions[0],
[3],
self.use_ses[0],
self.acts[0],
)
]
for i, depth in enumerate(self.blocks[1:], 1):
self.cells.append(
self.make_stage(
self.channels[i],
self.channels[i + 1],
depth,
self.strides[i],
self.expansions[i],
self.kernel_sizes,
self.use_ses[i],
self.acts[i],
)
)
self.cells = nn.ModuleList(self.cells)
self.conv_head = nn.Sequential(
nn.Conv2d(self.channels[-3],
self.channels[-2], 1, 1, 0, bias=False),
nn.BatchNorm2d(self.channels[-2]),
get_op("h_swish")(),
)
self.conv_final = nn.Sequential(
nn.Conv2d(self.channels[-2],
self.channels[-1], 1, 1, 0, bias=False),
get_op("h_swish")(),
)
self.classifier = nn.Linear(self.channels[-1], num_classes)
if self.pretrained_path:
state_dict = torch.load(self.pretrained_path, "cpu")
if state_dict["classifier.weight"].shape[0] != self.num_classes:
del state_dict["classifier.weight"]
del state_dict["classifier.bias"]
self.logger.info(self.load_state_dict(state_dict, strict=False))
self.to(self.device)
def make_stage(
self, C_in, C_out, block_num, stride, expansion, kernel_sizes, use_se, act
):
cell = []
for i in range(block_num):
if i == 0:
s = stride
else:
s = 1
C_in = C_out
cell.append(
self.block_initializer(
expansion,
C_in,
C_out,
s,
kernel_sizes,
self.do_kernel_transform,
activation=act,
affine=True,
use_se=use_se,
)
)
return nn.ModuleList(cell)
def forward(self, inputs):
return self.forward_rollout(inputs)
def forward_rollout(self, inputs, rollout=None):
out = self.stem(inputs)
for i, cell in enumerate(self.cells):
for j, block in enumerate(cell):
if rollout is None:
out = block(out)
else:
if j >= rollout.depth[i]:
break
out = block.forward_rollout(
out, rollout.width[i][j], rollout.kernel[i][j]
)
out = self.conv_head(out)
out = out.mean(3, keepdim=True).mean(2, keepdim=True)
out = self.conv_final(out)
out = torch.flatten(out, 1)
return self.classifier(out)
def finalize(self, blocks, expansions, kernel_sizes):
cells = []
finalized_model = copy.deepcopy(self)
for i, cell in enumerate(self.cells):
cells.append([])
for j, block in enumerate(cell):
if j >= blocks[i]:
break
block.set_mask(expansions[i][j], kernel_sizes[i][j])
cells[-1].append(block.finalize())
cells[-1] = nn.ModuleList(cells[-1])
finalized_model.cells = nn.ModuleList(cells)
return finalized_model
def extract_features(self, inputs, p_levels, rollout=None, drop_connect_rate=0.0):
out = self.stem(inputs)
level_indexes = feature_level_to_stage_index(self.strides)
features = []
for i, cell in enumerate(self.cells):
for j, block in enumerate(cell):
if rollout is None:
out = block(out, drop_connect_rate)
else:
if j >= rollout.depth[i]:
break
out = block.forward_rollout(
out, rollout.width[i][j], rollout.kernel[i][j], drop_connect_rate
)
features.append(out)
out = self.conv_head(out)
features[-1] = out
return [features[level_indexes[p]] for p in p_levels], out
def get_feature_channel_num(self, p_levels):
level_indexes = feature_level_to_stage_index(self.strides + [1])
return [self.channels[1 + level_indexes[p]] for p in p_levels]
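To make the intended search-then-finalize workflow concrete, here is a hedged usage sketch (rollout-free path only; the per-stage list shapes passed to finalize are read off the loops above, and the concrete depth, expansion, and kernel choices are arbitrary examples, not values from the source):

# Hypothetical: build the MBv2 supernet on CPU, run it, then derive a fixed
# sub-network from it.
import torch

supernet = MobileNetV2Arch(device="cpu", num_classes=10)
logits = supernet(torch.randn(2, 3, 224, 224))      # full supernet forward

blocks = [1, 2, 2, 3, 2, 2]                          # blocks kept per stage
expansions = [[1]] + [[6] * d for d in blocks[1:]]   # expansion per kept block
kernels = [[3]] + [[5] * d for d in blocks[1:]]      # kernel size per kept block
subnet = supernet.finalize(blocks, expansions, kernels)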
| 32.959072 | 98 | 0.519103 | 2,652 | 24,159 | 4.525641 | 0.090121 | 0.029995 | 0.028329 | 0.011081 | 0.806532 | 0.785869 | 0.764206 | 0.751208 | 0.723213 | 0.694301 | 0 | 0.027356 | 0.384163 | 24,159 | 732 | 99 | 33.004098 | 0.779339 | 0.003974 | 0 | 0.741433 | 0 | 0 | 0.01995 | 0.001084 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043614 | false | 0 | 0.014019 | 0.003115 | 0.104361 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3da89b4ccbef0142ba6d237579edc093e8a90052 | 4,882 | py | Python | tests/geomol_featurization_tests.py | boltzmannlabs/boltpro | 3ddebbeb10191150a29fc81ef4336ec2dbe88092 | ["Apache-2.0"] | 4 | 2022-02-21T12:01:04.000Z | 2022-03-10T04:30:55.000Z | tests/geomol_featurization_tests.py | boltzmannlabs/boltpro | 3ddebbeb10191150a29fc81ef4336ec2dbe88092 | ["Apache-2.0"] | null | null | null | tests/geomol_featurization_tests.py | boltzmannlabs/boltpro | 3ddebbeb10191150a29fc81ef4336ec2dbe88092 | ["Apache-2.0"] | 2 | 2022-03-11T07:58:58.000Z | 2022-03-15T05:17:59.000Z |
from geomol.data import drugs_confs as drugs_featurizer
from geomol.data import qm9_confs as qm9_featurizer
###### tests for drugs data featurization ###########
dataset = 'drugs'
files_path = r"molpro/geomol/sample_data/drugs"
split_path = r"molpro/geomol/sample_data/sample_data/drugs_split.npy"
featurizer = drugs_featurizer(files_path, split_path, "test")
pickle_files = featurizer.pickle_files
def test_open_pickle():
data_dict = featurizer.open_pickle(pickle_files[0])
assert data_dict is not None and type(data_dict).__name__ == 'dict'
data_dict = featurizer.open_pickle(pickle_files[0])
def test_featurize_mol():
data_obj = featurizer.featurize_mol(data_dict)
assert data_obj is not None and type(data_obj).__name__ == 'Data' and len(data_obj.keys) == 12
data_obj = featurizer.featurize_mol(data_dict)
def test_boltzmann_weight():
boltzmann_weight = data_obj.boltzmann_weight
assert boltzmann_weight is not None and type(boltzmann_weight).__name__ == 'float'
def test_degeneracy():
degeneracy = data_obj.degeneracy
assert degeneracy is not None and type(degeneracy).__name__ == 'int'
def test_chiral_tag():
chiral_tag = data_obj.chiral_tag
assert chiral_tag is not None and type(chiral_tag).__name__ == 'Tensor'
def test_edge_attr():
edge_attr = data_obj.edge_attr
assert edge_attr is not None and type(edge_attr).__name__ == 'Tensor'
def test_edge_index():
edge_index = data_obj.edge_index
assert edge_index is not None and type(edge_index).__name__ == 'Tensor'
def test_mol():
mol = data_obj.mol
assert mol is not None and type(mol).__name__ == 'Mol'
def test_name():
smile_name = data_obj.name
assert smile_name is not None and type(smile_name).__name__ == 'str'
def test_neighbors():
neighbors = data_obj.neighbors
assert neighbors is not None and type(neighbors).__name__ == 'dict'
def test_pos():
pos = data_obj.pos
assert pos is not None and type(pos).__name__ == 'list' and type(pos[0]).__name__ == 'Tensor'
def test_pos_mask():
pos_mask = data_obj.pos_mask
assert pos_mask is not None and type(pos_mask).__name__ == 'Tensor'
def test_x():
x = data_obj.x
assert x is not None and type(x).__name__ == 'Tensor'
def test_z():
z = data_obj.z
assert z is not None and type(z).__name__ == 'Tensor'
###### tests for qm9 data featurization ###########
dataset = 'qm9'
files_path = r"molpro/geomol/sample_data/drugs"
split_path = r"molpro/geomol/sample_data/sample_data/drugs_split.npy"
# The qm9_ prefix keeps these definitions from shadowing the drugs tests above.
qm9_feat = qm9_featurizer(files_path, split_path, "test")
qm9_pickle_files = qm9_feat.pickle_files
def test_qm9_open_pickle():
    qm9_dict = qm9_feat.open_pickle(qm9_pickle_files[0])
    assert qm9_dict is not None and type(qm9_dict).__name__ == 'dict'
qm9_data_dict = qm9_feat.open_pickle(qm9_pickle_files[0])
def test_qm9_featurize_mol():
    qm9_obj = qm9_feat.featurize_mol(qm9_data_dict)
    assert qm9_obj is not None and type(qm9_obj).__name__ == 'Data' and len(qm9_obj.keys) == 12
qm9_data_obj = qm9_feat.featurize_mol(qm9_data_dict)
def test_qm9_boltzmann_weight():
    boltzmann_weight = qm9_data_obj.boltzmann_weight
    assert boltzmann_weight is not None and type(boltzmann_weight).__name__ == 'float'
def test_qm9_degeneracy():
    degeneracy = qm9_data_obj.degeneracy
    assert degeneracy is not None and type(degeneracy).__name__ == 'int'
def test_qm9_chiral_tag():
    chiral_tag = qm9_data_obj.chiral_tag
    assert chiral_tag is not None and type(chiral_tag).__name__ == 'Tensor'
def test_qm9_edge_attr():
    edge_attr = qm9_data_obj.edge_attr
    assert edge_attr is not None and type(edge_attr).__name__ == 'Tensor'
def test_qm9_edge_index():
    edge_index = qm9_data_obj.edge_index
    assert edge_index is not None and type(edge_index).__name__ == 'Tensor'
def test_qm9_mol():
    mol = qm9_data_obj.mol
    assert mol is not None and type(mol).__name__ == 'Mol'
def test_qm9_name():
    smile_name = qm9_data_obj.name
    assert smile_name is not None and type(smile_name).__name__ == 'str'
def test_qm9_neighbors():
    neighbors = qm9_data_obj.neighbors
    assert neighbors is not None and type(neighbors).__name__ == 'dict'
def test_qm9_pos():
    pos = qm9_data_obj.pos
    assert pos is not None and type(pos).__name__ == 'list' and type(pos[0]).__name__ == 'Tensor'
def test_qm9_pos_mask():
    pos_mask = qm9_data_obj.pos_mask
    assert pos_mask is not None and type(pos_mask).__name__ == 'Tensor'
def test_qm9_x():
    x = qm9_data_obj.x
    assert x is not None and type(x).__name__ == 'Tensor'
def test_qm9_z():
    z = qm9_data_obj.z
    assert z is not None and type(z).__name__ == 'Tensor'
print("Succesfully Done...")
| 26.677596 | 98 | 0.725932 | 747 | 4,882 | 4.337349 | 0.084337 | 0.077778 | 0.083333 | 0.111111 | 0.932716 | 0.932716 | 0.932716 | 0.932716 | 0.932716 | 0.932716 | 0 | 0.003695 | 0.168578 | 4,882 | 183 | 99 | 26.677596 | 0.794531 | 0.013929 | 0 | 0.934579 | 0 | 0 | 0.074299 | 0.035161 | 0 | 0 | 0 | 0 | 0.280374 | 1 | 0.280374 | false | 0 | 0.018692 | 0 | 0.299065 | 0.009346 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9a990ac5679228f5c4423b2de82661fa8dd0ae60 | 1,144 | py | Python | test/item_updater_test.py | dkalaxdk/IssueTableGenerator | a2999e3417c2514292b1197d8cfe83435f6f1b8d | ["MIT"] | null | null | null | test/item_updater_test.py | dkalaxdk/IssueTableGenerator | a2999e3417c2514292b1197d8cfe83435f6f1b8d | ["MIT"] | null | null | null | test/item_updater_test.py | dkalaxdk/IssueTableGenerator | a2999e3417c2514292b1197d8cfe83435f6f1b8d | ["MIT"] | null | null | null |
from classes.pull_requests import Pr
from helpers.item_updater import *
from classes.issues import Issue
def test_write_pr_written():
test_number = 5
pr = Pr()
pr.number = test_number
pull_requests = []
first_len = len(pull_requests)
write_pr(pr, pull_requests)
second_len = len(pull_requests)
assert first_len < second_len
def test_write_pr_written_false_already_in_list():
test_number = 5
pr = Pr()
pr.number = test_number
pull_requests = [pr]
first_len = len(pull_requests)
write_pr(pr, pull_requests)
second_len = len(pull_requests)
assert first_len == second_len
def test_write_issue_written():
test_number = 5
issue = Issue()
issue.number = test_number
issues = []
first_len = len(issues)
write_pr(issue, issues)
second_len = len(issues)
assert first_len < second_len
def test_write_issue_written_false_already_in_list():
test_number = 5
issue = Issue()
issue.number = test_number
issues = [issue]
first_len = len(issues)
write_pr(issue, issues)
second_len = len(issues)
assert first_len == second_len
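The four tests pin down the helper's contract: append an item keyed by its number, but never twice. A minimal sketch consistent with that contract is below (hypothetical; the real helpers.item_updater is star-imported, and the issue tests also go through write_pr, suggesting the helper is item-agnostic):

def write_pr(item, items):
    # Append only when no existing entry carries the same number.
    if all(existing.number != item.number for existing in items):
        items.append(item)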
| 23.833333 | 53 | 0.697552 | 162 | 1,144 | 4.567901 | 0.154321 | 0.145946 | 0.064865 | 0.097297 | 0.872973 | 0.841892 | 0.841892 | 0.841892 | 0.783784 | 0.783784 | 0 | 0.004484 | 0.22028 | 1,144 | 47 | 54 | 24.340426 | 0.825112 | 0 | 0 | 0.717949 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102564 | 1 | 0.102564 | false | 0 | 0.076923 | 0 | 0.179487 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9a9950ad5d3e77303d5c4acb84e5bdc303a9e596 | 147 | py | Python | models/slimmable/__init__.py | haolibai/dmcp | 7d9e88230850d8094a040d8c4eaf8b0d0393a210 | ["CC-BY-4.0"] | 119 | 2020-05-08T01:05:50.000Z | 2022-03-02T07:36:24.000Z | models/slimmable/__init__.py | haolibai/dmcp | 7d9e88230850d8094a040d8c4eaf8b0d0393a210 | ["CC-BY-4.0"] | 13 | 2020-05-08T08:57:33.000Z | 2021-09-02T09:14:51.000Z | models/slimmable/__init__.py | haolibai/dmcp | 7d9e88230850d8094a040d8c4eaf8b0d0393a210 | ["CC-BY-4.0"] | 23 | 2020-05-08T03:18:24.000Z | 2021-08-28T16:04:31.000Z |
# -*- coding:utf-8 -*-
from models.slimmable.us_resnet import us_resnet18, us_resnet50
from models.slimmable.us_mobilenet import us_mobilenet_v2
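For context, this __init__.py simply re-exports the slimmable constructors at the package root; a consumption sketch follows (the num_classes keyword is an assumption, not taken from dmcp):

# Downstream code can import from the package root instead of the submodules:
from models.slimmable import us_resnet18, us_mobilenet_v2

# Hypothetical: assumes the constructors accept a num_classes keyword.
model = us_resnet18(num_classes=1000)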
| 29.4 | 63 | 0.802721 | 22 | 147 | 5.090909 | 0.590909 | 0.178571 | 0.339286 | 0.375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045455 | 0.102041 | 147 | 4 | 64 | 36.75 | 0.80303 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |